text
stringlengths
6
947k
repo_name
stringlengths
5
100
path
stringlengths
4
231
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
6
947k
score
float64
0
0.34
# Copyright 2010, 2012 Canonical Ltd. This software is licensed under # the GNU Affero General Public License version 3 (see the file # LICENSE). import logging import re from datetime import datetime from django.core.validators import validate_email from django.db import models from django.utils.translation import ugettext_lazy as _ from model_utils.managers import PassThroughManager from identityprovider.models import Account, Person from identityprovider.models.const import EmailStatus __all__ = ( 'EmailAddress', 'InvalidatedEmailAddress', ) PHONE_EMAIL_DOMAIN = 'phone.ubuntu' PHONE_EMAIL_INVALID_CHARS = re.compile(r"[^-!#$%&'*+/=?^_`{}|~0-9A-Z\.]", re.IGNORECASE) class EmailAddressQuerySet(models.query.QuerySet): def verified(self): return self.filter( status__in=(EmailStatus.VALIDATED, EmailStatus.PREFERRED)) class EmailAddressManager(PassThroughManager): def _generate_email_from_phone_id(self, phone_id): # replace chars not validated by django validate_email by # email = '%s@%s' % (PHONE_EMAIL_INVALID_CHARS.sub('#', phone_id), PHONE_EMAIL_DOMAIN) return email def create_from_phone_id(self, phone_id, account): email = self._generate_email_from_phone_id(phone_id) email_address = EmailAddress.objects.create( email=email, account=account, status=EmailStatus.NEW) return email_address def get_from_phone_id(self, phone_id): email = self._generate_email_from_phone_id(phone_id) email_address = self.get(email=email) return email_address class EmailAddress(models.Model): email = models.TextField(validators=[validate_email]) lp_person = models.IntegerField( db_column='person', blank=True, null=True, editable=False) status = models.IntegerField(choices=EmailStatus._get_choices()) date_created = models.DateTimeField( default=datetime.utcnow, blank=True, editable=False) account = models.ForeignKey( Account, db_column='account', blank=True, null=True) objects = EmailAddressManager.for_queryset_class(EmailAddressQuerySet)() class Meta: app_label = 'identityprovider' db_table 
= u'emailaddress' verbose_name_plural = _('Email addresses') def __unicode__(self): return self.email @property def is_preferred(self): return self.status == EmailStatus.PREFERRED @property def is_verifiable(self): suffix = '@%s' % PHONE_EMAIL_DOMAIN return not self.email.endswith(suffix) @property def is_verified(self): return self.status in (EmailStatus.VALIDATED, EmailStatus.PREFERRED) def invalidate(self): account = self.account if account is None and self.lp_person: try: person = Person.objects.get(id=self.lp_person) account = person.account except Person.DoesNotExist: pass invalidated_email = None if account: # create invalidated entry invalidated_email = InvalidatedEmailAddress.objects.create( email=self.email, account=account, date_created=self.date_created) else: logging.warning( "Could not create invalidated entry for %s, " "no associated account found" % self.email) # and delete from emails table self.delete() return invalidated_email class InvalidatedEmailAddress(models.Model): email = models.TextField(validators=[validate_email]) date_created = models.DateTimeField(blank=True, editable=False) date_invalidated = models.DateTimeField( default=datetime.utcnow, null=True, blank=True) account = models.ForeignKey( Account, db_column='account', blank=True, null=True) account_notified = models.BooleanField() class Meta: app_label = 'identityprovider' db_table = u'invalidated_emailaddress' verbose_name_plural = _('Invalidated email addresses') def __unicode__(self): return self.email
miing/mci_migo
identityprovider/models/emailaddress.py
Python
agpl-3.0
4,191
0
# -*- coding: utf-8 -*- """Terrain slope calculation, and ground range spacing calculation from DEM. Author: Michael Denbina Copyright 2016 California Institute of Technology. All rights reserved. United States Government Sponsorship acknowledged. This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import numpy as np def calcslope(dem, spacing, inc): """Calculate terrain slope angles. Given an input DEM in radar (azimuth, slant range) coordinates, the DEM pixel spacing (in azimuth and slant range), and the incidence angle, calculate and return the azimuth and ground range slope angles (in radians). Arguments: dem: an array containing the DEM heights. spacing: a tuple containing the (azimuth, slant range) pixel spacing of the DEM, in meters. inc: the incidence angle, in radians. Returns: rngslope: the terrain slope angle in the ground range direction azslope: the slope angle in the azimuth direction """ (azslope,rngslope) = np.gradient(dem) azslope = np.arctan(azslope / spacing[0]) rngslope = np.arctan(rngslope / ((spacing[1]/np.sin(inc)) + (rngslope/np.tan(inc)))) return rngslope, azslope def calcgrspacing(dem, spacing, inc): """Calculate ground range pixel spacing. Given an input DEM in radar (azimuth, slant range) coordinates, the DEM pixel spacing (in azimuth and slant range), and the incidence angle, calculate and return the ground range spacing. Arguments: dem: an array containing the DEM heights. 
spacing: the slant range spacing of the DEM, in meters. inc: the incidence angle, in radians. Returns: grspacing: Ground range spacing for each pixel, in meters. """ (azgrad,srgrad) = np.gradient(dem) grspacing = ((spacing/np.sin(inc)) + (srgrad/np.tan(inc))) return grspacing
mdenbina/kapok
kapok/lib/slope.py
Python
gpl-3.0
2,658
0.007524
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RTriebeard(RPackage): """triebeard: 'Radix' Trees in 'Rcpp'""" homepage = "https://github.com/Ironholds/triebeard/" url = "https://cloud.r-project.org/src/contrib/triebeard_0.3.0.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/triebeard" version('0.3.0', sha256='bf1dd6209cea1aab24e21a85375ca473ad11c2eff400d65c6202c0fb4ef91ec3') depends_on('r-rcpp', type=('build', 'run'))
LLNL/spack
var/spack/repos/builtin/packages/r-triebeard/package.py
Python
lgpl-2.1
652
0.003067
Flask==0.10.1 Jinja2==2.7.2 MarkupSafe==0.18 Werkzeug==0.9.4 distribute==0.6.31 itsdangerous==0.23 lxml==3.3.1 pygal==1.3.1 wsgiref==0.1.2
birknilson/oyster
examples/piechart/requirements.py
Python
mit
139
0.064748
import pickle from sklearn.model_selection import train_test_split from preprocess.feature import FeatureFile from network.model import FC1Net, FC4Net, MLPModel, SaNet from network.evaluate import NetEvaluator class Trainer: def __init__(self, feature_file=None): self.exclude_features_keys = None self.feature_filename = feature_file self.X, self.Y = self.load_full_data(feature_file) self.feature_file = FeatureFile(feature_file) def run(self, args, **kwargs): print('** Train under {} **'.format(self.feature_filename)) if args.videowise: train_x, test_x, train_y, test_y = self.train_test_split_videowise() elif kwargs.get('extra_test'): x, y = self.load_full_data(kwargs.get('extra_test')) train_x, test_x, train_y, test_y = self.X, x, self.Y, y else: train_x, test_x, train_y, test_y = train_test_split(self.X, self.Y, test_size=0.2) FC1Net(train=True).run((train_x, train_y), (test_x, test_y), args.save) FC4Net(train=True).run((train_x, train_y), (test_x, test_y), args.save) MLPModel(train=True).run((train_x, train_y), (test_x, test_y), args.save) SaNet(train=True).run((train_x, train_y), (test_x, test_y), args.save) self.evaluator((train_x, train_y), (test_x, test_y), self.X, self.Y) def evaluator(self, train, test, X, Y): train_x, train_y = train test_x, test_y = test evaluator = NetEvaluator(X, Y) evaluator.X, evaluator.Y = self.X, self.Y evaluator.train_x, evaluator.test_x, evaluator.train_y, evaluator.test_y = train_x, test_x, train_y, test_y print('=== evaluator & cross-validate ===') evaluator.baseline_svm() evaluator.baseline_randomforest() print('-For FC1Net-') evaluator.cross_validation(FC1Net.build_model) print('-For FC4Net-') evaluator.cross_validation(FC4Net.build_model) print('-For MLPModel-') evaluator.cross_validation(MLPModel.build_model) print('-For SaNet-') evaluator.cross_validation(SaNet.build_model) def load_full_data(self, feature_file): X, Y = FeatureFile(feature_file).load() Y[Y > 0] = 1 return X, Y def train_test_split_videowise(self): f = 
self.feature_file data = f.load(random=True, video_wise=True, split=0.1) X, Y = data['train'] X_, Y_ = data['test'] Y[Y > 0] = 1 Y_[Y_ > 0] = 1 print('Excluded videos: ', f.excluded) print('Train/Test ({}/{}) features'.format(len(Y), len(Y_))) self.exclude_features_keys = f.excluded return X, X_, Y, Y_ def summary(self): results = { 'exclude_features_keys': self.exclude_features_keys } with open('data/outputs/training_info.pkl', 'wb') as f: pickle.dump(results, f, protocol=2) # For Python2
NTHU-CVLab/ActivityProps
network/trainer.py
Python
apache-2.0
2,961
0.001351
"""Tests for the pylint checker in :mod:`pylint.extensions.check_elif """ import os import os.path as osp import unittest from pylint import checkers from pylint.extensions.check_elif import ElseifUsedChecker from pylint.lint import PyLinter from pylint.reporters import BaseReporter class TestReporter(BaseReporter): def handle_message(self, msg): self.messages.append(msg) def on_set_current_module(self, module, filepath): self.messages = [] class CheckElseIfUsedTC(unittest.TestCase): @classmethod def setUpClass(cls): cls._linter = PyLinter() cls._linter.set_reporter(TestReporter()) checkers.initialize(cls._linter) cls._linter.register_checker(ElseifUsedChecker(cls._linter)) def test_elseif_message(self): elif_test = osp.join(osp.dirname(osp.abspath(__file__)), 'data', 'elif.py') self._linter.check([elif_test]) msgs = self._linter.reporter.messages self.assertEqual(len(msgs), 2) for msg in msgs: self.assertEqual(msg.symbol, 'else-if-used') self.assertEqual(msg.msg, 'Consider using "elif" instead of "else if"') self.assertEqual(msgs[0].line, 9) self.assertEqual(msgs[1].line, 21) if __name__ == '__main__': unittest.main()
justathoughtor2/atomicApe
cygwin/lib/python2.7/site-packages/pylint/test/extensions/test_elseif_used.py
Python
gpl-3.0
1,359
0
# importing 'argv' from 'sys' library from sys import argv # assigning the variables 'script' (which is the name of the script), and 'filename' (which is the name of a file) to the command line argument array 'argv' script, filename = argv # printing string with formatter representing 'filename' print "We're going to erase %r." % filename # printing string print "If you don't want that, hit CTRL-C (^C)." print "If you do want that, hit RETURN." raw_input("?") print "Opening the file..." # assigning operator to variable. using 'open' to open the file. target = open(filename, 'w') # printing string print "Truncating the file. Goodbye!" # emptying file using truncate function target.truncate() print "Now I'm going to ask you for three lines." # assigning user data to variable and printing string line1 = raw_input("line 1: ") line2 = raw_input("line 2: ") line3 = raw_input("line 3: ") print "I'm going to write these to the file." # writing string from variable to the file target.write(line1) # writing new line to the file target.write('\n') target.write(line2) target.write('\n') target.write(line3) target.write('\n') print "And finally, we close it." # now we close the case! - '...It was Elementary, my dear Watson...' target.close()
SaileshPatel/Python-Exercises
ex16.py
Python
mit
1,257
0.001591
"""Test Google Smart Home.""" from unittest.mock import patch, Mock import pytest from homeassistant.core import State, EVENT_CALL_SERVICE from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, TEMP_CELSIUS, __version__ from homeassistant.setup import async_setup_component from homeassistant.components import camera from homeassistant.components.climate.const import ( ATTR_MIN_TEMP, ATTR_MAX_TEMP, HVAC_MODE_HEAT, ) from homeassistant.components.google_assistant import ( const, trait, smart_home as sh, EVENT_COMMAND_RECEIVED, EVENT_QUERY_RECEIVED, EVENT_SYNC_RECEIVED, ) from homeassistant.components.demo.binary_sensor import DemoBinarySensor from homeassistant.components.demo.cover import DemoCover from homeassistant.components.demo.light import DemoLight from homeassistant.components.demo.media_player import AbstractDemoPlayer from homeassistant.components.demo.switch import DemoSwitch from homeassistant.helpers import device_registry from tests.common import ( mock_device_registry, mock_registry, mock_area_registry, mock_coro, ) from . 
import BASIC_CONFIG, MockConfig REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" @pytest.fixture def registries(hass): """Registry mock setup.""" from types import SimpleNamespace ret = SimpleNamespace() ret.entity = mock_registry(hass) ret.device = mock_device_registry(hass) ret.area = mock_area_registry(hass) return ret async def test_sync_message(hass): """Test a sync message.""" light = DemoLight(None, "Demo Light", state=False, hs_color=(180, 75)) light.hass = hass light.entity_id = "light.demo_light" await light.async_update_ha_state() # This should not show up in the sync request hass.states.async_set("sensor.no_match", "something") # Excluded via config hass.states.async_set("light.not_expose", "on") config = MockConfig( should_expose=lambda state: state.entity_id != "light.not_expose", entity_config={ "light.demo_light": { const.CONF_ROOM_HINT: "Living Room", const.CONF_ALIASES: ["Hello", "World"], } }, ) events = [] hass.bus.async_listen(EVENT_SYNC_RECEIVED, events.append) result = await sh.async_handle_message( hass, config, "test-agent", {"requestId": REQ_ID, "inputs": [{"intent": "action.devices.SYNC"}]}, ) assert result == { "requestId": REQ_ID, "payload": { "agentUserId": "test-agent", "devices": [ { "id": "light.demo_light", "name": {"name": "Demo Light", "nicknames": ["Hello", "World"]}, "traits": [ trait.TRAIT_BRIGHTNESS, trait.TRAIT_ONOFF, trait.TRAIT_COLOR_SETTING, ], "type": const.TYPE_LIGHT, "willReportState": False, "attributes": { "colorModel": "hsv", "colorTemperatureRange": { "temperatureMinK": 2000, "temperatureMaxK": 6535, }, }, "roomHint": "Living Room", } ], }, } await hass.async_block_till_done() assert len(events) == 1 assert events[0].event_type == EVENT_SYNC_RECEIVED assert events[0].data == {"request_id": REQ_ID} # pylint: disable=redefined-outer-name async def test_sync_in_area(hass, registries): """Test a sync message where room hint comes from area.""" area = registries.area.async_create("Living Room") device = 
registries.device.async_get_or_create( config_entry_id="1234", connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) registries.device.async_update_device(device.id, area_id=area.id) entity = registries.entity.async_get_or_create( "light", "test", "1235", suggested_object_id="demo_light", device_id=device.id ) light = DemoLight(None, "Demo Light", state=False, hs_color=(180, 75)) light.hass = hass light.entity_id = entity.entity_id await light.async_update_ha_state() config = MockConfig(should_expose=lambda _: True, entity_config={}) events = [] hass.bus.async_listen(EVENT_SYNC_RECEIVED, events.append) result = await sh.async_handle_message( hass, config, "test-agent", {"requestId": REQ_ID, "inputs": [{"intent": "action.devices.SYNC"}]}, ) assert result == { "requestId": REQ_ID, "payload": { "agentUserId": "test-agent", "devices": [ { "id": "light.demo_light", "name": {"name": "Demo Light"}, "traits": [ trait.TRAIT_BRIGHTNESS, trait.TRAIT_ONOFF, trait.TRAIT_COLOR_SETTING, ], "type": const.TYPE_LIGHT, "willReportState": False, "attributes": { "colorModel": "hsv", "colorTemperatureRange": { "temperatureMinK": 2000, "temperatureMaxK": 6535, }, }, "roomHint": "Living Room", } ], }, } await hass.async_block_till_done() assert len(events) == 1 assert events[0].event_type == EVENT_SYNC_RECEIVED assert events[0].data == {"request_id": REQ_ID} async def test_query_message(hass): """Test a sync message.""" light = DemoLight(None, "Demo Light", state=False, hs_color=(180, 75)) light.hass = hass light.entity_id = "light.demo_light" await light.async_update_ha_state() light2 = DemoLight( None, "Another Light", state=True, hs_color=(180, 75), ct=400, brightness=78 ) light2.hass = hass light2.entity_id = "light.another_light" await light2.async_update_ha_state() events = [] hass.bus.async_listen(EVENT_QUERY_RECEIVED, events.append) result = await sh.async_handle_message( hass, BASIC_CONFIG, "test-agent", { "requestId": REQ_ID, "inputs": [ { "intent": 
"action.devices.QUERY", "payload": { "devices": [ {"id": "light.demo_light"}, {"id": "light.another_light"}, {"id": "light.non_existing"}, ] }, } ], }, ) assert result == { "requestId": REQ_ID, "payload": { "devices": { "light.non_existing": {"online": False}, "light.demo_light": {"on": False, "online": True, "brightness": 0}, "light.another_light": { "on": True, "online": True, "brightness": 30, "color": { "spectrumHsv": { "hue": 180, "saturation": 0.75, "value": 0.3058823529411765, }, "temperatureK": 2500, }, }, } }, } assert len(events) == 3 assert events[0].event_type == EVENT_QUERY_RECEIVED assert events[0].data == {"request_id": REQ_ID, "entity_id": "light.demo_light"} assert events[1].event_type == EVENT_QUERY_RECEIVED assert events[1].data == {"request_id": REQ_ID, "entity_id": "light.another_light"} assert events[2].event_type == EVENT_QUERY_RECEIVED assert events[2].data == {"request_id": REQ_ID, "entity_id": "light.non_existing"} async def test_execute(hass): """Test an execute command.""" await async_setup_component(hass, "light", {"light": {"platform": "demo"}}) await hass.services.async_call( "light", "turn_off", {"entity_id": "light.ceiling_lights"}, blocking=True ) events = [] hass.bus.async_listen(EVENT_COMMAND_RECEIVED, events.append) service_events = [] hass.bus.async_listen(EVENT_CALL_SERVICE, service_events.append) result = await sh.async_handle_message( hass, BASIC_CONFIG, None, { "requestId": REQ_ID, "inputs": [ { "intent": "action.devices.EXECUTE", "payload": { "commands": [ { "devices": [ {"id": "light.non_existing"}, {"id": "light.ceiling_lights"}, ], "execution": [ { "command": "action.devices.commands.OnOff", "params": {"on": True}, }, { "command": "action.devices.commands.BrightnessAbsolute", "params": {"brightness": 20}, }, ], } ] }, } ], }, ) assert result == { "requestId": REQ_ID, "payload": { "commands": [ { "ids": ["light.non_existing"], "status": "ERROR", "errorCode": "deviceOffline", }, { "ids": ["light.ceiling_lights"], 
"status": "SUCCESS", "states": { "on": True, "online": True, "brightness": 20, "color": { "spectrumHsv": { "hue": 56, "saturation": 0.86, "value": 0.2, }, "temperatureK": 2631, }, }, }, ] }, } assert len(events) == 4 assert events[0].event_type == EVENT_COMMAND_RECEIVED assert events[0].data == { "request_id": REQ_ID, "entity_id": "light.non_existing", "execution": { "command": "action.devices.commands.OnOff", "params": {"on": True}, }, } assert events[1].event_type == EVENT_COMMAND_RECEIVED assert events[1].data == { "request_id": REQ_ID, "entity_id": "light.non_existing", "execution": { "command": "action.devices.commands.BrightnessAbsolute", "params": {"brightness": 20}, }, } assert events[2].event_type == EVENT_COMMAND_RECEIVED assert events[2].data == { "request_id": REQ_ID, "entity_id": "light.ceiling_lights", "execution": { "command": "action.devices.commands.OnOff", "params": {"on": True}, }, } assert events[3].event_type == EVENT_COMMAND_RECEIVED assert events[3].data == { "request_id": REQ_ID, "entity_id": "light.ceiling_lights", "execution": { "command": "action.devices.commands.BrightnessAbsolute", "params": {"brightness": 20}, }, } assert len(service_events) == 2 assert service_events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"entity_id": "light.ceiling_lights"}, } assert service_events[0].context == events[2].context assert service_events[1].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 20, "entity_id": "light.ceiling_lights"}, } assert service_events[1].context == events[2].context assert service_events[1].context == events[3].context async def test_raising_error_trait(hass): """Test raising an error while executing a trait command.""" hass.states.async_set( "climate.bla", HVAC_MODE_HEAT, {ATTR_MIN_TEMP: 15, ATTR_MAX_TEMP: 30, ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) events = [] hass.bus.async_listen(EVENT_COMMAND_RECEIVED, events.append) await hass.async_block_till_done() result 
= await sh.async_handle_message( hass, BASIC_CONFIG, "test-agent", { "requestId": REQ_ID, "inputs": [ { "intent": "action.devices.EXECUTE", "payload": { "commands": [ { "devices": [{"id": "climate.bla"}], "execution": [ { "command": "action.devices.commands." "ThermostatTemperatureSetpoint", "params": {"thermostatTemperatureSetpoint": 10}, } ], } ] }, } ], }, ) assert result == { "requestId": REQ_ID, "payload": { "commands": [ { "ids": ["climate.bla"], "status": "ERROR", "errorCode": "valueOutOfRange", } ] }, } assert len(events) == 1 assert events[0].event_type == EVENT_COMMAND_RECEIVED assert events[0].data == { "request_id": REQ_ID, "entity_id": "climate.bla", "execution": { "command": "action.devices.commands.ThermostatTemperatureSetpoint", "params": {"thermostatTemperatureSetpoint": 10}, }, } async def test_serialize_input_boolean(hass): """Test serializing an input boolean entity.""" state = State("input_boolean.bla", "on") # pylint: disable=protected-access entity = sh.GoogleEntity(hass, BASIC_CONFIG, state) result = await entity.sync_serialize() assert result == { "id": "input_boolean.bla", "attributes": {}, "name": {"name": "bla"}, "traits": ["action.devices.traits.OnOff"], "type": "action.devices.types.SWITCH", "willReportState": False, } async def test_unavailable_state_doesnt_sync(hass): """Test that an unavailable entity does not sync over.""" light = DemoLight(None, "Demo Light", state=False) light.hass = hass light.entity_id = "light.demo_light" light._available = False # pylint: disable=protected-access await light.async_update_ha_state() result = await sh.async_handle_message( hass, BASIC_CONFIG, "test-agent", {"requestId": REQ_ID, "inputs": [{"intent": "action.devices.SYNC"}]}, ) assert result == { "requestId": REQ_ID, "payload": {"agentUserId": "test-agent", "devices": []}, } @pytest.mark.parametrize( "device_class,google_type", [ ("non_existing_class", "action.devices.types.SWITCH"), ("switch", "action.devices.types.SWITCH"), ("outlet", 
"action.devices.types.OUTLET"), ], ) async def test_device_class_switch(hass, device_class, google_type): """Test that a cover entity syncs to the correct device type.""" sensor = DemoSwitch( None, "Demo Sensor", state=False, icon="mdi:switch", assumed=False, device_class=device_class, ) sensor.hass = hass sensor.entity_id = "switch.demo_sensor" await sensor.async_update_ha_state() result = await sh.async_handle_message( hass, BASIC_CONFIG, "test-agent", {"requestId": REQ_ID, "inputs": [{"intent": "action.devices.SYNC"}]}, ) assert result == { "requestId": REQ_ID, "payload": { "agentUserId": "test-agent", "devices": [ { "attributes": {}, "id": "switch.demo_sensor", "name": {"name": "Demo Sensor"}, "traits": ["action.devices.traits.OnOff"], "type": google_type, "willReportState": False, } ], }, } @pytest.mark.parametrize( "device_class,google_type", [ ("door", "action.devices.types.DOOR"), ("garage_door", "action.devices.types.GARAGE"), ("lock", "action.devices.types.SENSOR"), ("opening", "action.devices.types.SENSOR"), ("window", "action.devices.types.SENSOR"), ], ) async def test_device_class_binary_sensor(hass, device_class, google_type): """Test that a binary entity syncs to the correct device type.""" sensor = DemoBinarySensor( None, "Demo Sensor", state=False, device_class=device_class ) sensor.hass = hass sensor.entity_id = "binary_sensor.demo_sensor" await sensor.async_update_ha_state() result = await sh.async_handle_message( hass, BASIC_CONFIG, "test-agent", {"requestId": REQ_ID, "inputs": [{"intent": "action.devices.SYNC"}]}, ) assert result == { "requestId": REQ_ID, "payload": { "agentUserId": "test-agent", "devices": [ { "attributes": {"queryOnlyOpenClose": True}, "id": "binary_sensor.demo_sensor", "name": {"name": "Demo Sensor"}, "traits": ["action.devices.traits.OpenClose"], "type": google_type, "willReportState": False, } ], }, } @pytest.mark.parametrize( "device_class,google_type", [ ("non_existing_class", "action.devices.types.BLINDS"), ("door", 
"action.devices.types.DOOR"), ("garage", "action.devices.types.GARAGE"), ], ) async def test_device_class_cover(hass, device_class, google_type): """Test that a binary entity syncs to the correct device type.""" sensor = DemoCover(None, hass, "Demo Sensor", device_class=device_class) sensor.hass = hass sensor.entity_id = "cover.demo_sensor" await sensor.async_update_ha_state() result = await sh.async_handle_message( hass, BASIC_CONFIG, "test-agent", {"requestId": REQ_ID, "inputs": [{"intent": "action.devices.SYNC"}]}, ) assert result == { "requestId": REQ_ID, "payload": { "agentUserId": "test-agent", "devices": [ { "attributes": {}, "id": "cover.demo_sensor", "name": {"name": "Demo Sensor"}, "traits": ["action.devices.traits.OpenClose"], "type": google_type, "willReportState": False, } ], }, } @pytest.mark.parametrize( "device_class,google_type", [ ("non_existing_class", "action.devices.types.SWITCH"), ("speaker", "action.devices.types.SPEAKER"), ("tv", "action.devices.types.TV"), ], ) async def test_device_media_player(hass, device_class, google_type): """Test that a binary entity syncs to the correct device type.""" sensor = AbstractDemoPlayer("Demo", device_class=device_class) sensor.hass = hass sensor.entity_id = "media_player.demo" await sensor.async_update_ha_state() result = await sh.async_handle_message( hass, BASIC_CONFIG, "test-agent", {"requestId": REQ_ID, "inputs": [{"intent": "action.devices.SYNC"}]}, ) assert result == { "requestId": REQ_ID, "payload": { "agentUserId": "test-agent", "devices": [ { "attributes": {}, "id": sensor.entity_id, "name": {"name": sensor.name}, "traits": ["action.devices.traits.OnOff"], "type": google_type, "willReportState": False, } ], }, } async def test_query_disconnect(hass): """Test a disconnect message.""" config = MockConfig(hass=hass) config.async_enable_report_state() assert config._unsub_report_state is not None with patch.object( config, "async_deactivate_report_state", side_effect=mock_coro ) as mock_deactivate: 
result = await sh.async_handle_message( hass, config, "test-agent", {"inputs": [{"intent": "action.devices.DISCONNECT"}], "requestId": REQ_ID}, ) assert result is None assert len(mock_deactivate.mock_calls) == 1 async def test_trait_execute_adding_query_data(hass): """Test a trait execute influencing query data.""" hass.config.api = Mock(base_url="http://1.1.1.1:8123") hass.states.async_set( "camera.office", "idle", {"supported_features": camera.SUPPORT_STREAM} ) with patch( "homeassistant.components.camera.async_request_stream", return_value=mock_coro("/api/streams/bla"), ): result = await sh.async_handle_message( hass, BASIC_CONFIG, None, { "requestId": REQ_ID, "inputs": [ { "intent": "action.devices.EXECUTE", "payload": { "commands": [ { "devices": [{"id": "camera.office"}], "execution": [ { "command": "action.devices.commands.GetCameraStream", "params": { "StreamToChromecast": True, "SupportedStreamProtocols": [ "progressive_mp4", "hls", "dash", "smooth_stream", ], }, } ], } ] }, } ], }, ) assert result == { "requestId": REQ_ID, "payload": { "commands": [ { "ids": ["camera.office"], "status": "SUCCESS", "states": { "online": True, "cameraStreamAccessUrl": "http://1.1.1.1:8123/api/streams/bla", }, } ] }, } async def test_identify(hass): """Test identify message.""" result = await sh.async_handle_message( hass, BASIC_CONFIG, None, { "requestId": REQ_ID, "inputs": [ { "intent": "action.devices.IDENTIFY", "payload": { "device": { "mdnsScanData": { "additionals": [ { "type": "TXT", "class": "IN", "name": "devhome._home-assistant._tcp.local", "ttl": 4500, "data": [ "version=0.101.0.dev0", "base_url=http://192.168.1.101:8123", "requires_api_password=true", ], } ] } }, "structureData": {}, }, } ], "devices": [ { "id": "light.ceiling_lights", "customData": { "httpPort": 8123, "httpSSL": False, "proxyDeviceId": BASIC_CONFIG.agent_user_id, "webhookId": "dde3b9800a905e886cc4d38e226a6e7e3f2a6993d2b9b9f63d13e42ee7de3219", }, } ], }, ) assert result == { "requestId": REQ_ID, 
"payload": { "device": { "id": BASIC_CONFIG.agent_user_id, "isLocalOnly": True, "isProxy": True, "deviceInfo": { "hwVersion": "UNKNOWN_HW_VERSION", "manufacturer": "Home Assistant", "model": "Home Assistant", "swVersion": __version__, }, } }, } async def test_reachable_devices(hass): """Test REACHABLE_DEVICES intent.""" # Matching passed in device. hass.states.async_set("light.ceiling_lights", "on") # Unsupported entity hass.states.async_set("not_supported.entity", "something") # Excluded via config hass.states.async_set("light.not_expose", "on") # Not passed in as google_id hass.states.async_set("light.not_mentioned", "on") config = MockConfig( should_expose=lambda state: state.entity_id != "light.not_expose" ) result = await sh.async_handle_message( hass, config, None, { "requestId": REQ_ID, "inputs": [ { "intent": "action.devices.REACHABLE_DEVICES", "payload": { "device": { "proxyDevice": { "id": "6a04f0f7-6125-4356-a846-861df7e01497", "customData": "{}", "proxyData": "{}", } }, "structureData": {}, }, } ], "devices": [ { "id": "light.ceiling_lights", "customData": { "httpPort": 8123, "httpSSL": False, "proxyDeviceId": BASIC_CONFIG.agent_user_id, "webhookId": "dde3b9800a905e886cc4d38e226a6e7e3f2a6993d2b9b9f63d13e42ee7de3219", }, }, { "id": "light.not_expose", "customData": { "httpPort": 8123, "httpSSL": False, "proxyDeviceId": BASIC_CONFIG.agent_user_id, "webhookId": "dde3b9800a905e886cc4d38e226a6e7e3f2a6993d2b9b9f63d13e42ee7de3219", }, }, {"id": BASIC_CONFIG.agent_user_id, "customData": {}}, ], }, ) assert result == { "requestId": REQ_ID, "payload": {"devices": [{"verificationId": "light.ceiling_lights"}]}, }
joopert/home-assistant
tests/components/google_assistant/test_smart_home.py
Python
apache-2.0
28,484
0.000807
# Copyright (c) Citrix Systems Inc. # All rights reserved. # # Redistribution and use in source and binary forms, # with or without modification, are permitted provided # that the following conditions are met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the # following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the # following disclaimer in the documentation and/or other # materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. 
import testbase import time from utils import * class VMOpsTestClass(testbase.OperationsTestClass): """Test class to determine proper operation of the most basic VM procedures""" # Deine the number of test VMs vm_count = 3 def _setup_vms(self, session): """Creates vm_count VMs on the master host's local SR""" host_ref = get_pool_master(session) net_ref = get_management_network(session) return deploy_common_droid_vms_on_hosts(session, [host_ref], [net_ref], self.vm_count, {net_ref: self.get_static_manager(net_ref)})[host_ref] def test_vm_power_control(self, session): """Creates a number of VMs and alterates the power state a predefined number of times""" vm_ref_list = self._setup_vms(session) for i in range(3): log.debug("Starting test run %d of %d" % (i + 1, range(3)[-1] + 1)) # Make certain the VMs are available for vm_ref in vm_ref_list: check_vm_ping_response( session, vm_ref, get_context_vm_mip(vm_ref)) # Shut down all VMs log.debug("Shutting down VMs: %s" % vm_ref_list) """Note that it is required we build the following 'task_list' in this manner, i.e 'x=vm_ref', so that we can get around a particular issue with Python variable bindings within loops""" task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.clean_shutdown(x)) for vm_ref in vm_ref_list] res = run_xapi_async_tasks(session, task_list) # Verify the VMs report a 'Halted' power state log.debug("Verrifying VM power control operations") for vm_ref in vm_ref_list: if session.xenapi.VM.get_power_state(vm_ref) != 'Halted': raise Exception( "ERROR: Unexpected power state; VM did not shut down") log.debug("VM %s is shut down" % vm_ref) log.debug("Verrification complete: All VMs have shut down") # Boot all VMs log.debug("Booting VMs: %s" % vm_ref_list) host_ref = get_pool_master(session) task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.start_on(x, host_ref, False, False)) for vm_ref in vm_ref_list] res = run_xapi_async_tasks(session, task_list) # Verify the VMs report a 'Running' power state 
log.debug("Verrifying VM power control operations") for vm_ref in vm_ref_list: if session.xenapi.VM.get_power_state(vm_ref) != 'Running': raise Exception( "ERROR: Unexpected power state; VM did not boot") log.debug("VM %s is running" % vm_ref) log.debug("Verrification complete: All VMs have booted") log.debug("Test run %d of %d has completed successfully" % (i + 1, range(3)[-1] + 1)) wait_for_vms_ips(session, vm_ref_list) rec = {} rec['info'] = ("VM power state tests completed successfully.") return rec def test_vm_reboot(self, session): """Creates a number of VMs and continuously reboots them a predefined number of times""" vm_ref_list = self._setup_vms(session) for i in range(3): log.debug("Starting test run %d of %d" % (i + 1, range(3)[-1] + 1)) # Make certain the VMs are available for vm_ref in vm_ref_list: check_vm_ping_response( session, vm_ref, get_context_vm_mip(vm_ref)) # Reboot all VMs log.debug("Rebooting VMs: %s" % vm_ref_list) task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.clean_reboot(x)) for vm_ref in vm_ref_list] res = run_xapi_async_tasks(session, task_list) # Verify the VMs report a 'Running' power state log.debug("Verrifying VM power control operations") for vm_ref in vm_ref_list: if session.xenapi.VM.get_power_state(vm_ref) != 'Running': raise Exception("ERROR: Unexpected power state") log.debug("VM %s is running" % vm_ref) log.debug("Verrification complete: All VMs have rebooted") log.debug("Test run %d of %d has completed successfully" % (i + 1, range(3)[-1] + 1)) wait_for_vms_ips(session, vm_ref_list) rec = {} rec['info'] = ("VM reboot test completed successfully") return rec def test_vm_suspend(self, session): """Creates a number of VMs and verifies correct suspend/resume functionality through three test runs""" vm_ref_list = self._setup_vms(session) for i in range(3): log.debug("Starting test run %d of %d" % (i + 1, range(3)[-1] + 1)) # Make certain the VMs are available for vm_ref in vm_ref_list: check_vm_ping_response( session, 
vm_ref, get_context_vm_mip(vm_ref)) # Suspend all VMs log.debug("Suspending VMs: %s" % vm_ref_list) task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.suspend(x)) for vm_ref in vm_ref_list] start = time.time() res = run_xapi_async_tasks(session, task_list, 1200) suspend_time = time.time() - start log.debug( "Suspend operation returned complete in %s seconds" % suspend_time) # Verify the VMs report a 'Suspended' power state log.debug("Verrifying VM power control operations") for vm_ref in vm_ref_list: if session.xenapi.VM.get_power_state(vm_ref) != 'Suspended': raise Exception("ERROR: VM %s did not suspend" % vm_ref) log.debug("VM %s is suspended" % vm_ref) log.debug("Verrification complete: All VMs have been suspended") # Resume all VMs log.debug("Resuming VMs: %s" % vm_ref_list) host_ref = get_pool_master(session) task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.resume_on(x, host_ref, False, False)) for vm_ref in vm_ref_list] res = run_xapi_async_tasks(session, task_list) # Verify the VMs report a 'Running' power state log.debug("Verrifying VM power control operations") for vm_ref in vm_ref_list: if session.xenapi.VM.get_power_state(vm_ref) != 'Running': raise Exception("ERROR: VM %s did not resume" % vm_ref) log.debug("VM %s is running" % vm_ref) log.debug("Verrification complete: All VMs have resumed") log.debug("Test run %d of %d has completed successfully" % (i + 1, range(3)[-1] + 1)) wait_for_vms_ips(session, vm_ref_list) rec = {} rec['info'] = ("VM suspend tests completed successfully") return rec def test_vm_relocation(self, session): """Creates a number of VMs and 'relocates' them between the master host and the master host""" vm_ref_list = self._setup_vms(session) for i in range(3): log.debug("Starting test run %d of %d" % (i + 1, range(3)[-1] + 1)) # Make certain the VMs are available for vm_ref in vm_ref_list: check_vm_ping_response( session, vm_ref, get_context_vm_mip(vm_ref)) # Relocate all VMs log.debug("Relocating VMs: %s" % vm_ref_list) 
host_ref = get_pool_master(session) task_list = [(lambda x=vm_ref: session.xenapi.Async.VM.pool_migrate(x, host_ref, {'live': 'true'})) for vm_ref in vm_ref_list] res = run_xapi_async_tasks(session, task_list) # Verify the VMs report a 'Running' power state log.debug("Verrifying VM power control operations") for vm_ref in vm_ref_list: if session.xenapi.VM.get_power_state(vm_ref) != 'Running': raise Exception("ERROR: Unexpected power state") log.debug("VM %s is running" % vm_ref) log.debug( "Verrification complete: All VMs have been relocated and are running") log.debug("Test run %d of %d has completed successfully" % (i + 1, range(3)[-1] + 1)) wait_for_vms_ips(session, vm_ref_list) rec = {} rec['info'] = ("VM relocation tests completed successfully") return rec class CrashDumpTestClass(testbase.OperationsTestClass): """Test class to verify crash dump is created and collectable properly.""" def test_crashdump(self, session): """Check crashdump is created properly.""" log.debug("Running Crashdump test.") test_class = self.config['test_class'] test_method = self.config['test_method'] self.control = test_method.get_control() log.debug("control info: %s" % self.control) ret = {} if not self.control: crash_beg_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S") log.debug("prepare to crash dump %s" % crash_beg_time) self.control = "crash;%s" % crash_beg_time self.set_control(ret, self.control) self.set_test_name(ret, "%s.%s" % ( self.__class__.__name__, CrashDumpTestClass.test_crashdump.__name__)) # save test before crash test_class.update([ret]) test_class.save('test_run.conf') # crash master by default time.sleep(5) host_crash(self.session) # should not reach here log.debug("Current host did not reboot") if self.control.startswith("crash"): crash_beg_time = self.control.split(';')[1] if not crash_beg_time: raise Exception( "Reboot flag is not persistent and does not include crash info. 
Does host restarted by forced crashdump?") crash_beg_time = datetime( *(time.strptime(crash_beg_time, "%Y-%m-%d %H:%M:%S")[0:6])) log.debug("host crashed at %s" % str(crash_beg_time)) # Check new crashdump was created during host crash. crashdumps_all = retrieve_crashdumps(session) log.debug("available crashdumps: %s" % (str(crashdumps_all))) crashdumps_matching = [ cd for cd in crashdumps_all if crash_beg_time < cd['timestamp']] log.debug("matched crashdump(s): %s" % (str(crashdumps_matching))) if not len(crashdumps_matching) == 1: raise Exception("Host didn't create crashdump properly. number of new crashdumps: %d" % len( crashdumps_matching)) self.set_info(ret, 'An additional crashdump was detected.') return ret
xenserver/auto-cert-kit
autocertkit/operations_tests.py
Python
bsd-2-clause
13,353
0.001573
import os.path from enum import Enum class NamedEnum(Enum): def __init__(self, name): self.display_name = name @classmethod def get_names(cls): return [name for name, _ in cls.__members__.items()] class Role(NamedEnum): solver = ('Solver') tester = ('Tester') class Status(NamedEnum): queued = ('Queued') testing = ('Testing') failed = ('Failed') inactive = ('Inactive') rejected = ('Rejected') defeated = ('Defeated') active = ('Active') class Verdict(NamedEnum): solved = ('Solved') wrong_answer = ('Wrong Answer') time_limit_exceeded = ('Time Limit Exceeded') run_time_error = ('Run-Time Error') security_violation = ('Security Violation') judge_error = ('Judge Error') class ExtensionEnum(NamedEnum): def __init__(self, name, extension): self.extension = extension super(ExtensionEnum, self).__init__(name) class Language(ExtensionEnum): cpp = ('C++', 'cpp') python = ('Python', 'py') def detect_language(filename): extension = os.path.splitext(filename)[-1] if extension == '.cpp' or extension == '.cc': return Language.cpp if extension == '.py': return Language.python return None
jsannemo/programming-battle
battle/battle/api/__init__.py
Python
bsd-2-clause
1,247
0.005613
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function from __future__ import division import unittest import numpy as np import paddle.fluid.core as core from op_test import OpTest def adaptive_start_index(index, input_size, output_size): return int(np.floor(index * input_size / output_size)) def adaptive_end_index(index, input_size, output_size): return int(np.ceil((index + 1) * input_size / output_size)) def max_pool3D_forward_naive(x, ksize, strides, paddings, global_pool=0, ceil_mode=False, exclusive=True, adaptive=False): N, C, D, H, W = x.shape if global_pool == 1: ksize = [D, H, W] if adaptive: D_out, H_out, W_out = ksize else: D_out = (D - ksize[0] + 2 * paddings[0] + strides[0] - 1 ) // strides[0] + 1 if ceil_mode else ( H - ksize[0] + 2 * paddings[0]) // strides[0] + 1 H_out = (H - ksize[1] + 2 * paddings[1] + strides[1] - 1 ) // strides[1] + 1 if ceil_mode else ( W - ksize[1] + 2 * paddings[1]) // strides[1] + 1 W_out = (W - ksize[2] + 2 * paddings[2] + strides[2] - 1 ) // strides[2] + 1 if ceil_mode else ( W - ksize[2] + 2 * paddings[2]) // strides[2] + 1 out = np.zeros((N, C, D_out, H_out, W_out)) for k in range(D_out): if adaptive: d_start = adaptive_start_index(k, D, ksize[0]) d_end = adaptive_end_index(k, D, ksize[0]) else: d_start = np.max((k * strides[0] - paddings[0], 0)) d_end = np.min((k * strides[0] + ksize[0] - paddings[0], D)) for i in range(H_out): if adaptive: 
h_start = adaptive_start_index(i, H, ksize[1]) h_end = adaptive_end_index(i, H, ksize[1]) else: h_start = np.max((i * strides[1] - paddings[1], 0)) h_end = np.min((i * strides[1] + ksize[1] - paddings[1], H)) for j in range(W_out): if adaptive: w_start = adaptive_start_index(j, W, ksize[2]) w_end = adaptive_end_index(j, W, ksize[2]) else: w_start = np.max((j * strides[2] - paddings[2], 0)) w_end = np.min((j * strides[2] + ksize[2] - paddings[2], W)) x_masked = x[:, :, d_start:d_end, h_start:h_end, w_start:w_end] out[:, :, k, i, j] = np.max(x_masked, axis=(2, 3, 4)) return out def avg_pool3D_forward_naive(x, ksize, strides, paddings, global_pool=0, ceil_mode=False, exclusive=True, adaptive=False): N, C, D, H, W = x.shape if global_pool == 1: ksize = [D, H, W] if adaptive: D_out, H_out, W_out = ksize else: D_out = (D - ksize[0] + 2 * paddings[0] + strides[0] - 1 ) // strides[0] + 1 if ceil_mode else ( H - ksize[0] + 2 * paddings[0]) // strides[0] + 1 H_out = (H - ksize[1] + 2 * paddings[1] + strides[1] - 1 ) // strides[1] + 1 if ceil_mode else ( W - ksize[1] + 2 * paddings[1]) // strides[1] + 1 W_out = (W - ksize[2] + 2 * paddings[2] + strides[2] - 1 ) // strides[2] + 1 if ceil_mode else ( W - ksize[2] + 2 * paddings[2]) // strides[2] + 1 out = np.zeros((N, C, D_out, H_out, W_out)) for k in range(D_out): if adaptive: d_start = adaptive_start_index(k, D, ksize[0]) d_end = adaptive_end_index(k, D, ksize[0]) else: d_start = np.max((k * strides[0] - paddings[0], 0)) d_end = np.min((k * strides[0] + ksize[0] - paddings[0], D)) for i in range(H_out): if adaptive: h_start = adaptive_start_index(i, H, ksize[1]) h_end = adaptive_end_index(i, H, ksize[1]) else: h_start = np.max((i * strides[1] - paddings[1], 0)) h_end = np.min((i * strides[1] + ksize[1] - paddings[1], H)) for j in range(W_out): if adaptive: w_start = adaptive_start_index(j, W, ksize[2]) w_end = adaptive_end_index(j, W, ksize[2]) else: w_start = np.max((j * strides[2] - paddings[2], 0)) w_end = np.min((j * 
strides[2] + ksize[2] - paddings[2], W)) x_masked = x[:, :, d_start:d_end, h_start:h_end, w_start:w_end] field_size = (d_end - d_start) * (h_end - h_start) * (w_end - w_start) \ if (exclusive or adaptive) else ksize[0] * ksize[1] * ksize[2] out[:, :, k, i, j] = np.sum(x_masked, axis=(2, 3, 4)) / field_size return out class TestPool3d_Op(OpTest): def setUp(self): self.op_type = "pool3d" self.use_cudnn = False self.dtype = np.float32 self.init_test_case() self.init_global_pool() self.init_kernel_type() self.init_pool_type() self.init_ceil_mode() self.init_exclusive() self.init_adaptive() if self.global_pool: self.paddings = [0 for _ in range(len(self.paddings))] input = np.random.random(self.shape).astype(self.dtype) output = self.pool3D_forward_naive( input, self.ksize, self.strides, self.paddings, self.global_pool, self.ceil_mode, self.exclusive, self.adaptive).astype(self.dtype) self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(input)} self.attrs = { 'strides': self.strides, 'paddings': self.paddings, 'ksize': self.ksize, 'pooling_type': self.pool_type, 'global_pooling': self.global_pool, 'use_cudnn': self.use_cudnn, 'ceil_mode': self.ceil_mode, 'data_format': 'AnyLayout', # TODO(dzhwinter) : should be fix latter 'exclusive': self.exclusive, 'adaptive': self.adaptive } self.outputs = {'Out': output} def testcudnn(self): return core.is_compiled_with_cuda() and self.use_cudnn def test_check_output(self): if self.testcudnn(): place = core.CUDAPlace(0) self.check_output_with_place(place, atol=1e-5) else: self.check_output() def test_check_grad(self): if self.dtype == np.float16: return if self.testcudnn() and self.pool_type != "max": place = core.CUDAPlace(0) self.check_grad_with_place( place, set(['X']), 'Out', max_relative_error=0.07) elif self.pool_type != "max": self.check_grad(set(['X']), 'Out', max_relative_error=0.07) def init_test_case(self): self.shape = [2, 3, 5, 5, 5] self.ksize = [3, 3, 3] self.strides = [1, 1, 1] self.paddings = [0, 0, 0] def 
init_kernel_type(self): pass def init_pool_type(self): self.pool_type = "avg" self.pool3D_forward_naive = avg_pool3D_forward_naive def init_global_pool(self): self.global_pool = True def init_ceil_mode(self): self.ceil_mode = False def init_exclusive(self): self.exclusive = True def init_adaptive(self): self.adaptive = False class TestCase1(TestPool3d_Op): def init_test_case(self): self.shape = [2, 3, 7, 7, 7] self.ksize = [3, 3, 3] self.strides = [1, 1, 1] self.paddings = [0, 0, 0] def init_pool_type(self): self.pool_type = "avg" self.pool3D_forward_naive = avg_pool3D_forward_naive def init_global_pool(self): self.global_pool = False class TestCase2(TestPool3d_Op): def init_test_case(self): self.shape = [2, 3, 7, 7, 7] self.ksize = [3, 3, 3] self.strides = [1, 1, 1] self.paddings = [1, 1, 1] def init_pool_type(self): self.pool_type = "avg" self.pool3D_forward_naive = avg_pool3D_forward_naive def init_global_pool(self): self.global_pool = False class TestCase3(TestPool3d_Op): def init_pool_type(self): self.pool_type = "max" self.pool3D_forward_naive = max_pool3D_forward_naive class TestCase4(TestCase1): def init_pool_type(self): self.pool_type = "max" self.pool3D_forward_naive = max_pool3D_forward_naive class TestCase5(TestCase2): def init_pool_type(self): self.pool_type = "max" self.pool3D_forward_naive = max_pool3D_forward_naive #--------------------test pool3d-------------------- class TestCUDNNCase1(TestPool3d_Op): def init_kernel_type(self): self.use_cudnn = True class TestFP16CUDNNCase1(TestPool3d_Op): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float16 def test_check_output(self): if core.is_compiled_with_cuda(): place = core.CUDAPlace(0) if core.is_float16_supported(place): self.check_output_with_place(place, atol=1e-3) class TestCUDNNCase2(TestCase1): def init_kernel_type(self): self.use_cudnn = True class TestFP16CUDNNCase2(TestCase1): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float16 def 
test_check_output(self): if core.is_compiled_with_cuda(): place = core.CUDAPlace(0) if core.is_float16_supported(place): self.check_output_with_place(place, atol=1e-3) class TestCUDNNCase3(TestCase2): def init_kernel_type(self): self.use_cudnn = True class TestFP16CUDNNCase3(TestCase2): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float16 def test_check_output(self): if core.is_compiled_with_cuda(): place = core.CUDAPlace(0) if core.is_float16_supported(place): self.check_output_with_place(place, atol=1e-3) class TestCUDNNCase4(TestCase3): def init_kernel_type(self): self.use_cudnn = True class TestFP16CUDNNCase4(TestCase3): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float16 def test_check_output(self): if core.is_compiled_with_cuda(): place = core.CUDAPlace(0) if core.is_float16_supported(place): self.check_output_with_place(place, atol=1e-3) class TestCUDNNCase5(TestCase4): def init_kernel_type(self): self.use_cudnn = True class TestFP16CUDNNCase5(TestCase4): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float16 def test_check_output(self): if core.is_compiled_with_cuda(): place = core.CUDAPlace(0) if core.is_float16_supported(place): self.check_output_with_place(place, atol=1e-3) class TestCUDNNCase6(TestCase5): def init_kernel_type(self): self.use_cudnn = True class TestFP16CUDNNCase6(TestCase5): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float16 def test_check_output(self): if core.is_compiled_with_cuda(): place = core.CUDAPlace(0) if core.is_float16_supported(place): self.check_output_with_place(place, atol=1e-3) class TestCeilModeCase1(TestCUDNNCase1): def init_ceil_mode(self): self.ceil_mode = True class TestCeilModeCase2(TestCUDNNCase2): def init_ceil_mode(self): self.ceil_mode = True class TestCeilModeCase3(TestCase1): def init_ceil_mode(self): self.ceil_mode = True class TestCeilModeCase4(TestCase2): def init_ceil_mode(self): self.ceil_mode = True class 
TestAvgInclude(TestCase2): def init_exclusive(self): self.exclusive = False class TestCUDNNAvgInclude(TestCUDNNCase3): def init_exclusive(self): self.exclusive = False class TestAvgPoolAdaptive(TestCase1): def init_adaptive(self): self.adaptive = True if __name__ == '__main__': unittest.main()
baidu/Paddle
python/paddle/fluid/tests/unittests/test_pool3d_op.py
Python
apache-2.0
13,221
0.000454
################################################################################ # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ################################################################################ from abc import ABC, abstractmethod from pyflink.common import typeinfo, Duration, WatermarkStrategy, ConfigOptions from pyflink.common.serialization import JsonRowDeserializationSchema, \ JsonRowSerializationSchema, Encoder, SimpleStringSchema from pyflink.common.typeinfo import Types from pyflink.datastream import StreamExecutionEnvironment from pyflink.datastream.connectors import FlinkKafkaConsumer, FlinkKafkaProducer, JdbcSink, \ JdbcConnectionOptions, JdbcExecutionOptions, StreamingFileSink, \ OutputFileConfig, FileSource, StreamFormat, FileEnumeratorProvider, FileSplitAssignerProvider, \ NumberSequenceSource, RollingPolicy, FileSink, BucketAssigner, RMQSink, RMQSource, \ RMQConnectionConfig, PulsarSource, StartCursor, PulsarDeserializationSchema, StopCursor, \ SubscriptionType from pyflink.datastream.tests.test_util import DataStreamTestSinkFunction from pyflink.java_gateway import get_gateway from pyflink.testing.test_case_utils import PyFlinkTestCase, _load_specific_flink_module_jars, \ invoke_java_object_method from 
pyflink.util.java_utils import load_java_class, get_field_value class ConnectorTestBase(PyFlinkTestCase, ABC): @classmethod @abstractmethod def _get_jars_relative_path(cls): """ Return the relative path of connector, such as `/flink-connectors/flink-sql-connector-jdbc`. """ pass def setUp(self) -> None: self.env = StreamExecutionEnvironment.get_execution_environment() # Cache current ContextClassLoader, we will replace it with a temporary URLClassLoader to # load specific connector jars with given module path to do dependency isolation. And We # will change the ClassLoader back to the cached ContextClassLoader after the test case # finished. self._cxt_clz_loader = get_gateway().jvm.Thread.currentThread().getContextClassLoader() _load_specific_flink_module_jars(self._get_jars_relative_path()) def tearDown(self): # Change the ClassLoader back to the cached ContextClassLoader after the test case finished. if self._cxt_clz_loader is not None: get_gateway().jvm.Thread.currentThread().setContextClassLoader(self._cxt_clz_loader) class FlinkKafkaTest(ConnectorTestBase): @classmethod def _get_jars_relative_path(cls): return '/flink-connectors/flink-sql-connector-kafka' def setUp(self) -> None: super().setUp() self.env.set_parallelism(2) def test_kafka_connector_universal(self): self.kafka_connector_assertion(FlinkKafkaConsumer, FlinkKafkaProducer) def kafka_connector_assertion(self, flink_kafka_consumer_clz, flink_kafka_producer_clz): source_topic = 'test_source_topic' sink_topic = 'test_sink_topic' props = {'bootstrap.servers': 'localhost:9092', 'group.id': 'test_group'} type_info = Types.ROW([Types.INT(), Types.STRING()]) # Test for kafka consumer deserialization_schema = JsonRowDeserializationSchema.builder() \ .type_info(type_info=type_info).build() flink_kafka_consumer = flink_kafka_consumer_clz(source_topic, deserialization_schema, props) flink_kafka_consumer.set_start_from_earliest() flink_kafka_consumer.set_commit_offsets_on_checkpoints(True) j_properties = 
get_field_value(flink_kafka_consumer.get_java_function(), 'properties') self.assertEqual('localhost:9092', j_properties.getProperty('bootstrap.servers')) self.assertEqual('test_group', j_properties.getProperty('group.id')) self.assertTrue(get_field_value(flink_kafka_consumer.get_java_function(), 'enableCommitOnCheckpoints')) j_start_up_mode = get_field_value(flink_kafka_consumer.get_java_function(), 'startupMode') j_deserializer = get_field_value(flink_kafka_consumer.get_java_function(), 'deserializer') j_deserialize_type_info = invoke_java_object_method(j_deserializer, "getProducedType") deserialize_type_info = typeinfo._from_java_type(j_deserialize_type_info) self.assertTrue(deserialize_type_info == type_info) self.assertTrue(j_start_up_mode.equals(get_gateway().jvm .org.apache.flink.streaming.connectors .kafka.config.StartupMode.EARLIEST)) j_topic_desc = get_field_value(flink_kafka_consumer.get_java_function(), 'topicsDescriptor') j_topics = invoke_java_object_method(j_topic_desc, 'getFixedTopics') self.assertEqual(['test_source_topic'], list(j_topics)) # Test for kafka producer serialization_schema = JsonRowSerializationSchema.builder().with_type_info(type_info) \ .build() flink_kafka_producer = flink_kafka_producer_clz(sink_topic, serialization_schema, props) flink_kafka_producer.set_write_timestamp_to_kafka(False) j_producer_config = get_field_value(flink_kafka_producer.get_java_function(), 'producerConfig') self.assertEqual('localhost:9092', j_producer_config.getProperty('bootstrap.servers')) self.assertEqual('test_group', j_producer_config.getProperty('group.id')) self.assertFalse(get_field_value(flink_kafka_producer.get_java_function(), 'writeTimestampToKafka')) class FlinkJdbcSinkTest(ConnectorTestBase): @classmethod def _get_jars_relative_path(cls): return '/flink-connectors/flink-connector-jdbc' def test_jdbc_sink(self): ds = self.env.from_collection([('ab', 1), ('bdc', 2), ('cfgs', 3), ('deeefg', 4)], type_info=Types.ROW([Types.STRING(), Types.INT()])) 
jdbc_connection_options = JdbcConnectionOptions.JdbcConnectionOptionsBuilder()\ .with_driver_name('com.mysql.jdbc.Driver')\ .with_user_name('root')\ .with_password('password')\ .with_url('jdbc:mysql://server-name:server-port/database-name').build() jdbc_execution_options = JdbcExecutionOptions.builder().with_batch_interval_ms(2000)\ .with_batch_size(100).with_max_retries(5).build() jdbc_sink = JdbcSink.sink("insert into test table", ds.get_type(), jdbc_connection_options, jdbc_execution_options) ds.add_sink(jdbc_sink).name('jdbc sink') plan = eval(self.env.get_execution_plan()) self.assertEqual('Sink: jdbc sink', plan['nodes'][1]['type']) j_output_format = get_field_value(jdbc_sink.get_java_function(), 'outputFormat') connection_options = JdbcConnectionOptions( get_field_value(get_field_value(j_output_format, 'connectionProvider'), 'jdbcOptions')) self.assertEqual(jdbc_connection_options.get_db_url(), connection_options.get_db_url()) self.assertEqual(jdbc_connection_options.get_driver_name(), connection_options.get_driver_name()) self.assertEqual(jdbc_connection_options.get_password(), connection_options.get_password()) self.assertEqual(jdbc_connection_options.get_user_name(), connection_options.get_user_name()) exec_options = JdbcExecutionOptions(get_field_value(j_output_format, 'executionOptions')) self.assertEqual(jdbc_execution_options.get_batch_interval_ms(), exec_options.get_batch_interval_ms()) self.assertEqual(jdbc_execution_options.get_batch_size(), exec_options.get_batch_size()) self.assertEqual(jdbc_execution_options.get_max_retries(), exec_options.get_max_retries()) class FlinkPulsarTest(ConnectorTestBase): @classmethod def _get_jars_relative_path(cls): return '/flink-connectors/flink-sql-connector-pulsar' def test_pulsar_source(self): test_option = ConfigOptions.key('pulsar.source.enableAutoAcknowledgeMessage') \ .boolean_type().no_default_value() pulsar_source = PulsarSource.builder() \ .set_service_url('pulsar://localhost:6650') \ 
.set_admin_url('http://localhost:8080') \ .set_topics('ada') \ .set_start_cursor(StartCursor.earliest()) \ .set_unbounded_stop_cursor(StopCursor.never()) \ .set_bounded_stop_cursor(StopCursor.at_event_time(22)) \ .set_subscription_name('ff') \ .set_subscription_type(SubscriptionType.Exclusive) \ .set_deserialization_schema( PulsarDeserializationSchema.flink_type_info(Types.STRING(), None)) \ .set_deserialization_schema( PulsarDeserializationSchema.flink_schema(SimpleStringSchema())) \ .set_config(test_option, True) \ .set_config_with_dict({'pulsar.source.autoCommitCursorInterval': '1000'}) \ .build() ds = self.env.from_source(source=pulsar_source, watermark_strategy=WatermarkStrategy.for_monotonous_timestamps(), source_name="pulsar source") ds.print() plan = eval(self.env.get_execution_plan()) self.assertEqual('Source: pulsar source', plan['nodes'][0]['type']) configuration = get_field_value(pulsar_source.get_java_function(), "sourceConfiguration") self.assertEqual( configuration.getString( ConfigOptions.key('pulsar.client.serviceUrl') .string_type() .no_default_value()._j_config_option), 'pulsar://localhost:6650') self.assertEqual( configuration.getString( ConfigOptions.key('pulsar.admin.adminUrl') .string_type() .no_default_value()._j_config_option), 'http://localhost:8080') self.assertEqual( configuration.getString( ConfigOptions.key('pulsar.consumer.subscriptionName') .string_type() .no_default_value()._j_config_option), 'ff') self.assertEqual( configuration.getString( ConfigOptions.key('pulsar.consumer.subscriptionType') .string_type() .no_default_value()._j_config_option), SubscriptionType.Exclusive.name) self.assertEqual( configuration.getBoolean( test_option._j_config_option), True) self.assertEqual( configuration.getLong( ConfigOptions.key('pulsar.source.autoCommitCursorInterval') .long_type() .no_default_value()._j_config_option), 1000) def test_set_topics_with_list(self): PulsarSource.builder() \ .set_service_url('pulsar://localhost:6650') \ 
.set_admin_url('http://localhost:8080') \ .set_topics(['ada', 'beta']) \ .set_subscription_name('ff') \ .set_deserialization_schema( PulsarDeserializationSchema.flink_schema(SimpleStringSchema())) \ .build() def test_set_topics_pattern(self): PulsarSource.builder() \ .set_service_url('pulsar://localhost:6650') \ .set_admin_url('http://localhost:8080') \ .set_topics_pattern('ada.*') \ .set_subscription_name('ff') \ .set_deserialization_schema( PulsarDeserializationSchema.flink_schema(SimpleStringSchema())) \ .build() class RMQTest(ConnectorTestBase): @classmethod def _get_jars_relative_path(cls): return '/flink-connectors/flink-sql-connector-rabbitmq' def test_rabbitmq_connectors(self): connection_config = RMQConnectionConfig.Builder() \ .set_host('localhost') \ .set_port(5672) \ .set_virtual_host('/') \ .set_user_name('guest') \ .set_password('guest') \ .build() type_info = Types.ROW([Types.INT(), Types.STRING()]) deserialization_schema = JsonRowDeserializationSchema.builder() \ .type_info(type_info=type_info).build() rmq_source = RMQSource( connection_config, 'source_queue', True, deserialization_schema) self.assertEqual( get_field_value(rmq_source.get_java_function(), 'queueName'), 'source_queue') self.assertTrue(get_field_value(rmq_source.get_java_function(), 'usesCorrelationId')) serialization_schema = JsonRowSerializationSchema.builder().with_type_info(type_info) \ .build() rmq_sink = RMQSink(connection_config, 'sink_queue', serialization_schema) self.assertEqual( get_field_value(rmq_sink.get_java_function(), 'queueName'), 'sink_queue') class ConnectorTests(PyFlinkTestCase): def setUp(self) -> None: self.env = StreamExecutionEnvironment.get_execution_environment() self.test_sink = DataStreamTestSinkFunction() _load_specific_flink_module_jars('/flink-connectors/flink-connector-files') _load_specific_flink_module_jars('/flink-connectors/flink-connector-sink-common') def tearDown(self) -> None: self.test_sink.clear() def test_stream_file_sink(self): 
self.env.set_parallelism(2) ds = self.env.from_collection([('ab', 1), ('bdc', 2), ('cfgs', 3), ('deeefg', 4)], type_info=Types.ROW([Types.STRING(), Types.INT()])) ds.map( lambda a: a[0], Types.STRING()).add_sink( StreamingFileSink.for_row_format(self.tempdir, Encoder.simple_string_encoder()) .with_rolling_policy( RollingPolicy.default_rolling_policy( part_size=1024 * 1024 * 1024, rollover_interval=15 * 60 * 1000, inactivity_interval=5 * 60 * 1000)) .with_output_file_config( OutputFileConfig.OutputFileConfigBuilder() .with_part_prefix("prefix") .with_part_suffix("suffix").build()).build()) self.env.execute("test_streaming_file_sink") results = [] import os for root, dirs, files in os.walk(self.tempdir, topdown=True): for file in files: self.assertTrue(file.startswith('.prefix')) self.assertTrue('suffix' in file) path = root + "/" + file with open(path) as infile: for line in infile: results.append(line) expected = ['deeefg\n', 'bdc\n', 'ab\n', 'cfgs\n'] results.sort() expected.sort() self.assertEqual(expected, results) def test_file_source(self): stream_format = StreamFormat.text_line_format() paths = ["/tmp/1.txt", "/tmp/2.txt"] file_source_builder = FileSource.for_record_stream_format(stream_format, *paths) file_source = file_source_builder\ .monitor_continuously(Duration.of_days(1)) \ .set_file_enumerator(FileEnumeratorProvider.default_splittable_file_enumerator()) \ .set_split_assigner(FileSplitAssignerProvider.locality_aware_split_assigner()) \ .build() continuous_setting = file_source.get_java_function().getContinuousEnumerationSettings() self.assertIsNotNone(continuous_setting) self.assertEqual(Duration.of_days(1), Duration(continuous_setting.getDiscoveryInterval())) input_paths_field = \ load_java_class("org.apache.flink.connector.file.src.AbstractFileSource"). 
\ getDeclaredField("inputPaths") input_paths_field.setAccessible(True) input_paths = input_paths_field.get(file_source.get_java_function()) self.assertEqual(len(input_paths), len(paths)) self.assertEqual(str(input_paths[0]), paths[0]) self.assertEqual(str(input_paths[1]), paths[1]) def test_file_sink(self): base_path = "/tmp/1.txt" encoder = Encoder.simple_string_encoder() file_sink_builder = FileSink.for_row_format(base_path, encoder) file_sink = file_sink_builder\ .with_bucket_check_interval(1000) \ .with_bucket_assigner(BucketAssigner.base_path_bucket_assigner()) \ .with_rolling_policy(RollingPolicy.on_checkpoint_rolling_policy()) \ .with_output_file_config( OutputFileConfig.builder().with_part_prefix("pre").with_part_suffix("suf").build())\ .build() buckets_builder_field = \ load_java_class("org.apache.flink.connector.file.sink.FileSink"). \ getDeclaredField("bucketsBuilder") buckets_builder_field.setAccessible(True) buckets_builder = buckets_builder_field.get(file_sink.get_java_function()) self.assertEqual("DefaultRowFormatBuilder", buckets_builder.getClass().getSimpleName()) row_format_builder_clz = load_java_class( "org.apache.flink.connector.file.sink.FileSink$RowFormatBuilder") encoder_field = row_format_builder_clz.getDeclaredField("encoder") encoder_field.setAccessible(True) self.assertEqual("SimpleStringEncoder", encoder_field.get(buckets_builder).getClass().getSimpleName()) interval_field = row_format_builder_clz.getDeclaredField("bucketCheckInterval") interval_field.setAccessible(True) self.assertEqual(1000, interval_field.get(buckets_builder)) bucket_assigner_field = row_format_builder_clz.getDeclaredField("bucketAssigner") bucket_assigner_field.setAccessible(True) self.assertEqual("BasePathBucketAssigner", bucket_assigner_field.get(buckets_builder).getClass().getSimpleName()) rolling_policy_field = row_format_builder_clz.getDeclaredField("rollingPolicy") rolling_policy_field.setAccessible(True) self.assertEqual("OnCheckpointRollingPolicy", 
rolling_policy_field.get(buckets_builder).getClass().getSimpleName()) output_file_config_field = row_format_builder_clz.getDeclaredField("outputFileConfig") output_file_config_field.setAccessible(True) output_file_config = output_file_config_field.get(buckets_builder) self.assertEqual("pre", output_file_config.getPartPrefix()) self.assertEqual("suf", output_file_config.getPartSuffix()) def test_seq_source(self): seq_source = NumberSequenceSource(1, 10) seq_source_clz = load_java_class( "org.apache.flink.api.connector.source.lib.NumberSequenceSource") from_field = seq_source_clz.getDeclaredField("from") from_field.setAccessible(True) self.assertEqual(1, from_field.get(seq_source.get_java_function())) to_field = seq_source_clz.getDeclaredField("to") to_field.setAccessible(True) self.assertEqual(10, to_field.get(seq_source.get_java_function()))
apache/flink
flink-python/pyflink/datastream/tests/test_connectors.py
Python
apache-2.0
20,161
0.004018
#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def create(kernel): result = Tangible() result.template = "object/tangible/wearables/armor/mandalorian/shared_armor_mandalorian_helmet.iff" result.attribute_template_id = 0 result.stfName("wearables_name","armor_mandalorian_helmet") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result
obi-two/Rebelion
data/scripts/templates/object/tangible/wearables/armor/mandalorian/shared_armor_mandalorian_helmet.py
Python
mit
494
0.044534
import os
import operator
import sys
from collections import defaultdict

import matplotlib.pyplot as pp

import CommonConf

# Algorithm whose series serves as the reference/baseline.
BASE = 'ksp'

algs = ['mcf', 'raeke', 'edksp', 'vlb', 'ksp', 'ecmp']


def setupMPPDefaults():
    """Configure matplotlib defaults for large, publication-style figures."""
    pp.rcParams['font.size'] = 66
    pp.rcParams['mathtext.default'] = 'regular'
    pp.rcParams['ytick.labelsize'] = 62
    pp.rcParams['xtick.labelsize'] = 62
    pp.rcParams['legend.fontsize'] = 62
    pp.rcParams['lines.markersize'] = 12
    pp.rcParams['axes.titlesize'] = 60
    pp.rcParams['axes.labelsize'] = 60
    pp.rcParams['axes.edgecolor'] = 'grey'
    pp.rcParams['axes.linewidth'] = 3.0
    pp.rcParams['axes.grid'] = True
    pp.rcParams['grid.alpha'] = 0.4
    pp.rcParams['grid.color'] = 'grey'
    pp.rcParams['legend.frameon'] = True
    pp.rcParams['legend.framealpha'] = 0.4
    pp.rcParams['legend.numpoints'] = 1
    pp.rcParams['legend.scatterpoints'] = 1


def parse_rtt_file(rtt_file):
    """Parse a whitespace-separated "link rtt" file into a dict link -> RTT."""
    rtts = dict()
    with open(rtt_file) as f:
        for l in f.readlines():
            tokens = l.split()
            rtts[tokens[0]] = float(tokens[1])
    return rtts


def parse_path_file(paths_file):
    """Parse one algorithm's paths file.

    Returns a dict (src, dst) -> {path-tuple -> weight}. A header line of the
    form "src -> dst" starts a new pair; each following non-blank line is
    "[link, link, ...] @ weight".
    """
    src, dst = '', ''
    paths = dict()
    with open(paths_file) as f:
        for l in f.readlines():
            if "->" in l:
                src = l.split()[0]
                dst = l.split()[2]
                paths[(src, dst)] = dict()
            else:
                if len(l.strip()) == 0:
                    continue
                path = tuple(l.split('@')[0].strip()[1:-1].split(', '))
                weight = l.split('@')[1].strip()
                paths[(src, dst)][path] = float(weight)
    return paths


def plot_latency(rtt_file, paths_dir, out_file, rtt_hop):
    """Plot per-algorithm weighted path latency (RTT or hop count).

    Writes one PDF per algorithm next to out_file. Node pairs are ordered by
    their ECMP latency so every plot shares the same x ordering.

    @param rtt_file  file mapping link -> RTT (ms)
    @param paths_dir directory holding per-algorithm path files named <alg>_0
    @param out_file  output file stem; '<stem><alg>.pdf' is written per alg
    @param rtt_hop   'rtt' to weight by link RTTs, anything else -> hop count
    """
    paths_file = dict()
    for alg in algs:
        paths_file[alg] = paths_dir + '/' + alg + '_0'
    rtts = parse_rtt_file(rtt_file)

    alg_sd_wtrtt = defaultdict(dict)  # alg -> sd -> weighted_rtt
    for alg in algs:
        paths = parse_path_file(paths_file[alg])
        for sd, sdpaths in paths.items():
            weighted_rtts = 0
            for path, weight in sdpaths.items():
                path_rtt = 0
                if rtt_hop == 'rtt':
                    for link in path:
                        path_rtt += rtts.get(link, 0)
                else:
                    # Hop count: a stored path of k entries counts as k-2 hops
                    # (endpoints excluded).
                    path_rtt = len(path) - 2
                weighted_rtts += weight * path_rtt
            alg_sd_wtrtt[alg][sd] = weighted_rtts

    # Sort node pairs by ECMP weighted RTT so all plots use the same order.
    sorted_sd = sorted(alg_sd_wtrtt['ecmp'].items(), key=operator.itemgetter(1))
    sorted_sd = [x[0] for x in sorted_sd]

    # alg -> list of latencies sorted by ecmp distance. NOTE: the original
    # subtracted 0 * BASE latency for non-BASE algorithms (a disabled diff
    # against the baseline), which is a no-op; simplified accordingly.
    alg_sorted_lats = dict()
    for alg in algs:
        alg_sorted_lats[alg] = [alg_sd_wtrtt[alg][sd] for sd in sorted_sd]

    setupMPPDefaults()
    colors = CommonConf.getLineColorsDict()
    fmts = CommonConf.getLineFormatsDict()
    linewidth = CommonConf.getLineMarkersLWDict()
    mrkrs = CommonConf.getLineMarkersDict()
    mrkrsize = CommonConf.getLineMarkersSizeDict()

    for solver in algs:
        fig = pp.figure(figsize=(12, 6))
        ax = fig.add_subplot(111)
        ys = alg_sorted_lats[solver]
        print(solver)
        # Outlined markers for ecmp/edksp only -- presumably to make those
        # two series stand out; confirm against CommonConf styling.
        if solver == 'ecmp' or solver == 'edksp':
            markeredgecolor = colors[solver]
        else:
            markeredgecolor = 'None'
        ax.plot(ys,
                alpha=0.5,
                color=colors[solver],
                label=CommonConf.gen_label(solver),
                linewidth=linewidth[solver],
                linestyle='None',
                markevery=1,
                markeredgecolor=markeredgecolor,
                markeredgewidth=mrkrsize[solver] / 4,
                marker=mrkrs[solver],
                markersize=mrkrsize[solver] * 1.5)
        ax.set_xlabel("Node Pairs")
        if rtt_hop == 'rtt':
            ax.set_ylabel("RTT (ms)")
        else:
            ax.set_ylabel("hop count")
        ax.spines['right'].set_visible(False)
        ax.spines['top'].set_visible(False)
        ax.yaxis.set_ticks_position('left')
        ax.xaxis.set_ticks_position('bottom')
        ax.xaxis.set_ticks([])
        pp.locator_params(nbins=4)
        pp.tight_layout(pad=0)
        pp.savefig(out_file.split('.')[0] + solver + '.pdf')
        # Bug fix: close each figure after saving so a run over all
        # algorithms does not accumulate open matplotlib figures.
        pp.close(fig)


if __name__ == "__main__":
    if len(sys.argv) < 5:
        print("Usage: " + sys.argv[0] + " rtt_file paths_dir out_file rtt/hop")
        # Bug fix: previously execution fell through after printing usage and
        # crashed with IndexError on sys.argv[1]; exit explicitly instead.
        sys.exit(1)
    rtt_file = sys.argv[1]
    paths_dir = sys.argv[2]
    out_file = sys.argv[3]
    rtt_hop = sys.argv[4]
    plot_latency(rtt_file, paths_dir, out_file, rtt_hop)
merlin-lang/kulfi
simulate/viz/LatencyDiffPoints.py
Python
lgpl-3.0
5,142
0.004084
from flask import Blueprint, render_template from ...middlewares import auth_manager from .middlewares import s from ...container import container account = Blueprint('account', __name__, url_prefix='/account') def view(): account = container.account return render_template('accounts/view.html', account=account) def update(): account = container.account return render_template('accounts/view.html', account=account) account.add_url_rule( '', 'view', s(view), methods=['GET'] ) account.add_url_rule( '/edit', 'update', s(auth_manager), methods=['GET', 'POST'] )
masom/doorbot-api-python
doorbot/views/dashboard/account.py
Python
mit
610
0.001639
import pytest from cfme.cloud.provider.azure import AzureProvider from cfme.cloud.provider.ec2 import EC2Provider from cfme.cloud.provider.openstack import OpenStackProvider from cfme.networks.provider import NetworkProviderCollection from cfme.utils import testgen from cfme.utils.appliance.implementations.ui import navigate_to pytest_generate_tests = testgen.generate( classes=[EC2Provider, AzureProvider, OpenStackProvider], scope='module') pytestmark = pytest.mark.usefixtures('setup_provider') @pytest.mark.tier(1) def test_sdn_crud(provider, appliance): """ Test for functional addition of network manager with cloud provider and functional references to components on detail page Prerequisites: Cloud provider in cfme """ view = navigate_to(provider, 'Details') net_prov_name = view.contents.relationships.get_text_of("Network Manager") collection = NetworkProviderCollection(appliance) network_provider = collection.instantiate(name=net_prov_name) view = navigate_to(network_provider, 'Details') parent_name = view.entities.relationships.get_text_of("Parent Cloud Provider") assert parent_name == provider.name testing_list = ["Cloud Networks", "Cloud Subnets", "Network Routers", "Security Groups", "Floating IPs", "Network Ports", "Load Balancers"] for testing_name in testing_list: view = navigate_to(network_provider, 'Details') view.entities.relationships.click_at(testing_name) provider.delete_if_exists(cancel=False) provider.wait_for_delete() assert not network_provider.exists
okolisny/integration_tests
cfme/tests/networks/test_sdn_crud.py
Python
gpl-2.0
1,613
0.00124
# # gPrime - A web-based genealogy program # # Copyright (C) 2002-2006 Donald N. Allingham # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # #------------------------------------------------------------------------- # # Standard Python modules # #------------------------------------------------------------------------- from ....const import LOCALE as glocale _ = glocale.translation.gettext #------------------------------------------------------------------------- # # Gprime modules # #------------------------------------------------------------------------- from .._hasnotesubstrbase import HasNoteSubstrBase #------------------------------------------------------------------------- # "Media having notes that contain a substring" #------------------------------------------------------------------------- class HasNoteMatchingSubstringOf(HasNoteSubstrBase): """Media having notes containing <substring>""" name = _('Media objects having notes containing <substring>') description = _("Matches media objects whose notes contain text " "matching a substring")
sam-m888/gprime
gprime/filters/rules/media/_hasnotematchingsubstringof.py
Python
gpl-2.0
1,769
0.005653
"""Template filters and tags for rendering datetimes in specific time zones."""
from datetime import datetime, tzinfo

try:
    import pytz
except ImportError:
    # pytz is optional; without it only tzinfo instances (not zone names)
    # are accepted by the timezone filter/tag.
    pytz = None

from django.template import Node
from django.template import TemplateSyntaxError, Library
from django.utils import timezone

register = Library()


# HACK: datetime is an old-style class, create a new-style equivalent
# so we can define additional attributes.
class datetimeobject(datetime, object):
    pass


# Template filters

@register.filter
def localtime(value):
    """
    Converts a datetime to local time in the active time zone.

    This only makes sense within a {% localtime off %} block.
    """
    return do_timezone(value, timezone.get_current_timezone())


@register.filter
def utc(value):
    """
    Converts a datetime to UTC.
    """
    return do_timezone(value, timezone.utc)


@register.filter('timezone')
def do_timezone(value, arg):
    """
    Converts a datetime to local time in a given time zone.

    The argument must be an instance of a tzinfo subclass or a time zone name.
    If it is a time zone name, pytz is required.

    Naive datetimes are assumed to be in local time in the default time zone.
    """
    if not isinstance(value, datetime):
        return ''

    # Obtain a timezone-aware datetime
    try:
        if timezone.is_naive(value):
            default_timezone = timezone.get_default_timezone()
            value = timezone.make_aware(value, default_timezone)
    # Filters must never raise exceptions, and pytz' exceptions inherit
    # Exception directly, not a specific subclass. So catch everything.
    except Exception:
        return ''

    # Obtain a tzinfo instance
    if isinstance(arg, tzinfo):
        tz = arg
    elif isinstance(arg, basestring) and pytz is not None:
        try:
            tz = pytz.timezone(arg)
        except pytz.UnknownTimeZoneError:
            return ''
    else:
        return ''

    result = timezone.localtime(value, tz)

    # HACK: the convert_to_local_time flag will prevent
    # automatic conversion of the value to local time.
    result = datetimeobject(result.year, result.month, result.day,
                            result.hour, result.minute, result.second,
                            result.microsecond, result.tzinfo)
    result.convert_to_local_time = False
    return result


# Template tags

class LocalTimeNode(Node):
    """
    Template node class used by ``localtime_tag``.
    """
    def __init__(self, nodelist, use_tz):
        self.nodelist = nodelist
        self.use_tz = use_tz

    def render(self, context):
        # Save and restore the context's use_tz flag so the override only
        # applies inside this block.
        old_setting = context.use_tz
        context.use_tz = self.use_tz
        output = self.nodelist.render(context)
        context.use_tz = old_setting
        return output


class TimezoneNode(Node):
    """
    Template node class used by ``timezone_tag``.
    """
    def __init__(self, nodelist, tz):
        self.nodelist = nodelist
        self.tz = tz

    def render(self, context):
        # timezone.override restores the previously-active zone on exit.
        with timezone.override(self.tz.resolve(context)):
            output = self.nodelist.render(context)
        return output


class GetCurrentTimezoneNode(Node):
    """
    Template node class used by ``get_current_timezone_tag``.
    """
    def __init__(self, variable):
        self.variable = variable

    def render(self, context):
        context[self.variable] = timezone.get_current_timezone_name()
        return ''


@register.tag('localtime')
def localtime_tag(parser, token):
    """
    Forces or prevents conversion of datetime objects to local time,
    regardless of the value of ``settings.USE_TZ``.

    Sample usage::

        {% localtime off %}{{ value_in_utc }}{% endlocaltime %}
    """
    bits = token.split_contents()
    # Bare {% localtime %} means "on"; otherwise exactly one of on/off.
    if len(bits) == 1:
        use_tz = True
    elif len(bits) > 2 or bits[1] not in ('on', 'off'):
        raise TemplateSyntaxError("%r argument should be 'on' or 'off'" %
                                  bits[0])
    else:
        use_tz = bits[1] == 'on'
    nodelist = parser.parse(('endlocaltime',))
    parser.delete_first_token()
    return LocalTimeNode(nodelist, use_tz)


@register.tag('timezone')
def timezone_tag(parser, token):
    """
    Enables a given time zone just for this block.

    The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
    time zone name, or ``None``. If is it a time zone name, pytz is required.
    If it is ``None``, the default time zone is used within the block.

    Sample usage::

        {% timezone "Europe/Paris" %}
            It is {{ now }} in Paris.
        {% endtimezone %}
    """
    bits = token.split_contents()
    if len(bits) != 2:
        raise TemplateSyntaxError("'%s' takes one argument (timezone)" %
                                  bits[0])
    tz = parser.compile_filter(bits[1])
    nodelist = parser.parse(('endtimezone',))
    parser.delete_first_token()
    return TimezoneNode(nodelist, tz)


@register.tag("get_current_timezone")
def get_current_timezone_tag(parser, token):
    """
    Stores the name of the current time zone in the context.

    Usage::

        {% get_current_timezone as TIME_ZONE %}

    This will fetch the currently active time zone and put its name
    into the ``TIME_ZONE`` context variable.
    """
    args = token.contents.split()
    if len(args) != 3 or args[1] != 'as':
        raise TemplateSyntaxError("'get_current_timezone' requires "
                                  "'as variable' (got %r)" % args)
    return GetCurrentTimezoneNode(args[2])
adrianholovaty/django
django/templatetags/tz.py
Python
bsd-3-clause
5,488
0
#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def create(kernel): result = Tangible() result.template = "object/tangible/wearables/armor/stormtrooper/shared_armor_stormtrooper_chest_plate.iff" result.attribute_template_id = 0 result.stfName("wearables_name","armor_stormtrooper_chest_plate") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result
anhstudios/swganh
data/scripts/templates/object/tangible/wearables/armor/stormtrooper/shared_armor_stormtrooper_chest_plate.py
Python
mit
507
0.043393
#!/usr/bin/env python3 # Copyright 2020 The Pigweed Authors # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. """Tests for presubmit tools.""" import unittest from pw_presubmit import presubmit def _fake_function_1(_): """Fake presubmit function.""" def _fake_function_2(_): """Fake presubmit function.""" class ProgramsTest(unittest.TestCase): """Tests the presubmit Programs abstraction.""" def setUp(self): self._programs = presubmit.Programs( first=[_fake_function_1, (), [(_fake_function_2, )]], second=[_fake_function_2], ) def test_empty(self): self.assertEqual({}, presubmit.Programs()) def test_access_present_members(self): self.assertEqual('first', self._programs['first'].name) self.assertEqual((_fake_function_1, _fake_function_2), tuple(self._programs['first'])) self.assertEqual('second', self._programs['second'].name) self.assertEqual((_fake_function_2, ), tuple(self._programs['second'])) def test_access_missing_member(self): with self.assertRaises(KeyError): _ = self._programs['not_there'] def test_all_steps(self): self.assertEqual( { '_fake_function_1': _fake_function_1, '_fake_function_2': _fake_function_2, }, self._programs.all_steps()) if __name__ == '__main__': unittest.main()
google/pigweed
pw_presubmit/py/presubmit_test.py
Python
apache-2.0
1,944
0
# coding: utf-8
#
# Copyright 2012 NAMD-EMAP-FGV
#
# This file is part of PyPLN. You can get more information at: http://pypln.org/.
#
# PyPLN is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyPLN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyPLN.  If not, see <http://www.gnu.org/licenses/>.

import nltk

from pypln.backend.workers.bigrams import Bigrams
from utils import TaskTest

# NOTE(review): bigram_measures appears unused in this module -- confirm it
# can be removed or whether import-time construction is relied upon.
bigram_measures = nltk.collocations.BigramAssocMeasures()


class TestBigramWorker(TaskTest):
    def test_bigrams_should_return_correct_score(self):
        """Run the Bigrams worker on a real corpus and check the top chi_sq score."""
        # We need this list comprehension because we need to save the word list
        # in mongo (thus, it needs to be json serializable). Also, a list is
        # what will be available to the worker in real situations.
        tokens = [w for w in
                  nltk.corpus.genesis.words('english-web.txt')]

        doc_id = self.collection.insert({'tokens': tokens}, w=1)
        Bigrams().delay(doc_id)
        refreshed_document = self.collection.find_one({'_id': doc_id})
        bigram_rank = refreshed_document['bigram_rank']
        # bigram_rank is ordered; [0][1][0] is the first measure value
        # (chi_sq) of the highest-ranked bigram.
        result = bigram_rank[0][1][0]
        # This is the value of the chi_sq measure for this bigram in this
        # colocation
        expected_chi_sq = 95.59393417173634
        self.assertEqual(result, expected_chi_sq)

    def test_bigrams_could_contain_dollar_signs_and_dots(self):
        """Regression test: '$' and '.' tokens must not break the worker."""
        tokens = ['$', '.']
        doc_id = self.collection.insert({'tokens': tokens}, w=1)
        Bigrams().delay(doc_id)
        refreshed_document = self.collection.find_one({'_id': doc_id})
        bigram_rank = refreshed_document['bigram_rank']
        result = bigram_rank[0][1][0]
        # 2.0 is the value of the chi_sq measure for this bigram in this
        # colocation
        expected_chi_sq = 2.0
        self.assertEqual(result, expected_chi_sq)
NAMD/pypln.backend
tests/test_worker_bigrams.py
Python
gpl-3.0
2,288
0.000874
#{# # This file is protected by Copyright. Please refer to the COPYRIGHT file # distributed with this source distribution. # # This file is part of REDHAWK core. # # REDHAWK core is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) any # later version. # # REDHAWK core is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see http://www.gnu.org/licenses/. #} #% set className = portgen.className() class ${className}(${component.baseclass.name}.${portgen.templateClass()}): def __init__(self, parent, name): self.parent = parent self.name = name self.sri = None self.queue = Queue.Queue() self.port_lock = threading.Lock() #{% for operation in portgen.operations() %} #{% set arglist = ['self'] + operation.args %} def ${operation.name}(${arglist|join(', ')}): # TODO: pass #{% endfor %}
RedhawkSDR/framework-codegen
redhawk/codegen/jinja/python/ports/templates/generic.provides.py
Python
lgpl-3.0
1,314
0.005327
from phystricks import * def exCircleThree(): pspict,fig = SinglePicture("exCircleThree") circle = Circle(Point(0,0),1.5) circle.angleI = 45 circle.angleF = 380 circle.wave(0.1,0.1) circle.parameters.color = "green" circleB = Circle(Point(0,0),1.5) circleB.angleI = circle.angleF-360 circleB.angleF = circle.angleI circleB.wave(circle.waviness.dx,circle.waviness.dy) circleB.parameters.color = "red" pspict.DrawGraphs(circle,circleB) pspict.DrawDefaultAxes() pspict.comment="A large green wavy part and a small red wavy part." fig.no_figure() fig.conclude() fig.write_the_file()
LaurentClaessens/phystricks
testing/demonstration/exCircleThree.py
Python
gpl-3.0
652
0.015337
from Framework.Resposta import Resposta from Models.Grau.Grau import Grau as ModelGrau class RespostaEditar(Resposta): def __init__(self,mensagem): self.corpo = mensagem
AEDA-Solutions/matweb
backend/Models/Grau/RespostaEditar.py
Python
mit
179
0.039106
# -*- coding: utf-8 -*- u""" Copyright 2013-2014 Olivier Cortès <oc@1flow.io>. This file is part of the 1flow project. 1flow is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 1flow is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with 1flow. If not, see http://www.gnu.org/licenses/ """ from django.conf import settings from django.contrib import admin from ..models.reldb import ( # DjangoUser as User, HistoryEntry, MailAccount, Author, Folder, SyncNode, NodePermissions, ) if settings.FULL_ADMIN: from helpcontent import HelpContent, HelpContentAdmin admin.site.register(HelpContent, HelpContentAdmin) from website import WebSite, WebSiteAdmin admin.site.register(WebSite, WebSiteAdmin) admin.site.register(Author) admin.site.register(Folder) from processor import ( Processor, ProcessorAdmin, ProcessingChain, ChainedItem, ChainedItemAdmin, ChainedItemParameter, ProcessingError, ProcessorCategory, ) admin.site.register(Processor, ProcessorAdmin) admin.site.register(ProcessingChain) admin.site.register(ChainedItem, ChainedItemAdmin) admin.site.register(ChainedItemParameter) admin.site.register(ProcessingError) admin.site.register(ProcessorCategory) from language import Language, LanguageAdmin admin.site.register(Language, LanguageAdmin) admin.site.register(MailAccount) # admin.site.register(CombinedFeedRule) admin.site.register(HistoryEntry) from userimport import UserImport, UserImportAdmin admin.site.register(UserImport, UserImportAdmin) admin.site.register(SyncNode) admin.site.register(NodePermissions) from feed import ( RssAtomFeed, 
RssAtomFeedAdmin, MailFeed, MailFeedRule, MailFeedRuleAdmin, # CombinedFeed # TwitterFeed ) admin.site.register(RssAtomFeed, RssAtomFeedAdmin) admin.site.register(MailFeed) admin.site.register(MailFeedRule, MailFeedRuleAdmin) # admin.site.register(CombinedFeed) from subscription import Subscription, SubscriptionAdmin admin.site.register(Subscription, SubscriptionAdmin) from article import Article, ArticleAdmin, OriginalData admin.site.register(Article, ArticleAdmin) admin.site.register(OriginalData) from read import Read, ReadAdmin admin.site.register(Read, ReadAdmin) from tag import Tag, TagAdmin admin.site.register(Tag, TagAdmin) # TODO: remove this when migration is finished import mongo # NOQA
1flow/1flow
oneflow/core/admin/__init__.py
Python
agpl-3.0
2,818
0.00284
""" API for communicating with the i3 window manager. """ import json import subprocess class I3Msg(object): """Send messages to i3.""" def __init__(self, socket=None, msgbin=None): """ Initialize the messager. @param socket The socket to connect to i3 via. @param msgbin The path to i3-msg. """ if msgbin is None: msgbin = "i3-msg" self.cmd = (msgbin,) if socket is not None: self.cmd = self.cmd + ('s', socket) def call(self, *args): """ Call i3msg and return the parsed output. @param args The set of command line arguments to pass to i3-msg. @return An object containing the command's response. """ data = subprocess.check_output(self.cmd + args) if len(data) == 0: return None return json.loads(data.decode('utf-8')) def command(self, *cmd): """ Return the result of an i3 command. @param cmd A list representing the command to execute. @return The output of the command. """ return self.call(*cmd) def get_workspaces(self): """ Return a list of workspace objects. @return Parsed output of i3-msg -t get_workspaces. """ return self.call('-t', 'get_workspaces') def get_outputs(self): """ Return a list of output objects. @return Parsed output of i3-msg -t get_outputs. """ return self.call('-t', 'get_outputs') def get_tree(self): """ Return the i3 container tree. @return Parsed output of i3-msg -t get_tree. """ return self.call('-t', 'get_tree') def get_marks(self): """ Return a list of marks. @return Parsed output of i3-msg -t get_marks. """ return self.call('-t', 'get_marks') def get_bar_config(self, id=None): """ Return i3bar config with the given ID or all if no ID is provided. @param id The ID of the bar to retrieve configuration for. @return Parsed output of i3-msg -t get_bar_config [ID]. """ args = ('-t', 'get_bar_config') if id is not None: args = args + (id,) return self.call(*args) def get_version(self): """ Return the version of i3 we're connected to. @return Parsed output of i3-msg -t get_version. 
""" return self.call('-t', 'get_version') class I3Runner(object): """Run a series of commands from a file stream.""" def __init__(self, i3msg=None): """ Initialize the runner. @param i3msg The i3msg object. If None then a new object will be created with the default arguments. """ if i3msg is None: i3msg = I3Msg() self.i3 = i3msg def commands(self, cmds, ignore=None): """ Run multiple of commands. @param cmds An iterable containing commands to run. @param ignore A regex used to ignore certain lines. Defaults to None. @return A list of results, one for each command. """ results = [] for cmd in cmds: if len(cmd.strip()) == 0 or ( ignore is not None and ignore.match(cmd)): results.append(None) else: results.append(self.i3.command(cmd)) return results def loads(self, data, ignore=None): """ Load commands from a string. @param data The string to process. Commands should be on individual lines. @param ignore A regex used to ignore certain lines. Defaults to None. @return A list of results, one for each command. """ return self.commands(data.split("\n")) def load(self, file, ignore=None): """ Load commands from a file-like object. @param file A file-like object to read commands from. @param ignore A regex used to ignore certain lines. Defaults to None. @return A list of results, one for each command. """ return self.commands(file, ignore)
BlueDragonX/fm-dot
i3/lib/i3.py
Python
bsd-3-clause
4,205
0
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('forums', '0017_auto_20150517_1552'), ] operations = [ migrations.AlterField( model_name='post', name='reply_to', field=models.ForeignKey(related_name='replies', blank=True, editable=False, to='forums.Post', null=True), ), ]
Aurora0000/descant
forums/migrations/0018_auto_20150518_1634.py
Python
mit
467
0.002141
#!/usr/bin/python # (c) 2017, NetApp, Inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' module: sf_snapshot_schedule_manager short_description: Manage SolidFire snapshot schedules extends_documentation_fragment: - netapp.solidfire version_added: '2.3' author: Sumit Kumar (sumit4@netapp.com) description: - Create, destroy, or update accounts on SolidFire options: state: description: - Whether the specified schedule should exist or not. required: true choices: ['present', 'absent'] paused: description: - Pause / Resume a schedule. required: false recurring: description: - Should the schedule recur? required: false time_interval_days: description: Time interval in days. required: false default: 1 time_interval_hours: description: Time interval in hours. required: false default: 0 time_interval_minutes: description: Time interval in minutes. required: false default: 0 name: description: - Name for the snapshot schedule. required: true snapshot_name: description: - Name for the created snapshots. required: false volumes: description: - Volume IDs that you want to set the snapshot schedule for. - At least 1 volume ID is required for creating a new schedule. - required when C(state=present) required: false retention: description: - Retention period for the snapshot. - Format is 'HH:mm:ss'. required: false schedule_id: description: - The schedule ID for the schedule that you want to update or delete. required: false starting_date: description: - Starting date for the schedule. - Required when C(state=present). - Please use two '-' in the above format, or you may see an error- TypeError, is not JSON serializable description. 
- "Format: C(2016--12--01T00:00:00Z)" required: false ''' EXAMPLES = """ - name: Create Snapshot schedule sf_snapshot_schedule_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: present name: Schedule_A time_interval_days: 1 starting_date: 2016--12--01T00:00:00Z volumes: 7 - name: Update Snapshot schedule sf_snapshot_schedule_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: present schedule_id: 6 recurring: True snapshot_name: AnsibleSnapshots - name: Delete Snapshot schedule sf_snapshot_schedule_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: absent schedule_id: 6 """ RETURN = """ schedule_id: description: Schedule ID of the newly created schedule returned: success type: string """ import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native import ansible.module_utils.netapp as netapp_utils HAS_SF_SDK = netapp_utils.has_sf_sdk() class SolidFireSnapShotSchedule(object): def __init__(self): self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() self.argument_spec.update(dict( state=dict(required=True, choices=['present', 'absent']), name=dict(required=True, type='str'), time_interval_days=dict(required=False, type='int', default=1), time_interval_hours=dict(required=False, type='int', default=0), time_interval_minutes=dict(required=False, type='int', default=0), paused=dict(required=False, type='bool'), recurring=dict(required=False, type='bool'), starting_date=dict(type='str'), snapshot_name=dict(required=False, type='str'), volumes=dict(required=False, type='list'), retention=dict(required=False, type='str'), schedule_id=dict(type='int'), )) self.module = AnsibleModule( argument_spec=self.argument_spec, required_if=[ ('state', 'present', ['starting_date', 'volumes']) 
], supports_check_mode=True ) p = self.module.params # set up state variables self.state = p['state'] self.name = p['name'] # self.interval = p['interval'] self.time_interval_days = p['time_interval_days'] self.time_interval_hours = p['time_interval_hours'] self.time_interval_minutes = p['time_interval_minutes'] self.paused = p['paused'] self.recurring = p['recurring'] self.starting_date = p['starting_date'] if self.starting_date is not None: self.starting_date = self.starting_date.replace("--", "-") self.snapshot_name = p['snapshot_name'] self.volumes = p['volumes'] self.retention = p['retention'] self.schedule_id = p['schedule_id'] self.create_schedule_result = None if HAS_SF_SDK is False: self.module.fail_json(msg="Unable to import the SolidFire Python SDK") else: self.sfe = netapp_utils.create_sf_connection(module=self.module) def get_schedule(self): schedule_list = self.sfe.list_schedules() for schedule in schedule_list.schedules: if schedule.name == self.name: # Update self.schedule_id: if self.schedule_id is not None: if schedule.schedule_id == self.schedule_id: return schedule else: self.schedule_id = schedule.schedule_id return schedule return None def create_schedule(self): try: sched = netapp_utils.Schedule() # if self.interval == 'time_interval': sched.frequency = netapp_utils.TimeIntervalFrequency(days=self.time_interval_days, hours=self.time_interval_hours, minutes=self.time_interval_minutes) # Create schedule sched.name = self.name sched.schedule_info = netapp_utils.ScheduleInfo( volume_ids=self.volumes, snapshot_name=self.snapshot_name, retention=self.retention ) sched.paused = self.paused sched.recurring = self.recurring sched.starting_date = self.starting_date self.create_schedule_result = self.sfe.create_schedule(schedule=sched) except Exception as e: self.module.fail_json(msg='Error creating schedule %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def delete_schedule(self): try: get_schedule_result = 
self.sfe.get_schedule(schedule_id=self.schedule_id) sched = get_schedule_result.schedule sched.to_be_deleted = True self.sfe.modify_schedule(schedule=sched) except Exception as e: self.module.fail_json(msg='Error deleting schedule %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def update_schedule(self): try: get_schedule_result = self.sfe.get_schedule(schedule_id=self.schedule_id) sched = get_schedule_result.schedule # Update schedule properties # if self.interval == 'time_interval': temp_frequency = netapp_utils.TimeIntervalFrequency(days=self.time_interval_days, hours=self.time_interval_hours, minutes=self.time_interval_minutes) if sched.frequency.days != temp_frequency.days or \ sched.frequency.hours != temp_frequency.hours \ or sched.frequency.minutes != temp_frequency.minutes: sched.frequency = temp_frequency sched.name = self.name if self.volumes is not None: sched.schedule_info.volume_ids = self.volumes if self.retention is not None: sched.schedule_info.retention = self.retention if self.snapshot_name is not None: sched.schedule_info.snapshot_name = self.snapshot_name if self.paused is not None: sched.paused = self.paused if self.recurring is not None: sched.recurring = self.recurring if self.starting_date is not None: sched.starting_date = self.starting_date # Make API call self.sfe.modify_schedule(schedule=sched) except Exception as e: self.module.fail_json(msg='Error updating schedule %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def apply(self): changed = False schedule_exists = False update_schedule = False schedule_detail = self.get_schedule() if schedule_detail: schedule_exists = True if self.state == 'absent': changed = True elif self.state == 'present': # Check if we need to update the account if self.retention is not None and schedule_detail.schedule_info.retention != self.retention: update_schedule = True changed = True elif schedule_detail.name != self.name: update_schedule = True changed = True 
elif self.snapshot_name is not None and schedule_detail.schedule_info.snapshot_name != self.snapshot_name: update_schedule = True changed = True elif self.volumes is not None and schedule_detail.schedule_info.volume_ids != self.volumes: update_schedule = True changed = True elif self.paused is not None and schedule_detail.paused != self.paused: update_schedule = True changed = True elif self.recurring is not None and schedule_detail.recurring != self.recurring: update_schedule = True changed = True elif self.starting_date is not None and schedule_detail.starting_date != self.starting_date: update_schedule = True changed = True elif self.time_interval_minutes is not None or self.time_interval_hours is not None \ or self.time_interval_days is not None: temp_frequency = netapp_utils.TimeIntervalFrequency(days=self.time_interval_days, hours=self.time_interval_hours, minutes=self.time_interval_minutes) if schedule_detail.frequency.days != temp_frequency.days or \ schedule_detail.frequency.hours != temp_frequency.hours \ or schedule_detail.frequency.minutes != temp_frequency.minutes: update_schedule = True changed = True else: if self.state == 'present': changed = True if changed: if self.module.check_mode: # Skip changes pass else: if self.state == 'present': if not schedule_exists: self.create_schedule() elif update_schedule: self.update_schedule() elif self.state == 'absent': self.delete_schedule() if self.create_schedule_result is not None: self.module.exit_json(changed=changed, schedule_id=self.create_schedule_result.schedule_id) else: self.module.exit_json(changed=changed) def main(): v = SolidFireSnapShotSchedule() v.apply() if __name__ == '__main__': main()
jimi-c/ansible
lib/ansible/modules/storage/netapp/sf_snapshot_schedule_manager.py
Python
gpl-3.0
13,004
0.002538
#!/usr/bin/python # GNU General Public License v3.0+ (see COPYING or # https://www.gnu.org/licenses/gpl-3.0.txt) # from __future__ import (absolute_import, division, print_function) __metaclass__ = type ANSIBLE_METADATA = { 'metadata_version': '1.1', 'supported_by': 'community', 'status': ['preview'] } DOCUMENTATION = ''' --- version_added: "2.8" author: - Naval Patel (@navalkp) - Prashant Bhosale (@prabhosa) module: lxca_cmms short_description: Custom module for lxca cmms inventory utility description: - This module returns/displays a inventory details of cmms options: uuid: description: uuid of device, this is string with length greater than 16. command_options: description: options to filter nodes information default: cmms choices: - cmms - cmms_by_uuid - cmms_by_chassis_uuid chassis: description: uuid of chassis, this is string with length greater than 16. extends_documentation_fragment: - lxca_common ''' EXAMPLES = ''' # get all cmms info - name: get nodess data from LXCA lxca_cmms: login_user: USERID login_password: Password auth_url: "https://10.243.15.168" # get specific cmms info by uuid - name: get nodes data from LXCA lxca_cmms: login_user: USERID login_password: Password auth_url: "https://10.243.15.168" uuid: "3C737AA5E31640CE949B10C129A8B01F" command_options: cmms_by_uuid # get specific cmms info by chassis uuid - name: get nodes data from LXCA lxca_cmms: login_user: USERID login_password: Password auth_url: "https://10.243.15.168" chassis: "3C737AA5E31640CE949B10C129A8B01F" command_options: cmms_by_chassis_uuid ''' RETURN = r''' result: description: cmms detail from lxca returned: success type: dict sample: cmmList: - machineType: '' model: '' type: 'CMM' uuid: '118D2C88C8FD11E4947B6EAE8B4BDCDF' # bunch of properties - machineType: '' model: '' type: 'CMM' uuid: '223D2C88C8FD11E4947B6EAE8B4BDCDF' # bunch of properties # Multiple cmms details ''' import traceback from ansible.module_utils.basic import AnsibleModule from 
ansible.module_utils.remote_management.lxca.common import LXCA_COMMON_ARGS, has_pylxca, connection_object try: from pylxca import cmms except ImportError: pass UUID_REQUIRED = 'UUID of device is required for cmms_by_uuid command.' CHASSIS_UUID_REQUIRED = 'UUID of chassis is required for cmms_by_chassis_uuid command.' SUCCESS_MSG = "Success %s result" def _cmms(module, lxca_con): return cmms(lxca_con) def _cmms_by_uuid(module, lxca_con): if not module.params['uuid']: module.fail_json(msg=UUID_REQUIRED) return cmms(lxca_con, module.params['uuid']) def _cmms_by_chassis_uuid(module, lxca_con): if not module.params['chassis']: module.fail_json(msg=CHASSIS_UUID_REQUIRED) return cmms(lxca_con, chassis=module.params['chassis']) def setup_module_object(): """ this function merge argument spec and create ansible module object :return: """ args_spec = dict(LXCA_COMMON_ARGS) args_spec.update(INPUT_ARG_SPEC) module = AnsibleModule(argument_spec=args_spec, supports_check_mode=False) return module FUNC_DICT = { 'cmms': _cmms, 'cmms_by_uuid': _cmms_by_uuid, 'cmms_by_chassis_uuid': _cmms_by_chassis_uuid, } INPUT_ARG_SPEC = dict( command_options=dict(default='cmms', choices=['cmms', 'cmms_by_uuid', 'cmms_by_chassis_uuid']), uuid=dict(default=None), chassis=dict(default=None) ) def execute_module(module): """ This function invoke commands :param module: Ansible module object """ try: with connection_object(module) as lxca_con: result = FUNC_DICT[module.params['command_options']](module, lxca_con) module.exit_json(changed=False, msg=SUCCESS_MSG % module.params['command_options'], result=result) except Exception as exception: error_msg = '; '.join((e) for e in exception.args) module.fail_json(msg=error_msg, exception=traceback.format_exc()) def main(): module = setup_module_object() has_pylxca(module) execute_module(module) if __name__ == '__main__': main()
tersmitten/ansible
lib/ansible/modules/remote_management/lxca/lxca_cmms.py
Python
gpl-3.0
4,442
0.001576
# Generated by Django 3.0.5 on 2020-04-07 10:39 import django.core.validators from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("spectator_events", "0041_event_ticket"), ] operations = [ migrations.AlterField( model_name="venue", name="cinema_treasures_id", field=models.PositiveIntegerField( blank=True, help_text='Optional. ID of a cinema at\n<a href="http://cinematreasures.org/">Cinema Treasures</a>.', # noqa: E501 null=True, ), ), migrations.AlterField( model_name="work", name="imdb_id", field=models.CharField( blank=True, help_text="Starts with 'tt', e.g. 'tt0100842'.\nFrom <a href=\"https://www.imdb.com\">IMDb</a>.", # noqa: E501 max_length=12, validators=[ django.core.validators.RegexValidator( code="invalid_imdb_id", message='IMDb ID should be like "tt1234567"', regex="^tt\\d{7,10}$", ) ], verbose_name="IMDb ID", ), ), ]
philgyford/django-spectator
spectator/events/migrations/0042_auto_20200407_1039.py
Python
mit
1,302
0
def create_pos_n_neg(): for file_type in ['neg']: for img in os.listdir(file_type): if file_type == 'pos': line = file_type+'/'+img+' 1 0 0 50 50\n' with open('info.dat','a') as f: f.write(line) elif file_type == 'neg': line = file_type+'/'+img+'\n' with open('bg.txt','a') as f: f.write(line)
Tianyi94/EC601Project_Somatic-Parkour-Game-based-on-OpenCV
Old Code/ControlPart/Create_pos&neg.py
Python
mit
444
0.006757
# -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from collections import OrderedDict import functools import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.policytroubleshooter_v1.types import checker from google.cloud.policytroubleshooter_v1.types import explanations from .transports.base import IamCheckerTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import IamCheckerGrpcAsyncIOTransport from .client import IamCheckerClient class IamCheckerAsyncClient: """IAM Policy Troubleshooter service. This service helps you troubleshoot access issues for Google Cloud resources. 
""" _client: IamCheckerClient DEFAULT_ENDPOINT = IamCheckerClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = IamCheckerClient.DEFAULT_MTLS_ENDPOINT common_billing_account_path = staticmethod( IamCheckerClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( IamCheckerClient.parse_common_billing_account_path ) common_folder_path = staticmethod(IamCheckerClient.common_folder_path) parse_common_folder_path = staticmethod(IamCheckerClient.parse_common_folder_path) common_organization_path = staticmethod(IamCheckerClient.common_organization_path) parse_common_organization_path = staticmethod( IamCheckerClient.parse_common_organization_path ) common_project_path = staticmethod(IamCheckerClient.common_project_path) parse_common_project_path = staticmethod(IamCheckerClient.parse_common_project_path) common_location_path = staticmethod(IamCheckerClient.common_location_path) parse_common_location_path = staticmethod( IamCheckerClient.parse_common_location_path ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials info. Args: info (dict): The service account private key info. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: IamCheckerAsyncClient: The constructed client. """ return IamCheckerClient.from_service_account_info.__func__(IamCheckerAsyncClient, info, *args, **kwargs) # type: ignore @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials file. Args: filename (str): The path to the service account private key json file. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: IamCheckerAsyncClient: The constructed client. 
""" return IamCheckerClient.from_service_account_file.__func__(IamCheckerAsyncClient, filename, *args, **kwargs) # type: ignore from_service_account_json = from_service_account_file @classmethod def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[ClientOptions] = None ): """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the client cert source is None. (2) if `client_options.client_cert_source` is provided, use the provided one; if the default client cert source exists, use the default one; otherwise the client cert source is None. The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the default mTLS endpoint; if the environment variabel is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. More details can be found at https://google.aip.dev/auth/4114. Args: client_options (google.api_core.client_options.ClientOptions): Custom options for the client. Only the `api_endpoint` and `client_cert_source` properties may be used in this method. Returns: Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the client cert source to use. Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ return IamCheckerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore @property def transport(self) -> IamCheckerTransport: """Returns the transport used by the client instance. Returns: IamCheckerTransport: The transport used by the client instance. 
""" return self._client.transport get_transport_class = functools.partial( type(IamCheckerClient).get_transport_class, type(IamCheckerClient) ) def __init__( self, *, credentials: ga_credentials.Credentials = None, transport: Union[str, IamCheckerTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the iam checker client. Args: credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. transport (Union[str, ~.IamCheckerTransport]): The transport to use. If set to None, a transport is chosen automatically. client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always use the default regular endpoint) and "auto" (auto switch to the default mTLS endpoint if client certificate is present, this is the default value). However, the ``api_endpoint`` property takes precedence if provided. (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used to provide client certificate for mutual TLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. 
""" self._client = IamCheckerClient( credentials=credentials, transport=transport, client_options=client_options, client_info=client_info, ) async def troubleshoot_iam_policy( self, request: Union[checker.TroubleshootIamPolicyRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> checker.TroubleshootIamPolicyResponse: r"""Checks whether a member has a specific permission for a specific resource, and explains why the member does or does not have that permission. .. code-block:: python from google.cloud import policytroubleshooter_v1 def sample_troubleshoot_iam_policy(): # Create a client client = policytroubleshooter_v1.IamCheckerClient() # Initialize request argument(s) request = policytroubleshooter_v1.TroubleshootIamPolicyRequest( ) # Make the request response = client.troubleshoot_iam_policy(request=request) # Handle the response print(response) Args: request (Union[google.cloud.policytroubleshooter_v1.types.TroubleshootIamPolicyRequest, dict]): The request object. Request for [TroubleshootIamPolicy][google.cloud.policytroubleshooter.v1.IamChecker.TroubleshootIamPolicy]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.cloud.policytroubleshooter_v1.types.TroubleshootIamPolicyResponse: Response for [TroubleshootIamPolicy][google.cloud.policytroubleshooter.v1.IamChecker.TroubleshootIamPolicy]. """ # Create or coerce a protobuf request object. request = checker.TroubleshootIamPolicyRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.troubleshoot_iam_policy, default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) # Send the request. 
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response async def __aenter__(self): return self async def __aexit__(self, exc_type, exc, tb): await self.transport.close() try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-policy-troubleshooter", ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("IamCheckerAsyncClient",)
googleapis/python-policy-troubleshooter
google/cloud/policytroubleshooter_v1/services/iam_checker/async_client.py
Python
apache-2.0
11,763
0.00204
from __future__ import with_statement from alembic import context from sqlalchemy import engine_from_config, pool from logging.config import fileConfig import logging # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config # Interpret the config file for Python logging. # This line sets up loggers basically. fileConfig(config.config_file_name) logger = logging.getLogger('alembic.env') # add your model's MetaData object here # for 'autogenerate' support from pasquino import app target_metadata = app.db.metadata from flask import current_app config.set_main_option('sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI')) def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ url = config.get_main_option("sqlalchemy.url") context.configure(url=url) with context.begin_transaction(): context.run_migrations() def run_migrations_online(): """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. 
""" # this callback is used to prevent an auto-migration from being generated # when there are no changes to the schema # reference: http://alembic.readthedocs.org/en/latest/cookbook.html def process_revision_directives(context, revision, directives): if getattr(config.cmd_opts, 'autogenerate', False): script = directives[0] if script.upgrade_ops.is_empty(): directives[:] = [] logger.info('No changes in schema detected.') engine = engine_from_config(config.get_section(config.config_ini_section), prefix='sqlalchemy.', poolclass=pool.NullPool) connection = engine.connect() context.configure(connection=connection, target_metadata=target_metadata, process_revision_directives=process_revision_directives, **current_app.extensions['migrate'].configure_args) try: with context.begin_transaction(): context.run_migrations() finally: connection.close() if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online()
imbstack/pasquino
migrations/env.py
Python
mit
2,643
0.001135
## # api.py # # This file is the workhorse for the the entire web application. # It implements and provides the API required for the iOS portion # of the project as well as interacting with Google's datastore # for persistent storage of our models. ## # for sending mail from google.appengine.api import mail # Used in conjunction with the geomodel library for doing # proximity based searches from google.appengine.ext.db import GeoPt from geo import geotypes # HttpResponse is what all Django-based views must return # to render the output. In our web application the # _json* methods build and return HttpResponse objects # for rendering JSON dat from django.http import HttpResponse # For encoding Python objects into JSON strings from django.utils import simplejson # Our datastore models from model import * # For handling user sessions from appengine_utilities.sessions import Session # Provides the sha1 module we use for hashing passwords import hashlib # The Python loggin module. We use the basicConfig method # to setup to log to the console (or GoogleAppEngineLauncher # logs screen) import logging logging.basicConfig(level=logging.DEBUG) ## # CONSTANTS ## """ The email address to send from. See the Notes section of the README for more information on what to set this to. """ SENDER_EMAIL_ADDRESS = "VALID@APPENGINE_ADDRESS.COM" ## # UTILITY METHODS ## def _hash_password(password): """ Returns a sha1-hashed version of the given plaintext password. """ return hashlib.sha1(password).hexdigest() def _json_response(success=True, msg="OK", **kwargs): """ Helper method to build an HTTPResponse with a stock JSON object. 
@param success=True: indicates success or failure of the API method @param msg: string with details on success or failure @kwargs: any number of key/value pairs to be sent with the JSON object """ # build up the response data and convert it to a string using the # simplejson module response_data = dict(success=success, msg=msg) response_data.update(kwargs) response_string = simplejson.dumps(response_data) # All views must return a valid HttpResponse object so build it and # set the JSON string and mimetype indicating that the result is # JSON return HttpResponse(response_string, mimetype="application/json") def _json_unauthorized_response(**kwargs): """ Helper method to build an HTTPResponse with a stock JSON object that represents unauthorized access to an API method. NOTE: Always returns success=false and msg="Unauthorized" @kwargs: any number of key/value pairs to be sent with the JSON object """ # Same process as _json_response method, accept always return false and # an Unauthorized message with a status code of 401 response_data = dict(success=False, msg="Unauthorized") response_data.update(kwargs) response_string = simplejson.dumps(response_data) return HttpResponse(response_string, status=401, mimetype="application/json") ## # DECORATORS # # For more information about decorators in Python see: # # http://www.python.org/dev/peps/pep-0318/ # http://wiki.python.org/moin/PythonDecorators # http://www.ibm.com/developerworks/linux/library/l-cpdecor.html # Google... ## # Usage: @validate_request(method, p1, p2, ...) def validate_request(method, *params): """ Decorator for validating the required request method for an API call as well as enforcing any required parameters in the request. If either the method or parameter checks fail a stock failure JSON object is returned with the exact issue in the msg field. If all checks pass then the API call proceeds. 
""" def _dec(view_func): def _view(request, *args, **kwargs): # check the required method if request.method == method: # check that each parameter exists and has a value for param in params: value = request.REQUEST.get(param, "") if not value: # failed parameter check return _json_response(success=False, msg="'%s' is required." % param) # return the original API call through return view_func(request, *args, **kwargs) else: # failed method check return _json_response(success=False, msg="%s requests are not allowed." % request.method) return _view return _dec # Usage: @validate_session() def validate_session(): """ Decorator for validating that a user is authenticated by checking the session for a user object. If this fails the stock json_unauthorized_response is returned or else the API call is allowed to proceed. """ def _dec(view_func): def _view(request, *args, **kwargs): # get the session and check for a user, fail if it doesn't exist if Session().get("user") is None: # failed request return _json_unauthorized_response() # return the original API call through return view_func(request, *args, **kwargs) return _view return _dec ## # API METHODS ## @validate_session() @validate_request("POST", "question", "latitude", "longitude", "pay_key") def ask(request): """ API Method - /ask Creates a new Question and adds it to the datastore @method POST @param question: the text of the question @param latitude: latitude of the location @param longitude: longitude of the location @param pay_key: the pay key from a successful PayPal purchase @returns stock success or failure JSON response along with the question and user objects. 
""" # authenticated user user = Session().get("user") # required parameters question = request.REQUEST.get("question") latitude = float(request.REQUEST.get("latitude")) longitude = float(request.REQUEST.get("longitude")) pay_key = request.REQUEST.get("pay_key") # Using the PayKey you could validate it using PayPal APIs # to confirm that a user paid and the transaction is complete. # This is left up to the curious coder to implement :) # Create the question with the required fields and tie it # to the authenticated user q = Question(question=question, location=GeoPt(latitude, longitude), user=user) q.update_location() q.put() # return stock JSON with the Question object details return _json_response(question=q.to_json(), user=user.to_json()) @validate_session() @validate_request("POST", "question_id", "answer") def answer(request): """ API Method - /answer Creates a new Answer object and adds it to the datastore. Validates that the question exists and does not have an accepted answer before accepting the answer. This method also takes care of sending the owner of the question an email saying a new answer has been given with the answer in the body of the message. 
@method POST @param question_id: id of an existing question @param answer: the text for the answer to a question @returns one answer object """ # session and authenticated user user = Session().get("user") # required parameters question_id = int(request.REQUEST.get("question_id")) answer = request.REQUEST.get("answer") # find the question associated with the question_id parameter question = Question.get_by_id(question_id) # no question with the given id if question is None: return _json_response(success=False, msg="Question does not exist.") # question has already been answered if question.closed: return _json_response(success=False, msg="Question has an accepted answer and is now closed.") # create a new answer and save it to the datastore a = Answer(user=user, question=question, answer=answer) a.put() # send an email to the owner of the question question_owner_email = question.user.email mail.send_mail(sender=SENDER_EMAIL_ADDRESS, to=question_owner_email, subject="Your question has a new answer!", body=""" This is to inform you that one of your questions has received a new answer. Your question: %s The answer: %s Regards, Inquire Application """ % (question.question, answer)) # return stock JSON with details of the answer object return _json_response(answer=a.to_json()) @validate_session() @validate_request("POST", "answer_id") def accept(request): """ API Method - /accept Accepts an answer for a question. The question must be owned by the current authenticated user accepting the question and not already have an accepted answer. This method also takes care of sending the owner of the answer an email saying their answer was accepted. The accepted answer owner will also be given one karma point. 
@method POST @param answer_id: id of the answer being accepted @returns stock JSON object """ # session and authenticated user user = Session().get("user") # required parameters answer_id = int(request.REQUEST.get("answer_id")) # find the answer associated with the answer_id answer = Answer.get_by_id(answer_id) # no answer with the given id if answer is None: return _json_response(success=False, msg="Answer does not exist.") # associated question question = answer.question # make sure the question for this answer is owned by this user question = answer.question if question.user.key().id() != user.key().id(): return _json_response(success=False, msg="You must be the owner of the question to accept an answer.") # also make sure the question is not already answered if question.closed: return _json_response(success=False, msg="Question already has an accepted answer.") # change the accepted flag of the answer and save it. answer.accepted_answer = True answer.put() # close the question and save it question.closed = True question.put() # update the answer owner's karma points answer.user.karma += 1 answer.user.put() # send an email to the address assigned to the answer answer_owner_email = answer.user.email mail.send_mail(sender=SENDER_EMAIL_ADDRESS, to=answer_owner_email, subject="Your answer was accepted!", body=""" This is to inform you that one of your answers has been accepted! You have been given one karma point. The question you answered: %s Your answer: %s Regards, Inquire Application """ % (question.question, answer.answer)) # return stock success JSON return _json_response() @validate_session() @validate_request("GET", "question_id") def answers(request): """ API Method - /answers Returns a list of answers for a given question id. 
@method GET @param question_id: The question id to retrieve answers for @returns list of answer objects """ # required parameters question_id = int(request.GET.get("question_id")) # retrieve the matching question question = Question.get_by_id(question_id) if question is None: return _json_response(success=False, msg="Question does not exist!") return _json_response(answers=[a.to_json() for a in question.answer_set]) @validate_session() @validate_request("GET", "latitude", "longitude") def questions(request): """ API Method - /questions Returns a list of questions that are within geographical proximity to the passed in latitude/longitude. @method GET @param latitude: latitude of the location @param longitude longitude of the location @optional max_results: max number of questions to return, default=25 @optional max_distance: max distance to search in miles @returns list of question objects """ # required parameters latitude = float(request.GET.get("latitude")) longitude = float(request.GET.get("longitude")) # defines the center of our proximity search # geotypes.Point provided by geomodel project center = geotypes.Point(latitude, longitude) # default max_results = int(request.GET.get("max_results", 25)) # 25 results default max_distance = int(request.GET.get("max_distance", 50)) # 50 mile default # convert miles to kilometers max_distance = 1000*max_distance/0.621371192 # Get all unclosed questions within the proximity max_distance and # limit to max_results base_query = Question.all().filter("closed =", False) questions = Question.proximity_fetch(base_query, center, max_results=max_results, max_distance=max_distance) return _json_response(questions=[q.to_json() for q in questions]) @validate_request("POST", "email", "password") def register(request): """ API Method - /register Creates a new user and adds it to the datastore. If a user already exists with the given email address the request fails and an appropriate JSON response is returned. 
@method POST @param email: email address for the user @param password: password for the user @returns newly created user object or failure JSON """ # required parameters email = request.POST.get("email") password = request.POST.get("password") # users = User.all() users.filter("email =", email) if users.count() != 0: return _json_response(success=False, msg="Email address already exists.", users=users.count()) password = _hash_password(password) new_user = User(email=email, password=password) new_user.put() return _json_response() def logout(request): """ API Method - /logout Destroys the active user's session object. Any further use of protected API methods will require a new session via the auth API. @method GET @returns stock JSON response """ # delete session and return stock JSON response with a msg # indicating the user has logged out session = Session() session.delete() return _json_response(msg="User has been logged out.") @validate_request("POST", "email", "password") def auth(request): """ API Method - /auth If credentials are correct a new session is created for this user which authorizes them to use protected API methods. @method POST @param email: user's email address @param password: user's password @returns stock JSON response """ # required parameters email = request.POST.get("email") password = request.POST.get("password") # hash the password password = _hash_password(password) # Look up a User object that matches the email/password users = User.all() users \ .filter("email =", email) \ .filter("password =", password) # No user found, return a failure message if users.count() == 0: return _json_response(success=False, msg="Email or password is invalid.") # Somehow more than one client with the same user/password have # been created, which should never happen. Error out here. if users.count() > 1: return _json_response(details=None, success=False, msg="Internal security error. 
Contact an administrator") # Pull the User from the datastore user = users.get() # Build a new session object and store the user session = Session() session["user"] = user # return stock JSON with user details return _json_response(user=user.to_json()) # Utility method for generating random questions around a # given point. The point is currently Apple's headquarters # so this works well with testing with the simulator. def randomize(request): import random # location to generate questions around near_lat, near_lon = 37.331693, -122.030457 # ~50 miles dx = 50.0/69.0 # Number of questions to generate num_questions = 10 # Possible users to assign questions to. These # users will be looked up by the email addresses # supply in this list and they must exist email_accounts = ["email1@example.com", "email2@example.com"] # no more editing # look up the user objects associated with the # given email addresses users = [] for email in email_accounts: user = User.all().filter("email =", email).get() if user is not None: users.append(user) # return false if there were no user objects found if not users: return _json_response(success=False, msg="No users found") # generate num_questions random questions around the given # point (near_lat, near_lon) within some distance dx and # assigning a random user to the question for i in range(num_questions): lat = random.uniform(near_lat-dx, near_lat+dx) lon = random.uniform(near_lon-dx, near_lon+dx) user = random.sample(users, 1)[0] q = Question(user=user, question="Question %d" % i, location=db.GeoPt(lat, lon)) q.update_location() q.put() # return true return _json_response()
zaffra/Inquire
GAE/api.py
Python
bsd-3-clause
17,740
0.003439
"""Cross-match APOGEE, APOKASC and LAMOST catalogs and plot the overlap.

Builds the three-way APOGEE-APOKASC-LAMOST star overlap, pulls the stellar
labels (Teff, logg, [Fe/H], [alpha/Fe]) from each survey for the overlapping
stars, and plots APOKASC labels against LAMOST labels panel by panel.
"""
import numpy as np
import os

# APOGEE-APOKASC overlap: array of APOGEE IDs present in both surveys
inputf = "/home/annaho/TheCannon/examples/example_apokasc/apokasc_DR12_overlap.npz"
apogee_apokasc = np.load(inputf)['arr_0']

# APOGEE-LAMOST overlap: one data file per overlapping star
inputf = "/home/annaho/TheCannon/examples/example_DR12/Data"
apogee_lamost = np.array(os.listdir(inputf))

# APOGEE-APOKASC-LAMOST three-way overlap
overlap = np.intersect1d(apogee_lamost, apogee_apokasc)  # 530 stars

# the two key files are sorted identically (by RA), so index i in one
# corresponds to index i in the other
apogee_key = np.loadtxt("apogee_sorted_by_ra.txt", dtype=str)
lamost_key = np.loadtxt("lamost_sorted_by_ra.txt", dtype=str)
inds = np.array([np.where(apogee_key == a)[0][0] for a in overlap])
overlap_lamost = lamost_key[inds]
np.savez("apogee_apokasc_lamost_overlap.npz", overlap)

# get all APOGEE parameters
label_file = "apogee_dr12_labels.csv"
apogee_id_all = np.loadtxt(label_file, usecols=(1,), delimiter=',', dtype=str)
apogee_labels_all = np.loadtxt(
    label_file, usecols=(2, 3, 4, 5), delimiter=',', dtype=float)
inds = np.array([np.where(apogee_id_all == a)[0][0] for a in overlap])
apogee_id = apogee_id_all[inds]
apogee_labels = apogee_labels_all[inds, :]

# get all APOKASC parameters
apokasc_id_all = np.load("example_apokasc/apokasc_DR12_overlap.npz")['arr_0']
apokasc_labels_all = np.load("example_apokasc/tr_label.npz")['arr_0']
inds = np.array([np.where(apokasc_id_all == a)[0][0] for a in overlap])
apokasc_id = apokasc_id_all[inds]
apokasc_labels = apokasc_labels_all[inds]

# get all LAMOST parameters
inputf = "/home/annaho/TheCannon/examples/test_training_overlap/lamost_sorted_by_ra_with_dr2_params.txt"
lamost_id_all = np.loadtxt(inputf, usecols=(0,), dtype=str)
lamost_labels_all = np.loadtxt(inputf, usecols=(3, 4, 5), dtype=float)
inds = np.array([np.where(lamost_id_all == a)[0][0] for a in overlap_lamost])
lamost_id = lamost_id_all[inds]
lamost_labels = lamost_labels_all[inds]

# plot them against each other
# FIX: `subplots` was previously called as a bare name without being
# imported (NameError); import pyplot and call plt.subplots instead.
import matplotlib.pyplot as plt
from matplotlib import rc
rc('font', family='serif')
rc('text', usetex=True)

names = [r"$T_{eff}$", r"$\log g$", r"$[Fe/H]$", r"$[\alpha/Fe]$"]


def plot(ax, x, y, i):
    """Scatter label column i of x vs y on ax with a 1:1 reference line."""
    ax.scatter(x[:, i], y[:, i], c='k')
    # draw the 1:1 line across the full data range without letting it
    # expand the axis limits
    xlim = ax.get_xlim()
    ylim = ax.get_ylim()
    ax.plot([-10000, 10000], [-10000, 10000], c='r')
    ax.set_xlim(xlim)
    ax.set_ylim(ylim)
    ax.set_title(names[i])


x = apokasc_labels
y = lamost_labels

fig, axarr = plt.subplots(2, 2)
ax = axarr[0, 0]
plot(ax, x, y, 0)
ax = axarr[0, 1]
plot(ax, x, y, 1)
ax = axarr[1, 0]
plot(ax, x, y, 2)
#ax = axarr[1,1]
#plot(ax, x, y, 3)

fig.text(0.5, 0.01, "Kepler APOKASC", ha='center', va='bottom', fontsize=18)
fig.text(0.01, 0.5, "LAMOST", ha='left', va='center', rotation=90,
         fontsize=18)
annayqho/TheCannon
code/apokasc_lamost/kepler_apogee_lamost_overlap.py
Python
mit
2,584
0.011223
# # OpenCenter(TM) is Copyright 2013 by Rackspace US, Inc. ############################################################################## # # OpenCenter is licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. This # version of OpenCenter includes Rackspace trademarks and logos, and in # accordance with Section 6 of the License, the provision of commercial # support services in conjunction with a version of OpenCenter which includes # Rackspace trademarks and logos is prohibited. OpenCenter source code and # details are available at: # https://github.com/rcbops/opencenter or upon # written request. # # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 and a copy, including this # notice, is available in the LICENSE file accompanying this software. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the # specific language governing permissions and limitations # under the License. 
#
##############################################################################

from util import OpenCenterTestCase

import opencenter.db.api as db_api
from opencenter.webapp import ast

# module-level API handle shared by all tests in this file
api = db_api.api_from_models()


class ExpressionTestCase(OpenCenterTestCase):
    """Tests for the opencenter filter/expression AST: evaluation,
    inversion, regularization and application of expressions against
    node facts."""

    def setUp(self):
        # two nodes ('node-1', 'container') and one interface filter
        # ('chef', matching facts.x = true) used across the tests
        self.nodes = {}
        self.interfaces = {}
        self.nodes['node-1'] = self._model_create('nodes', name='node-1')
        self.interfaces['chef'] = self._model_create('filters', name='chef',
                                                     filter_type='interface',
                                                     expr='facts.x = true')
        self.nodes['container'] = self._model_create('nodes',
                                                     name='container')

    def tearDown(self):
        self._clean_all()

    # NOTE(review): the ns={} mutable default on the helpers below is safe
    # only because the dict is never mutated here; a None default would be
    # the conventional form.
    def _run_expression(self, node, expression, ns={}):
        # Evaluate `expression` against `node` using the shared api.
        builder = ast.FilterBuilder(ast.FilterTokenizer(), expression,
                                    api=api)
        root_node = builder.build()
        return root_node.eval_node(node, symbol_table=ns)

    def _simple_expression(self, expression):
        # Evaluate an expression scoped to node-1 ('nodes: <expr>').
        node = self._model_get_by_id('nodes', self.nodes['node-1']['id'])
        return self._run_expression(node, 'nodes: %s' % expression)

    def _invert_expression(self, expression, ns={}):
        # Return the inverted form(s) of `expression`.
        # NOTE(review): the ns argument is accepted but never used.
        builder = ast.FilterBuilder(ast.FilterTokenizer(), expression)
        root_node = builder.build()
        return root_node.invert()

    def _eval_expression(self, expression, node_id, ns={}):
        # Apply `expression` to the node on an ephemeral copy of the api
        # and return the node as it looks after evaluation, so the real
        # datastore is untouched.
        ephemeral_api = db_api.ephemeral_api_from_api(api)
        builder = ast.FilterBuilder(ast.FilterTokenizer(), expression,
                                    api=ephemeral_api)
        node = ephemeral_api._model_get_by_id('nodes', node_id)
        builder.eval_node(node, symbol_table=ns)
        new_node = ephemeral_api._model_get_by_id('nodes', node_id)
        return new_node

    def test_bad_interface(self):
        # ifcount() on an unknown interface name raises SyntaxError
        expression = "ifcount('blahblah') > 0"
        self.assertRaises(SyntaxError,
                          self._run_expression,
                          self.nodes['node-1'], expression)

    def test_zero_ifcount(self):
        # node-1 has no matching facts yet, so the count is zero
        expression = "ifcount('chef') > 0"
        result = self._run_expression(self.nodes['node-1'], expression)
        self.logger.debug('Got result: %s' % result)
        self.assertEquals(result, False)

    def test_valid_ifcount(self):
        # once facts.x=true exists, node-1 matches the 'chef' filter
        expression = "ifcount('chef') > 0"
        self._model_create('facts', node_id=self.nodes['node-1']['id'],
                           key='x', value=True)
        result = self._run_expression(self.nodes['node-1'], expression)
        self.logger.debug('Got result: %s' % result)
        self.assertEquals(result, True)

    def test_invert_equals(self):
        # '=' inverts to the assignment operator ':='
        expression = "facts.test = 'test'"
        result = self._invert_expression(expression)
        self.assertEquals(result, ["facts.test := 'test'"])

    def test_invert_and(self):
        # 'and' inverts to one assignment per conjunct
        expression = "facts.test='test' and facts.x='x'"
        result = self._invert_expression(expression)
        self.assertTrue("facts.test := 'test'" in result)
        self.assertTrue("facts.x := 'x'" in result)

    def test_invert_in(self):
        # 'in' inverts to a union() assignment
        expression = "'test' in facts.foo"
        result = self._invert_expression(expression)
        self.assertTrue("facts.foo := union(facts.foo, 'test')" in result)
        self.assertEquals(len(result), 1)

    def test_invert_not_in(self):
        # '!in' inverts to a remove() assignment
        expression = "'test' !in facts.foo"
        result = self._invert_expression(expression)
        self.assertTrue("facts.foo := remove(facts.foo, 'test')" in result)
        self.assertEquals(len(result), 1)

    def test_eval_assign(self):
        # ':=' writes the fact on the (ephemeral) node
        node_id = self.nodes['node-1']['id']
        expression = "facts.parent_id := %d" % int(
            self.nodes['container']['id'])
        node = self._eval_expression(expression, node_id)
        self.assertEquals(node['facts'].get('parent_id', None),
                          self.nodes['container']['id'])

    def test_eval_union(self):
        # union() on a missing fact yields a single-element list
        node_id = self.nodes['node-1']['id']
        expression = "facts.woof := union(facts.woof, 3)"
        node = self._eval_expression(expression, node_id)
        self.assertEquals(node['facts']['woof'], [3])

    def test_eval_remove(self):
        node_id = self.nodes['node-1']['id']

        fact = self._model_create('facts', node_id=node_id,
                                  key='array_fact', value=[1, 2])

        expression = 'facts.array_fact := remove(facts.array_fact, 2)'
        node = self._eval_expression(expression, node_id)
        self.assertEquals(node['facts']['array_fact'], [1])

        # verify removing from none returns none.  This is perhaps
        # questionable, but is inline with the rest of the none/empty
        # behavior.  It could probably also return [], but enforce
        # current behavior
        self._model_delete('facts', fact['id'])
        expression = 'facts.array_fact := remove(facts.array_fact, "test")'
        node = self._eval_expression(expression, node_id)
        self.assertEquals(node['facts']['array_fact'], None)

        # verify removing from a non-list raises SyntaxError
        self._model_create('facts', node_id=node_id,
                           key='array_fact', value='non-array')
        expression = 'facts.array_fact := remove(facts.array_fact, "whoops")'
        self.assertRaises(SyntaxError, self._eval_expression,
                          expression, node_id)

    def test_eval_namespaces(self):
        # names in the symbol table are resolved during evaluation
        node_id = self.nodes['node-1']['id']
        expression = "facts.parent_id := value"
        ns = {"value": self.nodes['container']['id']}
        node = self._eval_expression(expression, node_id, ns)
        self.assertEquals(node['facts'].get('parent_id', None),
                          self.nodes['container']['id'])

    # test the inverter and regularizer functions
    def test_regularize_expression(self):
        # regularization normalizes whitespace around operators
        expression = 'foo=value'
        regular = ast.regularize_expression(expression)
        self.logger.debug('Got regularized expression "%s" for "%s"' %
                          (regular, expression))
        self.assertEquals(regular, 'foo = value')

    def test_inverted_expression(self):
        expression = 'foo=value'
        inverted = ast.invert_expression(expression)
        self.logger.debug('Got inverted expression "%s" for "%s"' %
                          (inverted, expression))
        self.assertEquals(len(inverted), 1)
        self.assertEquals(inverted[0], 'foo := value')

    def test_inverted_union(self):
        # inverting a union assignment gives back the 'in' test
        expression = 'facts.test := union(facts.test, test)'
        inverted = ast.invert_expression(expression)
        self.logger.debug('Got inverted expression "%s" for "%s"' %
                          (inverted, expression))
        self.assertEquals(len(inverted), 1)
        self.assertEquals(inverted[0], 'test in facts.test')

    def test_inverted_remove(self):
        # inverting a remove assignment gives back the '!in' test
        expression = 'facts.test := remove(facts.test, test)'
        inverted = ast.invert_expression(expression)
        self.logger.debug('Got inverted expression "%s" for "%s"' %
                          (inverted, expression))
        self.assertEquals(len(inverted), 1)
        self.assertEquals(inverted[0], 'test !in facts.test')

    def test_concrete_expression(self):
        expression = "foo = value"
        ns = {"value": 3}
        concrete = ast.concrete_expression(expression, ns)
        self.logger.debug('Got concrete expression "%s" for "%s"' %
                          (concrete, expression))
        # TODO(rpedde): This does not work like you think it does
        # self.assertTrue('foo = 3', concrete)
        # Using an assertEquals of the above fails
        # self.assertEquals(concrete, 'foo = 3')
        # But this works
        self.assertEquals(concrete, 'foo = value')

    def test_apply_expression(self):
        expression = 'facts.test := union(facts.test, "test")'
        node = self._model_get_by_id('nodes', self.nodes['node-1']['id'])

        # make sure we are applying into an empty fact
        self.assertFalse('test' in node['facts'])

        ast.apply_expression(self.nodes['node-1']['id'], expression, api)

        node = self._model_get_by_id('nodes', self.nodes['node-1']['id'])
        self.assertTrue('test' in node['facts'])
        self.assertEquals(node['facts']['test'], ['test'])

    # FIXME: when we get types
    def test_util_nth_with_none(self):
        expression = 'nth(0, facts.test)'
        # nth of none?
        res = self._simple_expression(expression)
        self.assertIsNone(res)

    # FIXME: when we get types
    def test_util_nth_not_integer(self):
        expression = 'nth("a", facts.test)'
        # raise with type error?
        res = self._simple_expression(expression)
        self.assertIsNone(res)

    # FIXME: when we get types
    def test_util_nth_index_out_of_range(self):
        self._model_create('facts', node_id=self.nodes['node-1']['id'],
                           key='test', value=[1, 2, 3])
        # NOTE(review): `is 3` is an identity comparison that only passes
        # because CPython interns small ints; assertEquals would be the
        # robust form.
        self.assertTrue(self._simple_expression('nth(2, facts.test)') is 3)
        self.assertIsNone(self._simple_expression('nth(3, facts.test)'))

    # FIXME: when we get types
    def test_str_casting_none(self):
        # this should fail, too, I think
        self.assertIsNone(self._simple_expression('str(facts.test)'))

        self._model_create('facts', node_id=self.nodes['node-1']['id'],
                           key='test', value=[1, 2, 3])
        self.assertEquals(self._simple_expression('str(facts.test)'),
                          '[1, 2, 3]')

        self._model_create('facts', node_id=self.nodes['node-1']['id'],
                           key='test', value=1)
        self.assertEquals(self._simple_expression('str(facts.test)'), '1')
rcbops/opencenter
tests/test_expressions.py
Python
apache-2.0
11,034
0
from os.path import join, dirname import numpy as np from numpy.testing import assert_array_almost_equal, assert_equal import pytest from pytest import raises as assert_raises from scipy.fftpack._realtransforms import ( dct, idct, dst, idst, dctn, idctn, dstn, idstn) # Matlab reference data MDATA = np.load(join(dirname(__file__), 'test.npz')) X = [MDATA['x%d' % i] for i in range(8)] Y = [MDATA['y%d' % i] for i in range(8)] # FFTW reference data: the data are organized as follows: # * SIZES is an array containing all available sizes # * for every type (1, 2, 3, 4) and every size, the array dct_type_size # contains the output of the DCT applied to the input np.linspace(0, size-1, # size) FFTWDATA_DOUBLE = np.load(join(dirname(__file__), 'fftw_double_ref.npz')) FFTWDATA_SINGLE = np.load(join(dirname(__file__), 'fftw_single_ref.npz')) FFTWDATA_SIZES = FFTWDATA_DOUBLE['sizes'] def fftw_dct_ref(type, size, dt): x = np.linspace(0, size-1, size).astype(dt) dt = np.result_type(np.float32, dt) if dt == np.double: data = FFTWDATA_DOUBLE elif dt == np.float32: data = FFTWDATA_SINGLE else: raise ValueError() y = (data['dct_%d_%d' % (type, size)]).astype(dt) return x, y, dt def fftw_dst_ref(type, size, dt): x = np.linspace(0, size-1, size).astype(dt) dt = np.result_type(np.float32, dt) if dt == np.double: data = FFTWDATA_DOUBLE elif dt == np.float32: data = FFTWDATA_SINGLE else: raise ValueError() y = (data['dst_%d_%d' % (type, size)]).astype(dt) return x, y, dt def dct_2d_ref(x, **kwargs): """Calculate reference values for testing dct2.""" x = np.array(x, copy=True) for row in range(x.shape[0]): x[row, :] = dct(x[row, :], **kwargs) for col in range(x.shape[1]): x[:, col] = dct(x[:, col], **kwargs) return x def idct_2d_ref(x, **kwargs): """Calculate reference values for testing idct2.""" x = np.array(x, copy=True) for row in range(x.shape[0]): x[row, :] = idct(x[row, :], **kwargs) for col in range(x.shape[1]): x[:, col] = idct(x[:, col], **kwargs) return x def dst_2d_ref(x, 
**kwargs): """Calculate reference values for testing dst2.""" x = np.array(x, copy=True) for row in range(x.shape[0]): x[row, :] = dst(x[row, :], **kwargs) for col in range(x.shape[1]): x[:, col] = dst(x[:, col], **kwargs) return x def idst_2d_ref(x, **kwargs): """Calculate reference values for testing idst2.""" x = np.array(x, copy=True) for row in range(x.shape[0]): x[row, :] = idst(x[row, :], **kwargs) for col in range(x.shape[1]): x[:, col] = idst(x[:, col], **kwargs) return x def naive_dct1(x, norm=None): """Calculate textbook definition version of DCT-I.""" x = np.array(x, copy=True) N = len(x) M = N-1 y = np.zeros(N) m0, m = 1, 2 if norm == 'ortho': m0 = np.sqrt(1.0/M) m = np.sqrt(2.0/M) for k in range(N): for n in range(1, N-1): y[k] += m*x[n]*np.cos(np.pi*n*k/M) y[k] += m0 * x[0] y[k] += m0 * x[N-1] * (1 if k % 2 == 0 else -1) if norm == 'ortho': y[0] *= 1/np.sqrt(2) y[N-1] *= 1/np.sqrt(2) return y def naive_dst1(x, norm=None): """Calculate textbook definition version of DST-I.""" x = np.array(x, copy=True) N = len(x) M = N+1 y = np.zeros(N) for k in range(N): for n in range(N): y[k] += 2*x[n]*np.sin(np.pi*(n+1.0)*(k+1.0)/M) if norm == 'ortho': y *= np.sqrt(0.5/M) return y def naive_dct4(x, norm=None): """Calculate textbook definition version of DCT-IV.""" x = np.array(x, copy=True) N = len(x) y = np.zeros(N) for k in range(N): for n in range(N): y[k] += x[n]*np.cos(np.pi*(n+0.5)*(k+0.5)/(N)) if norm == 'ortho': y *= np.sqrt(2.0/N) else: y *= 2 return y def naive_dst4(x, norm=None): """Calculate textbook definition version of DST-IV.""" x = np.array(x, copy=True) N = len(x) y = np.zeros(N) for k in range(N): for n in range(N): y[k] += x[n]*np.sin(np.pi*(n+0.5)*(k+0.5)/(N)) if norm == 'ortho': y *= np.sqrt(2.0/N) else: y *= 2 return y class TestComplex: def test_dct_complex64(self): y = dct(1j*np.arange(5, dtype=np.complex64)) x = 1j*dct(np.arange(5)) assert_array_almost_equal(x, y) def test_dct_complex(self): y = dct(np.arange(5)*1j) x = 
1j*dct(np.arange(5)) assert_array_almost_equal(x, y) def test_idct_complex(self): y = idct(np.arange(5)*1j) x = 1j*idct(np.arange(5)) assert_array_almost_equal(x, y) def test_dst_complex64(self): y = dst(np.arange(5, dtype=np.complex64)*1j) x = 1j*dst(np.arange(5)) assert_array_almost_equal(x, y) def test_dst_complex(self): y = dst(np.arange(5)*1j) x = 1j*dst(np.arange(5)) assert_array_almost_equal(x, y) def test_idst_complex(self): y = idst(np.arange(5)*1j) x = 1j*idst(np.arange(5)) assert_array_almost_equal(x, y) class _TestDCTBase: def setup_method(self): self.rdt = None self.dec = 14 self.type = None def test_definition(self): for i in FFTWDATA_SIZES: x, yr, dt = fftw_dct_ref(self.type, i, self.rdt) y = dct(x, type=self.type) assert_equal(y.dtype, dt) # XXX: we divide by np.max(y) because the tests fail otherwise. We # should really use something like assert_array_approx_equal. The # difference is due to fftw using a better algorithm w.r.t error # propagation compared to the ones from fftpack. assert_array_almost_equal(y / np.max(y), yr / np.max(y), decimal=self.dec, err_msg="Size %d failed" % i) def test_axis(self): nt = 2 for i in [7, 8, 9, 16, 32, 64]: x = np.random.randn(nt, i) y = dct(x, type=self.type) for j in range(nt): assert_array_almost_equal(y[j], dct(x[j], type=self.type), decimal=self.dec) x = x.T y = dct(x, axis=0, type=self.type) for j in range(nt): assert_array_almost_equal(y[:,j], dct(x[:,j], type=self.type), decimal=self.dec) class _TestDCTIBase(_TestDCTBase): def test_definition_ortho(self): # Test orthornomal mode. dt = np.result_type(np.float32, self.rdt) for xr in X: x = np.array(xr, dtype=self.rdt) y = dct(x, norm='ortho', type=1) y2 = naive_dct1(x, norm='ortho') assert_equal(y.dtype, dt) assert_array_almost_equal(y / np.max(y), y2 / np.max(y), decimal=self.dec) class _TestDCTIIBase(_TestDCTBase): def test_definition_matlab(self): # Test correspondence with MATLAB (orthornomal mode). 
dt = np.result_type(np.float32, self.rdt) for xr, yr in zip(X, Y): x = np.array(xr, dtype=dt) y = dct(x, norm="ortho", type=2) assert_equal(y.dtype, dt) assert_array_almost_equal(y, yr, decimal=self.dec) class _TestDCTIIIBase(_TestDCTBase): def test_definition_ortho(self): # Test orthornomal mode. dt = np.result_type(np.float32, self.rdt) for xr in X: x = np.array(xr, dtype=self.rdt) y = dct(x, norm='ortho', type=2) xi = dct(y, norm="ortho", type=3) assert_equal(xi.dtype, dt) assert_array_almost_equal(xi, x, decimal=self.dec) class _TestDCTIVBase(_TestDCTBase): def test_definition_ortho(self): # Test orthornomal mode. dt = np.result_type(np.float32, self.rdt) for xr in X: x = np.array(xr, dtype=self.rdt) y = dct(x, norm='ortho', type=4) y2 = naive_dct4(x, norm='ortho') assert_equal(y.dtype, dt) assert_array_almost_equal(y / np.max(y), y2 / np.max(y), decimal=self.dec) class TestDCTIDouble(_TestDCTIBase): def setup_method(self): self.rdt = np.double self.dec = 10 self.type = 1 class TestDCTIFloat(_TestDCTIBase): def setup_method(self): self.rdt = np.float32 self.dec = 4 self.type = 1 class TestDCTIInt(_TestDCTIBase): def setup_method(self): self.rdt = int self.dec = 5 self.type = 1 class TestDCTIIDouble(_TestDCTIIBase): def setup_method(self): self.rdt = np.double self.dec = 10 self.type = 2 class TestDCTIIFloat(_TestDCTIIBase): def setup_method(self): self.rdt = np.float32 self.dec = 5 self.type = 2 class TestDCTIIInt(_TestDCTIIBase): def setup_method(self): self.rdt = int self.dec = 5 self.type = 2 class TestDCTIIIDouble(_TestDCTIIIBase): def setup_method(self): self.rdt = np.double self.dec = 14 self.type = 3 class TestDCTIIIFloat(_TestDCTIIIBase): def setup_method(self): self.rdt = np.float32 self.dec = 5 self.type = 3 class TestDCTIIIInt(_TestDCTIIIBase): def setup_method(self): self.rdt = int self.dec = 5 self.type = 3 class TestDCTIVDouble(_TestDCTIVBase): def setup_method(self): self.rdt = np.double self.dec = 12 self.type = 3 class 
TestDCTIVFloat(_TestDCTIVBase): def setup_method(self): self.rdt = np.float32 self.dec = 5 self.type = 3 class TestDCTIVInt(_TestDCTIVBase): def setup_method(self): self.rdt = int self.dec = 5 self.type = 3 class _TestIDCTBase: def setup_method(self): self.rdt = None self.dec = 14 self.type = None def test_definition(self): for i in FFTWDATA_SIZES: xr, yr, dt = fftw_dct_ref(self.type, i, self.rdt) x = idct(yr, type=self.type) if self.type == 1: x /= 2 * (i-1) else: x /= 2 * i assert_equal(x.dtype, dt) # XXX: we divide by np.max(y) because the tests fail otherwise. We # should really use something like assert_array_approx_equal. The # difference is due to fftw using a better algorithm w.r.t error # propagation compared to the ones from fftpack. assert_array_almost_equal(x / np.max(x), xr / np.max(x), decimal=self.dec, err_msg="Size %d failed" % i) class TestIDCTIDouble(_TestIDCTBase): def setup_method(self): self.rdt = np.double self.dec = 10 self.type = 1 class TestIDCTIFloat(_TestIDCTBase): def setup_method(self): self.rdt = np.float32 self.dec = 4 self.type = 1 class TestIDCTIInt(_TestIDCTBase): def setup_method(self): self.rdt = int self.dec = 4 self.type = 1 class TestIDCTIIDouble(_TestIDCTBase): def setup_method(self): self.rdt = np.double self.dec = 10 self.type = 2 class TestIDCTIIFloat(_TestIDCTBase): def setup_method(self): self.rdt = np.float32 self.dec = 5 self.type = 2 class TestIDCTIIInt(_TestIDCTBase): def setup_method(self): self.rdt = int self.dec = 5 self.type = 2 class TestIDCTIIIDouble(_TestIDCTBase): def setup_method(self): self.rdt = np.double self.dec = 14 self.type = 3 class TestIDCTIIIFloat(_TestIDCTBase): def setup_method(self): self.rdt = np.float32 self.dec = 5 self.type = 3 class TestIDCTIIIInt(_TestIDCTBase): def setup_method(self): self.rdt = int self.dec = 5 self.type = 3 class TestIDCTIVDouble(_TestIDCTBase): def setup_method(self): self.rdt = np.double self.dec = 12 self.type = 4 class TestIDCTIVFloat(_TestIDCTBase): def 
setup_method(self): self.rdt = np.float32 self.dec = 5 self.type = 4 class TestIDCTIVInt(_TestIDCTBase): def setup_method(self): self.rdt = int self.dec = 5 self.type = 4 class _TestDSTBase: def setup_method(self): self.rdt = None # dtype self.dec = None # number of decimals to match self.type = None # dst type def test_definition(self): for i in FFTWDATA_SIZES: xr, yr, dt = fftw_dst_ref(self.type, i, self.rdt) y = dst(xr, type=self.type) assert_equal(y.dtype, dt) # XXX: we divide by np.max(y) because the tests fail otherwise. We # should really use something like assert_array_approx_equal. The # difference is due to fftw using a better algorithm w.r.t error # propagation compared to the ones from fftpack. assert_array_almost_equal(y / np.max(y), yr / np.max(y), decimal=self.dec, err_msg="Size %d failed" % i) class _TestDSTIBase(_TestDSTBase): def test_definition_ortho(self): # Test orthornomal mode. dt = np.result_type(np.float32, self.rdt) for xr in X: x = np.array(xr, dtype=self.rdt) y = dst(x, norm='ortho', type=1) y2 = naive_dst1(x, norm='ortho') assert_equal(y.dtype, dt) assert_array_almost_equal(y / np.max(y), y2 / np.max(y), decimal=self.dec) class _TestDSTIVBase(_TestDSTBase): def test_definition_ortho(self): # Test orthornomal mode. 
dt = np.result_type(np.float32, self.rdt) for xr in X: x = np.array(xr, dtype=self.rdt) y = dst(x, norm='ortho', type=4) y2 = naive_dst4(x, norm='ortho') assert_equal(y.dtype, dt) assert_array_almost_equal(y, y2, decimal=self.dec) class TestDSTIDouble(_TestDSTIBase): def setup_method(self): self.rdt = np.double self.dec = 12 self.type = 1 class TestDSTIFloat(_TestDSTIBase): def setup_method(self): self.rdt = np.float32 self.dec = 4 self.type = 1 class TestDSTIInt(_TestDSTIBase): def setup_method(self): self.rdt = int self.dec = 5 self.type = 1 class TestDSTIIDouble(_TestDSTBase): def setup_method(self): self.rdt = np.double self.dec = 14 self.type = 2 class TestDSTIIFloat(_TestDSTBase): def setup_method(self): self.rdt = np.float32 self.dec = 6 self.type = 2 class TestDSTIIInt(_TestDSTBase): def setup_method(self): self.rdt = int self.dec = 6 self.type = 2 class TestDSTIIIDouble(_TestDSTBase): def setup_method(self): self.rdt = np.double self.dec = 14 self.type = 3 class TestDSTIIIFloat(_TestDSTBase): def setup_method(self): self.rdt = np.float32 self.dec = 7 self.type = 3 class TestDSTIIIInt(_TestDSTBase): def setup_method(self): self.rdt = int self.dec = 7 self.type = 3 class TestDSTIVDouble(_TestDSTIVBase): def setup_method(self): self.rdt = np.double self.dec = 12 self.type = 4 class TestDSTIVFloat(_TestDSTIVBase): def setup_method(self): self.rdt = np.float32 self.dec = 4 self.type = 4 class TestDSTIVInt(_TestDSTIVBase): def setup_method(self): self.rdt = int self.dec = 5 self.type = 4 class _TestIDSTBase: def setup_method(self): self.rdt = None self.dec = None self.type = None def test_definition(self): for i in FFTWDATA_SIZES: xr, yr, dt = fftw_dst_ref(self.type, i, self.rdt) x = idst(yr, type=self.type) if self.type == 1: x /= 2 * (i+1) else: x /= 2 * i assert_equal(x.dtype, dt) # XXX: we divide by np.max(x) because the tests fail otherwise. We # should really use something like assert_array_approx_equal. 
The # difference is due to fftw using a better algorithm w.r.t error # propagation compared to the ones from fftpack. assert_array_almost_equal(x / np.max(x), xr / np.max(x), decimal=self.dec, err_msg="Size %d failed" % i) class TestIDSTIDouble(_TestIDSTBase): def setup_method(self): self.rdt = np.double self.dec = 12 self.type = 1 class TestIDSTIFloat(_TestIDSTBase): def setup_method(self): self.rdt = np.float32 self.dec = 4 self.type = 1 class TestIDSTIInt(_TestIDSTBase): def setup_method(self): self.rdt = int self.dec = 4 self.type = 1 class TestIDSTIIDouble(_TestIDSTBase): def setup_method(self): self.rdt = np.double self.dec = 14 self.type = 2 class TestIDSTIIFloat(_TestIDSTBase): def setup_method(self): self.rdt = np.float32 self.dec = 6 self.type = 2 class TestIDSTIIInt(_TestIDSTBase): def setup_method(self): self.rdt = int self.dec = 6 self.type = 2 class TestIDSTIIIDouble(_TestIDSTBase): def setup_method(self): self.rdt = np.double self.dec = 14 self.type = 3 class TestIDSTIIIFloat(_TestIDSTBase): def setup_method(self): self.rdt = np.float32 self.dec = 6 self.type = 3 class TestIDSTIIIInt(_TestIDSTBase): def setup_method(self): self.rdt = int self.dec = 6 self.type = 3 class TestIDSTIVDouble(_TestIDSTBase): def setup_method(self): self.rdt = np.double self.dec = 12 self.type = 4 class TestIDSTIVFloat(_TestIDSTBase): def setup_method(self): self.rdt = np.float32 self.dec = 6 self.type = 4 class TestIDSTIVnt(_TestIDSTBase): def setup_method(self): self.rdt = int self.dec = 6 self.type = 4 class TestOverwrite: """Check input overwrite behavior.""" real_dtypes = [np.float32, np.float64] def _check(self, x, routine, type, fftsize, axis, norm, overwrite_x, **kw): x2 = x.copy() routine(x2, type, fftsize, axis, norm, overwrite_x=overwrite_x) sig = "%s(%s%r, %r, axis=%r, overwrite_x=%r)" % ( routine.__name__, x.dtype, x.shape, fftsize, axis, overwrite_x) if not overwrite_x: assert_equal(x2, x, err_msg="spurious overwrite in %s" % sig) def _check_1d(self, routine, 
dtype, shape, axis): np.random.seed(1234) if np.issubdtype(dtype, np.complexfloating): data = np.random.randn(*shape) + 1j*np.random.randn(*shape) else: data = np.random.randn(*shape) data = data.astype(dtype) for type in [1, 2, 3, 4]: for overwrite_x in [True, False]: for norm in [None, 'ortho']: self._check(data, routine, type, None, axis, norm, overwrite_x) def test_dct(self): for dtype in self.real_dtypes: self._check_1d(dct, dtype, (16,), -1) self._check_1d(dct, dtype, (16, 2), 0) self._check_1d(dct, dtype, (2, 16), 1) def test_idct(self): for dtype in self.real_dtypes: self._check_1d(idct, dtype, (16,), -1) self._check_1d(idct, dtype, (16, 2), 0) self._check_1d(idct, dtype, (2, 16), 1) def test_dst(self): for dtype in self.real_dtypes: self._check_1d(dst, dtype, (16,), -1) self._check_1d(dst, dtype, (16, 2), 0) self._check_1d(dst, dtype, (2, 16), 1) def test_idst(self): for dtype in self.real_dtypes: self._check_1d(idst, dtype, (16,), -1) self._check_1d(idst, dtype, (16, 2), 0) self._check_1d(idst, dtype, (2, 16), 1) class Test_DCTN_IDCTN: dec = 14 dct_type = [1, 2, 3, 4] norms = [None, 'ortho'] rstate = np.random.RandomState(1234) shape = (32, 16) data = rstate.randn(*shape) @pytest.mark.parametrize('fforward,finverse', [(dctn, idctn), (dstn, idstn)]) @pytest.mark.parametrize('axes', [None, 1, (1,), [1], 0, (0,), [0], (0, 1), [0, 1], (-2, -1), [-2, -1]]) @pytest.mark.parametrize('dct_type', dct_type) @pytest.mark.parametrize('norm', ['ortho']) def test_axes_round_trip(self, fforward, finverse, axes, dct_type, norm): tmp = fforward(self.data, type=dct_type, axes=axes, norm=norm) tmp = finverse(tmp, type=dct_type, axes=axes, norm=norm) assert_array_almost_equal(self.data, tmp, decimal=12) @pytest.mark.parametrize('fforward,fforward_ref', [(dctn, dct_2d_ref), (dstn, dst_2d_ref)]) @pytest.mark.parametrize('dct_type', dct_type) @pytest.mark.parametrize('norm', norms) def test_dctn_vs_2d_reference(self, fforward, fforward_ref, dct_type, norm): y1 = 
fforward(self.data, type=dct_type, axes=None, norm=norm) y2 = fforward_ref(self.data, type=dct_type, norm=norm) assert_array_almost_equal(y1, y2, decimal=11) @pytest.mark.parametrize('finverse,finverse_ref', [(idctn, idct_2d_ref), (idstn, idst_2d_ref)]) @pytest.mark.parametrize('dct_type', dct_type) @pytest.mark.parametrize('norm', [None, 'ortho']) def test_idctn_vs_2d_reference(self, finverse, finverse_ref, dct_type, norm): fdata = dctn(self.data, type=dct_type, norm=norm) y1 = finverse(fdata, type=dct_type, norm=norm) y2 = finverse_ref(fdata, type=dct_type, norm=norm) assert_array_almost_equal(y1, y2, decimal=11) @pytest.mark.parametrize('fforward,finverse', [(dctn, idctn), (dstn, idstn)]) def test_axes_and_shape(self, fforward, finverse): with assert_raises(ValueError, match="when given, axes and shape arguments" " have to be of the same length"): fforward(self.data, shape=self.data.shape[0], axes=(0, 1)) with assert_raises(ValueError, match="when given, axes and shape arguments" " have to be of the same length"): fforward(self.data, shape=self.data.shape[0], axes=None) with assert_raises(ValueError, match="when given, axes and shape arguments" " have to be of the same length"): fforward(self.data, shape=self.data.shape, axes=0) @pytest.mark.parametrize('fforward', [dctn, dstn]) def test_shape(self, fforward): tmp = fforward(self.data, shape=(128, 128), axes=None) assert_equal(tmp.shape, (128, 128)) @pytest.mark.parametrize('fforward,finverse', [(dctn, idctn), (dstn, idstn)]) @pytest.mark.parametrize('axes', [1, (1,), [1], 0, (0,), [0]]) def test_shape_is_none_with_axes(self, fforward, finverse, axes): tmp = fforward(self.data, shape=None, axes=axes, norm='ortho') tmp = finverse(tmp, shape=None, axes=axes, norm='ortho') assert_array_almost_equal(self.data, tmp, decimal=self.dec)
mdhaber/scipy
scipy/fftpack/tests/test_real_transforms.py
Python
bsd-3-clause
23,941
0.000877
############################################################################### # # Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### { 'name': 'Volume box for DDT', 'version': '0.1', 'category': 'Account', 'description': ''' Add volume box for DDT volume info ''', 'author': 'Micronaet S.r.l. - Nicola Riolini', 'website': 'http://www.micronaet.it', 'license': 'AGPL-3', 'depends': [ 'base', 'l10n_it_ddt', ], 'init_xml': [], 'demo': [], 'data': [ 'security/ir.model.access.csv', 'volume_view.xml', ], 'active': False, 'installable': True, 'auto_install': False, }
Micronaet/micronaet-mx8
sale_box_volume/__openerp__.py
Python
agpl-3.0
1,492
0.002011
from datetime import tzinfo, timedelta, datetime

ZERO = timedelta(0)
HOUR = timedelta(hours=1)


class UTC(tzinfo):
    """UTC"""

    def utcoffset(self, dt):
        return ZERO

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        return ZERO


# Module-level singleton; UTC has no state, so one instance is enough.
utc = UTC()


class FixedOffset(tzinfo):
    """Fixed offset in minutes east from UTC.

    Note that FixedOffset(0, "UTC") is another way to build a UTC tzinfo.
    """

    def __init__(self, offset, name):
        self._offset = timedelta(minutes=offset)
        self._name = name

    def utcoffset(self, dt):
        return self._offset

    def tzname(self, dt):
        return self._name

    def dst(self, dt):
        # Fixed-offset zones never observe daylight saving time.
        return ZERO


# The platform's idea of local time, derived from the time module.
import time as _time

STDOFFSET = timedelta(seconds=-_time.timezone)
DSTOFFSET = timedelta(seconds=-_time.altzone) if _time.daylight else STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET


class LocalTimezone(tzinfo):
    """Capture the platform's local-time rules via time.localtime()."""

    def utcoffset(self, dt):
        return DSTOFFSET if self._isdst(dt) else STDOFFSET

    def dst(self, dt):
        return DSTDIFF if self._isdst(dt) else ZERO

    def tzname(self, dt):
        return _time.tzname[self._isdst(dt)]

    def _isdst(self, dt):
        # Round-trip through mktime/localtime so the C library decides
        # whether DST was in effect at that wall-clock moment.
        timetuple = (dt.year, dt.month, dt.day,
                     dt.hour, dt.minute, dt.second,
                     dt.weekday(), 0, -1)
        localized = _time.localtime(_time.mktime(timetuple))
        return localized.tm_isdst > 0


Local = LocalTimezone()


# A complete implementation of current DST rules for major US time zones.

def first_sunday_on_or_after(dt):
    """Return the first Sunday that falls on or after *dt*."""
    remaining = 6 - dt.weekday()
    return dt + timedelta(remaining) if remaining else dt


# US DST Rules
#
# This is a simplified (i.e., wrong for a few cases) set of rules for US
# DST start and end times. For a complete and up-to-date set of DST rules
# and timezone definitions, visit the Olson Database (or try pytz):
# http://www.twinsun.com/tz/tz-link.htm
# http://sourceforge.net/projects/pytz/ (might not be up-to-date)
#
# The year component (1) of these anchors is a placeholder; dst() below
# substitutes the real year before use.
#
# Since 2007: DST starts at 2am (standard time) on the second Sunday in
# March (first Sunday on or after Mar 8) ...
DSTSTART_2007 = datetime(1, 3, 8, 2)
# ... and ends at 2am (DST time; 1am standard time) on the first Sunday
# of November.
DSTEND_2007 = datetime(1, 11, 1, 1)
# 1987-2006: start 2am first Sunday in April; end 2am (DST time) on the
# last Sunday of October (first Sunday on or after Oct 25).
DSTSTART_1987_2006 = datetime(1, 4, 1, 2)
DSTEND_1987_2006 = datetime(1, 10, 25, 1)
# 1967-1986: start 2am last Sunday in April (on or after April 24); same
# end rule as 1987-2006.
DSTSTART_1967_1986 = datetime(1, 4, 24, 2)
DSTEND_1967_1986 = DSTEND_1987_2006


class USTimeZone(tzinfo):
    """A US time zone implementing the simplified 1967+ federal DST rules."""

    def __init__(self, hours, reprname, stdname, dstname):
        self.stdoffset = timedelta(hours=hours)
        self.reprname = reprname
        self.stdname = stdname
        self.dstname = dstname

    def __repr__(self):
        return self.reprname

    def tzname(self, dt):
        return self.dstname if self.dst(dt) else self.stdname

    def utcoffset(self, dt):
        return self.stdoffset + self.dst(dt)

    def dst(self, dt):
        if dt is None or dt.tzinfo is None:
            # An exception may be sensible here, in one or both cases.
            # It depends on how you want to treat them. The default
            # fromutc() implementation (called by the default astimezone()
            # implementation) passes a datetime with dt.tzinfo is self.
            return ZERO
        assert dt.tzinfo is self

        # Select the start/end rule for dt's year; no DST before 1967.
        year = dt.year
        if year > 2006:
            rule = (DSTSTART_2007, DSTEND_2007)
        elif year > 1986:
            rule = (DSTSTART_1987_2006, DSTEND_1987_2006)
        elif year > 1966:
            rule = (DSTSTART_1967_1986, DSTEND_1967_1986)
        else:
            return ZERO

        start = first_sunday_on_or_after(rule[0].replace(year=year))
        end = first_sunday_on_or_after(rule[1].replace(year=year))
        # Can't compare naive to aware objects, so strip the timezone
        # from dt first.
        naive = dt.replace(tzinfo=None)
        return HOUR if start <= naive < end else ZERO


Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
Central = USTimeZone(-6, "Central", "CST", "CDT")
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
MicroTrustRepos/microkernel
src/l4/pkg/python/contrib/Doc/includes/tzinfo-examples.py
Python
gpl-2.0
5,063
0.003555
#!/usr/bin/env python # -*- coding: utf-8 -*- # file include_bib.py # This file is part of LyX, the document processor. # Licence details can be found in the file COPYING. # authors Richard Heck and [SchAirport] # Full author contact details are available in file CREDITS # This script is intended to include a BibTeX-generated biblography # in a LaTeX file, as publishers often want. It can be run manually # on an exported LaTeX file, though it needs to be compiled first, # so the bbl file will exist. # # It should also be possible to create a LyX converter to run this # automatically. To set it up, create a format "ltxbbl"; make sure to # check it as a document format. Then create a LaTeX-->ltxbbl converter, # with the command: # python -tt $$s/scripts/include_bib.py $$i $$o # and give it the flags: # needaux,nice # You'll then have it in the export menu. # # We do not activate this converter by default, because there are problems # when one tries to use multiple bibliographies. # # Please report any problems on the devel list. 
import sys, os


class secbib:
    """Line range of a bibliography section (kept for API compatibility;
    currently unused)."""
    def __init__(self, start=-1, end=-1):
        self.start = start
        self.end = end


class BibError(Exception):
    """Raised when the LaTeX source cannot have its bibliography inlined."""
    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg


def InsertBib(fil, out):
    '''
    Inserts the contents of the .bbl file instead of the bibliography
    in a new .tex file.

    fil -- path of the compiled .tex file (a sibling .bbl must exist)
    out -- path of the .tex file to create
    Returns `out`; raises BibError when zero or multiple bibliographies
    are found, or when btSect sectioned bibliographies are used.
    '''
    # Use context managers so handles are closed even on error
    # (the original leaked all three file objects).
    with open(fil, 'r') as texfile:
        texlist = texfile.readlines()

    # Locate the \bibliography command, guarding against unsupported input.
    biblist = []
    for i, line in enumerate(texlist):
        if "\\bibliographystyle" in line:
            # Must be tested first: "\bibliography" is a substring of
            # "\bibliographystyle" and would otherwise match it, too.
            continue
        elif "\\bibliography" in line:
            biblist.append(i)
        elif "\\begin{btSect}" in line:
            raise BibError("Cannot export sectioned bibliographies")

    if len(biblist) > 1:
        raise BibError("Cannot export multiple bibliographies.")
    if not biblist:
        raise BibError("No bibliography found!")

    bibpos = biblist[0]
    bblfile = fil[:-4] + ".bbl"
    with open(bblfile, 'r') as bbl:
        bbllist = bbl.readlines()

    # Everything before \bibliography + the .bbl contents + everything after.
    newlist = texlist[0:bibpos] + bbllist + texlist[bibpos + 1:]
    with open(out, 'w') as outfile:
        outfile.write("".join(newlist))
    return out


def usage():
    # print(...) with a single argument is valid in both Python 2 and 3;
    # the original `print r'''...'''` statement is a SyntaxError on Python 3.
    print(r'''
Usage: python include_bib.py file.tex [outfile.tex]

Includes the contents of file.bbl, which must exist in the same
directory as file.tex, in place of the \bibliography command, and
creates the new file outfile.tex. If no name for that file is given,
we create: file-bbl.tex.
''')


if __name__ == "__main__":
    args = len(sys.argv)
    if args <= 1 or args > 3:
        usage()
        sys.exit(0)

    # we might should make sure this is a tex file....
    infile = sys.argv[1]
    if infile[-4:] != ".tex":
        print("Error: " + infile + " is not a TeX file")
        usage()
        sys.exit(1)

    if args == 3:
        outfile = sys.argv[2]
    else:
        outfile = infile[:-4] + "-bbl.tex"

    newfile = InsertBib(infile, outfile)
    print("Wrote " + outfile)
hashinisenaratne/HSTML
lib/scripts/include_bib.py
Python
gpl-2.0
2,972
0.01817
# -*- coding: utf-8 -*- # See README.rst file on addon root folder for license details from openerp import models, fields, exceptions, api from openerp.tools.translate import _ class ProjectTemplateWizard(models.TransientModel): _name = 'project.template.wizard' project_id = fields.Many2one( comodel_name='project.project', string='Template project', domain="[('state', '=', 'template')]") event_id = fields.Many2one(comodel_name='event.event') @api.one def project_template_duplicate(self): if not self.project_id: raise exceptions.ValidationError( _('Template project is required.')) parent_id = self.project_id.parent_id.id res = self.project_id.with_context( self.env.context, parent_id=parent_id).duplicate_template() self.with_context( {'no_recalculate': True}).event_id.project_id = res['res_id'] self.event_id.project_id.write({ 'name': self.event_id.name, 'date_start': self.event_id.date_begin, 'date': self.event_id.date_begin, 'calculation_type': 'date_end', }) self.event_id.project_id.project_recalculate()
Endika/event
event_project/wizard/project_template_wizard.py
Python
agpl-3.0
1,219
0
# Gunicorn configuration file.
# Listen on a Unix domain socket (typically proxied to by nginx) rather
# than a TCP port.
bind = 'unix:/run/emojiweather.sock'
# Directory prepended to sys.path so the project package is importable.
pythonpath = 'emojiweather'
# Number of worker processes.
workers = 2
# '-' routes the access log to stdout.
accesslog = '-'
richardcornish/smsweather
gunicorn.py
Python
bsd-3-clause
95
0.010526
"""Feature definitions used for scoring Wikidata edits."""
from revscoring.features import user
from revscoring.features.modifiers import not_, log

from ..features import diff, revision


class properties:
    """
    Mapping of english descriptions to property identifiers
    """
    IMAGE = 'P18'
    SEX_OR_GENDER = 'P21'
    COUNTRY_OF_CITIZENSHIP = 'P27'
    INSTANCE_OF = 'P31'
    MEMBER_OF_SPORTS_TEAM = 'P54'
    SIGNATURE = 'P109'
    COMMONS_CATEGORY = 'P373'
    DATE_OF_BIRTH = 'P569'
    DATE_OF_DEATH = 'P570'
    OFFICIAL_WEBSITE = 'P856'


class items:
    """
    Mapping of english descriptions to item identifiers
    """
    HUMAN = 'Q5'


# Comment features -- each matches the auto-generated edit summary prefix
# MediaWiki/Wikibase produces for the corresponding action.
is_client_delete = revision.comment_matches(r"^\/\* clientsitelink\-remove\:",
                                            name='revision.is_client_delete')
is_client_move = revision.comment_matches(r"^\/\* clientsitelink\-update\:",
                                          name='revision.is_client_move')
is_merge_into = revision.comment_matches(r"^\/\* wbmergeitems\-to\:",
                                         name='revision.is_merge_into')
is_merge_from = revision.comment_matches(r"^\/\* wbmergeitems\-from\:",
                                         name='revision.is_merge_from')
is_revert = \
    revision.comment_matches(r"^Reverted edits by \[\[Special\:Contributions",
                             name='revision.is_revert')
is_rollback = revision.comment_matches(r"^Undid revision ",
                                       name='revision.is_rollback')
is_restore = revision.comment_matches(r"^Restored revision ",
                                      name='revision.is_restore')
is_item_creation = revision.comment_matches(r"^\/\* (wbsetentity|"
                                            r"wbeditentity-create\:0\|) \*\/",
                                            name='revision.is_item_creation')

# Properties changed -- flags set when the given Wikidata property was
# touched by the edit.
sex_or_gender_changed = \
    diff.property_changed(properties.SEX_OR_GENDER,
                          name='diff.sex_or_gender_changed')
country_of_citizenship_changed = \
    diff.property_changed(properties.COUNTRY_OF_CITIZENSHIP,
                          name='diff.country_of_citizenship_changed')
member_of_sports_team_changed = \
    diff.property_changed(properties.MEMBER_OF_SPORTS_TEAM,
                          name='diff.member_of_sports_team_changed')
date_of_birth_changed = \
    diff.property_changed(properties.DATE_OF_BIRTH,
                          name='diff.date_of_birth_changed')
image_changed = \
    diff.property_changed(properties.IMAGE, name='diff.image_changed')
signature_changed = \
    diff.property_changed(properties.SIGNATURE,
                          name='diff.signature_changed')
commons_category_changed = \
    diff.property_changed(properties.COMMONS_CATEGORY,
                          name='diff.commons_category_changed')
official_website_changed = \
    diff.property_changed(properties.OFFICIAL_WEBSITE,
                          name='diff.official_website_changed')

# Status -- item-level predicates about the revision's subject.
is_human = \
    revision.has_property_value(properties.INSTANCE_OF, items.HUMAN,
                                name='revision.is_human')
has_birthday = \
    revision.has_property(properties.DATE_OF_BIRTH,
                          name='revision.has_birthday')
dead = \
    revision.has_property(properties.DATE_OF_DEATH, name='revision.dead')
# "Biography of a living person": has a birth date and no death date.
is_blp = has_birthday.and_(not_(dead))

# Feature vector exported under the name `reverted`; presumably consumed by
# a reverted/damage-detection model -- confirm against the model configs.
reverted = [
    # revscoring.features.diff.longest_repeated_char_added,
    # revscoring.features.diff.longest_token_added,
    # log(revscoring.features.diff.numeric_chars_added + 1),
    # log(revscoring.features.diff.numeric_chars_removed + 1),
    # revscoring.features.diff.proportion_of_chars_added,
    # revscoring.features.diff.proportion_of_chars_removed,
    # revscoring.features.diff.proportion_of_numeric_chars_added,
    # revscoring.features.diff.proportion_of_symbolic_chars_added,
    # revscoring.features.diff.proportion_of_uppercase_chars_added,
    # log(revscoring.features.diff.symbolic_chars_added + 1),
    # log(revscoring.features.diff.symbolic_chars_removed + 1),
    # log(revscoring.features.diff.uppercase_chars_added + 1),
    # log(revscoring.features.diff.uppercase_chars_removed + 1),
    # revscoring.features.diff.bytes_changed + 1,
    # revscoring.featuresdiff.bytes_changed_ratio,
    # page.is_content_namespace,
    # parent_revision.was_same_user,
    log(user.age + 1),
    diff.number_added_sitelinks,
    diff.number_removed_sitelinks,
    diff.number_changed_sitelinks,
    diff.number_added_labels,
    diff.number_removed_labels,
    diff.number_changed_labels,
    diff.number_added_descriptions,
    diff.number_removed_descriptions,
    diff.number_changed_descriptions,
    diff.number_added_aliases,
    diff.number_removed_aliases,
    diff.number_added_claims,
    diff.number_removed_claims,
    diff.number_changed_claims,
    diff.number_changed_identifiers,
    diff.en_label_touched,
    diff.number_added_sources,
    diff.number_removed_sources,
    diff.number_added_qualifiers,
    diff.number_removed_qualifiers,
    diff.number_added_badges,
    diff.number_removed_badges,
    # diff.mean_distance_descriptions,
    # diff.mean_distance_labels,
    diff.proportion_of_qid_added,
    diff.proportion_of_language_added,
    diff.proportion_of_links_added,
    is_client_move,
    is_client_delete,
    is_merge_into,
    is_merge_from,
    is_revert,
    is_rollback,
    is_restore,
    is_item_creation,
    sex_or_gender_changed,
    country_of_citizenship_changed,
    member_of_sports_team_changed,
    date_of_birth_changed,
    image_changed,
    signature_changed,
    commons_category_changed,
    official_website_changed,
    log(revision.number_claims + 1),
    log(revision.number_aliases + 1),
    log(revision.number_sources + 1),
    log(revision.number_qualifiers + 1),
    log(revision.number_badges + 1),
    log(revision.number_labels + 1),
    log(revision.number_sitelinks + 1),
    log(revision.number_descriptions + 1),
    is_human,
    is_blp,
    user.is_bot,
    user.is_anon,
]
wiki-ai/wb-vandalism
wb_vandalism/feature_lists/wikidata.py
Python
mit
6,094
0.003446
from .expression import Expression, Bool, BitVec, Array, BitVecConstant, issymbolic # noqa from .constraints import ConstraintSet # noqa from .solver import * # noqa from . import operators as Operators # noqa import logging logger = logging.getLogger(__name__)
trailofbits/manticore
manticore/core/smtlib/__init__.py
Python
agpl-3.0
268
0
import time, sys, os, helper

from comodit_client.api import Client
from comodit_client.api.exceptions import PythonApiException
from comodit_client.rest.exceptions import ApiException

from combox.config import config
from helper import create_host, get_short_hostname, exec_cmd, exec_cmds, fork_cmd


def stop():
    """Power off the VirtualBox VM named in the combox configuration.

    Returns the result of ``exec_cmds`` so callers can check whether the
    poweroff command succeeded (the original bound it to an unused local
    and discarded it).
    """
    # Single-argument print() behaves the same on Python 2 and 3.
    print("Stopping virtual machine")
    return exec_cmds(['VBoxManage controlvm "%s" poweroff'
                      % config['vm']['name']])


def start():
    """Start the configured VirtualBox VM headless, as a background process."""
    print("Starting virtual machine")
    fork_cmd('VBoxManage startvm --type headless "%s"' % config['vm']['name'])
comodit/combox
combox/control.py
Python
mit
569
0.010545
import sys, os, platform, math

import Sofa
import Flexible.IO
import Flexible.sml
import SofaPython.Tools
from SofaPython.Tools import listToStr as concat
import numpy
from numpy import linalg

# variables
__file = __file__.replace('\\', '/')  # windows
CURRENTDIR = os.path.dirname(os.path.abspath(__file__))+'/'
CURRENTDIR = CURRENTDIR.replace('//', '/')  # windows compatible filename

#=====================================================================================
# Scene launch
#=====================================================================================
def createScene(root_node):
    # Entry point called by SOFA: load required plugins and register this
    # file's MyClass as the scene's Python controller.
    root_node.createObject('RequiredPlugin', name='image')
    root_node.createObject('RequiredPlugin', name='Flexible')
    root_node.createObject('RequiredPlugin', name='Compliant')
    root_node.createObject('CompliantAttachButtonSetting')
    root_node.createObject('PythonScriptController', name='MyClass', filename=__file, classname='MyClass')

# ================================================================= #
# Creation of the scene
# ================================================================= #
class MyClass(Sofa.PythonScriptController):
    # Setup of class attributes
    def setup(self):
        return

    def createGraph(self, root):
        """Build the deformable-beam scene graph under the given root node."""
        self.setup()
        self.node = root
        self.node.createObject('VisualStyle', displayFlags='showVisual hideWireframe showBehaviorModels showForceFields showInteractionForceFields')
        self.node.createObject('BackgroundSetting', color='1 1 1')
        self.node.gravity = '0 -9.81 0'
        self.node.dt = .1

        # compliant solver
        self.node.createObject('CompliantImplicitSolver', stabilization=1)
        self.node.createObject('SequentialSolver', iterations=75, precision=1E-15, iterateOnBilaterals=1)
        self.node.createObject('LDLTResponse', schur=0)

        # beam creation: rasterize a regular grid into an image, sample
        # control frames from it, and attach a Voronoi shape function.
        self.mainNode = self.node.createChild('deformable')
        self.mainNode.createObject('RegularGrid', name='grid', n='25 5 5', min='0. 0. 0.', max='4. 1. 1.')
        self.mainNode.createObject('MeshToImageEngine', template='ImageUC', name='rasterizer', src='@grid', value=1, insideValue=1, voxelSize=0.025, padSize=0, rotateImage='false')
        self.mainNode.createObject('ImageContainer', template='ImageUC', name='image', src='@rasterizer', drawBB='false')
        self.mainNode.createObject('ImageSampler', template='ImageUC', name='sampler', src='@image', method=1, param='2 0', clearData=0)
        self.mainNode.createObject('MechanicalObject', template='Affine', name='parent', position='@sampler.position', rest_position='@sampler.position', showObject=1, showObjectScale='0.1')
        self.mainNode.createObject('VoronoiShapeFunction', template='ShapeFunctiond,ImageUC', name='SF', position='@parent.rest_position', image='@image.image', transform='@image.transform', nbRef=4, clearData=1, bias=0)
        self.mainNode.createObject('FixedConstraint', template='Affine', indices='0')

        # behavior: Gauss-point sampling and corotational Hooke material
        behaviorNode = self.mainNode.createChild('behavior')
        behaviorNode.createObject('ImageGaussPointSampler', name='sampler', indices='@../SF.indices', weights='@../SF.weights', transform='@../SF.transform', method=2, order=4, targetNumber=10)
        behaviorNode.createObject('MechanicalObject', template='F332')
        behaviorNode.createObject('LinearMapping', template='Affine,F332')

        eNode = behaviorNode.createChild('E')
        eNode.createObject('MechanicalObject', template='E332', name='E')
        eNode.createObject('CorotationalStrainMapping', template='F332,E332', method='polar')
        eNode.createObject('HookeForceField', template='E332', name='ff', youngModulus='1E3', poissonRatio='0', viscosity='0')

        # contact and visual model
        contactNode = self.mainNode.createChild('registration')
        contactNode.createObject('MeshTopology', name='topo', src='@../grid')
        contactNode.createObject('MechanicalObject', name='DOFs')
        contactNode.createObject('UniformMass', totalMass=1)
        contactNode.createObject('TriangleModel')
        contactNode.createObject('LinearMapping', template='Affine,Vec3d')

        visuNode = contactNode.createChild('visual')
        visuNode.createObject('OglModel', template='ExtVec3f', name='visual', src='@../topo', color='0.8 0.2 0.2 1')
        visuNode.createObject('IdentityMapping', template='Vec3d,ExtVec3f')

        # Global SceneDataIO helper used by saveState(); restricted to the
        # component classes listed in classNameList.
        global sceneDataIO
        sceneDataIO = SofaPython.Tools.SceneDataIO(self.node)
        sceneDataIO.classNameList = ['MechanicalObject', 'OglModel', 'VisualModel']

    def cleanup(self):
        # Called by SOFA when the scene is closed; persist the final state.
        print 'cleanup: the scene has been close'
        self.saveState('SceneDataIO')
        print 'The scene state has been save at t=', self.node.getTime()

    def onEndAnimationStep(self, dt):
        # Save the state once, during the sixth simulation step.
        if self.node.getTime() >= dt*5 and self.node.getTime() < dt*6:
            self.saveState('SceneDataIO')
            print 'The scene state has been save at t=', self.node.getTime()

    # =============================================================================== #
    # Scene methods
    # =============================================================================== #
    def saveState(self, directory=None):
        """Write the scene state into `directory` (auto-named when None)."""
        # create the directory where the simulation state will be stored
        if directory == None:
            directory = os.path.basename(__file__).split('.')[0] + '_at_t_' + str(self.node.getTime())
        if not os.path.isdir(directory):
            try:
                os.makedirs(directory)
            except OSError:
                # Re-raise unless another process created it concurrently.
                if not os.path.isdir(directory):
                    raise
        # store the data
        sceneDataIO.writeData(directory)
FabienPean/sofa
applications/plugins/SofaPython/examples/sceneDataIO_write.py
Python
lgpl-2.1
5,843
0.004963
#!/usr/bin/python3
import _thread
import RPi.GPIO as GPIO
import socket
import time
from time import sleep
from sys import exit
import datetime
#import MySQLdb

# Start task command
# sleep 30 && python /home/pi/Scripts/Sprinkler/Sprinkler.py > /home/pi/Scripts/Sprinkler/log.txt 2>&1

# GPIO output points. Zone relays are active-low: writing LOW (0) opens a
# valve, writing HIGH (1) closes it (see setup() and zone()).
Zones = [5, 6, 13, 19]
StatusLED = 16

# GPIO input points
CancelButton = 18
WaterSensor = 10

# Water Sensor Enabled?
Sensor = False

# Is it currently raining
isRaining = False

defaultWaitDuration = 0


def setup():
    """Configure GPIO pins and open the TCP command socket."""
    global serversocket, t

    # Setup GPIO
    GPIO.setmode(GPIO.BCM)
    GPIO.setwarnings(True)

    # Input Cancel Button
    GPIO.setup(CancelButton, GPIO.IN, pull_up_down=GPIO.PUD_UP)

    # Input Rain Sensor
    if Sensor:
        GPIO.setup(WaterSensor, GPIO.IN, pull_up_down=GPIO.PUD_UP)

    # Setup 4 zones on GPIO, all "OFF" (HIGH = de-energized, valves closed).
    for i in Zones:
        GPIO.setup(i, GPIO.OUT)
        GPIO.output(i, GPIO.HIGH)

    # Setup status LED
    GPIO.setup(StatusLED, GPIO.OUT)

    # Listening socket for commands of the form "COMMAND:arg1:arg2".
    serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    host = socket.gethostname()
    port = 9999
    serversocket.bind((host, port))
    serversocket.listen(5)

    addLog("System", "Setup complete")


def mainRun():
    """Accept socket commands and dispatch them until QUIT is received."""
    global isRaining
    addLog("System", "Main Thread started")

    # Always poll the cancel switch in the background.
    _thread.start_new_thread(checkSwitch, ((),))

    while True:
        global serversocket
        clientsocket, addr = serversocket.accept()
        fromClient = clientsocket.recv(1024)
        clientsocket.close()
        strFromClient = str(fromClient.decode("ascii"))
        addLog("Recived", strFromClient)

        # Split incoming message: "COMMAND:arg1:arg2".
        requestType = strFromClient.split(":")

        if requestType[0] == "WATER":
            # Only water when it is not raining.
            if not isRaining:
                # Clear any previous rain indication from the LED.
                statusLED("off")
                _thread.start_new_thread(water, (requestType[1], requestType[2],))
        elif requestType[0] == "ZONE":
            # requestType[1] is "ON"/"OFF"; zone() treats anything but "ON"
            # as off, so it can be forwarded directly.
            zone(int(requestType[2]), requestType[1])
        elif requestType[0] == "RainStatus":
            # Some day we will send something back
            print("nothing")
        elif requestType[0] == "QUIT":
            destroy()


def checkSwitch(_unused):
    """Poll the cancel switch every 2s and mirror its state into isRaining."""
    global isRaining
    while True:
        state = GPIO.input(CancelButton)
        if state:
            if state != isRaining:
                addLog("System", "Switch TRUE")
                statusLED("solid")
            isRaining = True
        else:
            if state != isRaining:
                addLog("System", "Switch FALSE")
                statusLED("off")
            isRaining = False
        sleep(2)


def water(zoneNum, duration):
    """Water one zone for `duration` minutes with the status LED lit."""
    zone(int(zoneNum), "ON")
    statusLED("on")
    sleep(int(duration) * 60)
    zone(int(zoneNum), "OFF")
    statusLED("off")


def zone(zoneSelect, onoff):
    """Energize ("ON") or de-energize (anything else) the selected zone."""
    if onoff == "ON":
        GPIO.output(Zones[zoneSelect], 0)
        addLog('Zone ' + str(zoneSelect), 'ON')
    else:
        GPIO.output(Zones[zoneSelect], 1)
        addLog('Zone ' + str(zoneSelect), 'OFF')


def rain():
    """Update isRaining from the hardware rain sensor (when enabled)."""
    global isRaining
    if Sensor:
        if GPIO.input(WaterSensor):
            isRaining = True
        else:
            isRaining = False


def statusLED(status):
    """Drive the status LED: 'blink' one cycle, 'solid'/'on' steady, 'off'."""
    if status == "blink":
        GPIO.output(StatusLED, GPIO.HIGH)
        sleep(0.5)
        GPIO.output(StatusLED, GPIO.LOW)
        sleep(0.5)
    elif status in ("solid", "on"):
        # Bug fix: water() requests "on", which previously matched no branch,
        # so the LED never lit while watering.
        GPIO.output(StatusLED, GPIO.HIGH)
    elif status == "off":
        GPIO.output(StatusLED, GPIO.LOW)


def addLog(currentZone, addedText):
    """Print a timestamped log line."""
    now = datetime.datetime.now()
    print("{0}: {1}: {2}".format(now, currentZone, addedText))


def destroy():
    """Shut down cleanly: close the socket and de-energize all outputs."""
    global serversocket
    serversocket.shutdown(socket.SHUT_RDWR)
    for i in Zones:
        # Bug fix: zones are active-low, so HIGH turns the valves OFF.
        # The original wrote LOW here, which would open every valve on
        # shutdown.
        GPIO.output(i, GPIO.HIGH)
    GPIO.output(StatusLED, GPIO.LOW)
    addLog('System', 'Sprinkler Script OFF')
    exit()


if __name__ == '__main__':
    setup()
    try:
        mainRun()
    except KeyboardInterrupt:
        destroy()
    finally:
        GPIO.cleanup()
        exit()
else:
    # NOTE(review): calling destroy() on import looks unintentional (the
    # socket does not exist yet) -- preserved for compatibility; confirm.
    destroy()
Makerblaker/Sprinkler
server.py
Python
gpl-3.0
4,160
0.040625
# -*- coding: utf-8 -*- """ Class stores integer values for various types of moves in algebraic notation. Copyright © 2016 Aubhro Sengupta. All rights reserved. """ MOVEMENT = 0 CAPTURE = 1 KING_SIDE_CASTLE = 2 QUEEN_SIDE_CASTLE = 3 EN_PASSANT = 4 PROMOTE = 5 CAPTURE_AND_PROMOTE = 6 NOT_IMPLEMENTED = 7 LONG_ALG = 8
LordDarkula/chess_py
chess_py/core/algebraic/notation_const.py
Python
mit
330
0
# Copyright 2016 Google Inc. All Rights Reserved.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Utility helpers for xmlrpclib."""

import http.client
import socketserver
import sys
import threading
import xmlrpc.client
import xmlrpc.server

import six
from six.moves import collections_abc

# Default timeout applied to proxy connections, in seconds.
DEFAULT_PROXY_TIMEOUT_S = 3

# https://github.com/PythonCharmers/python-future/issues/280
# pylint: disable=g-import-not-at-top,g-importing-member
if sys.version_info[0] < 3:
  from SimpleXMLRPCServer import SimpleXMLRPCServer  # pytype: disable=import-error
else:
  from xmlrpc.server import SimpleXMLRPCServer  # pytype: disable=import-error
# pylint: enable=g-import-not-at-top,g-importing-member


class TimeoutHTTPConnection(http.client.HTTPConnection):  # pylint: disable=missing-class-docstring
  # HTTPConnection whose socket timeout can be set before and after connect().

  def __init__(self, timeout_s, *args, **kwargs):
    http.client.HTTPConnection.__init__(self, *args, **kwargs)
    self.timeout_s = timeout_s

  def settimeout(self, timeout_s):
    # Store the new timeout and apply it to the already-open socket.
    self.timeout_s = timeout_s
    self.sock.settimeout(self.timeout_s)

  def connect(self):
    http.client.HTTPConnection.connect(self)
    self.sock.settimeout(self.timeout_s)


class TimeoutTransport(xmlrpc.client.Transport):  # pylint: disable=missing-class-docstring
  # Transport that creates TimeoutHTTPConnections and caches one per host.

  def __init__(self, timeout_s, *args, **kwargs):
    xmlrpc.client.Transport.__init__(self, *args, **kwargs)
    self._connection = None
    self.timeout_s = timeout_s

  def settimeout(self, timeout_s):
    self.timeout_s = timeout_s
    if self._connection:
      self._connection[1].settimeout(timeout_s)

  def make_connection(self, host):
    # Reuse the cached connection when the host matches (a (host, conn) pair).
    if not self._connection or host != self._connection[0]:
      self._connection = host, TimeoutHTTPConnection(self.timeout_s, host)
    return self._connection[1]


class BaseServerProxy(xmlrpc.client.ServerProxy, object):
  """New-style base class for ServerProxy, allows for use of Mixins below."""


class TimeoutProxyMixin(object):
  """Timeouts for ServerProxy objects."""

  def __init__(self, *args, **kwargs):
    # Replace the transport with a timeout-aware one; 'timeout_s' is popped
    # so it is not forwarded to ServerProxy.
    kwargs.update(
        transport=TimeoutTransport(
            kwargs.pop('timeout_s', DEFAULT_PROXY_TIMEOUT_S)))
    super(TimeoutProxyMixin, self).__init__(*args, **kwargs)

  def __settimeout(self, timeout_s):
    # NOTE(review): double-underscore name mangling makes the PY2 branch read
    # self._TimeoutProxyMixin__transport, not ServerProxy's private
    # _ServerProxy__transport -- hence the pytype suppressions. Confirm the
    # PY2 path is actually exercised.
    if six.PY3:
      self._transport.settimeout(timeout_s)  # pytype: disable=attribute-error
    else:
      self.__transport.settimeout(timeout_s)  # pytype: disable=attribute-error


class TimeoutProxyServer(TimeoutProxyMixin, BaseServerProxy):
  """A BaseServerProxy plus timeouts."""


class LockedProxyMixin(object):
  """A ServerProxy that locks calls to methods."""

  def __init__(self, *args, **kwargs):
    super(LockedProxyMixin, self).__init__(*args, **kwargs)
    self._lock = threading.Lock()

  def __getattr__(self, attr):
    # Wrap callable attributes so each RPC holds the proxy-wide lock.
    method = super(LockedProxyMixin, self).__getattr__(attr)  # pytype: disable=attribute-error
    if isinstance(method, collections_abc.Callable):
      # xmlrpc doesn't support **kwargs, so only accept *args.
      def _wrapper(*args):
        with self._lock:
          return method(*args)

      # functools.wraps() doesn't work with _Method internal type within
      # xmlrpclib. We only care about the name anyway, so manually set it.
      _wrapper.__name__ = attr
      return _wrapper
    return method


class LockedTimeoutProxy(TimeoutProxyMixin, LockedProxyMixin, BaseServerProxy):
  """ServerProxy with additional features we use."""


class SimpleThreadedXmlRpcServer(socketserver.ThreadingMixIn,
                                 SimpleXMLRPCServer):
  """Helper for handling multiple simultaneous RPCs in threads."""
  daemon_threads = True
google/openhtf
openhtf/util/xmlrpcutil.py
Python
apache-2.0
4,133
0.008226
# -*- encoding: utf-8 -*- __author__ = 'kotaimen' __date__ = '2/19/15' class FormatError(Exception): pass class InvalidMapType(FormatError): pass class InvalidTileFormat(FormatError): pass class NoMatchingMapWriter(FormatError): pass
Kotaimen/stonemason
stonemason/formatbundle/exceptions.py
Python
mit
259
0
# NOTE(review): this file is a code snippet intended to be spliced into an
# async how-to template (it uses bare `await`); it is not importable on its
# own. Comments below annotate each numbered step.

# 1. Create and open the Issuer's wallet.
print_log('\n1. Creates Issuer wallet and opens it to get handle.\n')
await wallet.create_wallet(pool_name, issuer_wallet_name, None, None, None)
issuer_wallet_handle = await wallet.open_wallet(issuer_wallet_name, None, None)

# 2. Create and open the Prover's wallet.
print_log('\n2. Creates Prover wallet and opens it to get handle.\n')
await wallet.create_wallet(pool_name, prover_wallet_name, None, None, None)
prover_wallet_handle = await wallet.open_wallet(prover_wallet_name, None, None)

# 3. Issuer creates a claim definition from the 'gvt' schema.
print_log('\n3. Issuer creates Claim Definition for Schema\n')
schema = {
    'seqNo': seq_no,
    'dest': issuer_did,
    'data': {
        'name': 'gvt',
        'version': '1.0',
        'attr_names': ['age', 'sex', 'height', 'name']
    }
}
schema_json = json.dumps(schema)
schema_key = {
    'name': schema['data']['name'],
    'version': schema['data']['version'],
    'did': schema['dest'],
}
claim_def_json = await anoncreds.issuer_create_and_store_claim_def(issuer_wallet_handle, issuer_did, schema_json, 'CL', False)
print_log('Claim Definition: ')
pprint.pprint(json.loads(claim_def_json))

# 4. Prover creates the master (link) secret used to bind claims.
print_log('\n4. Prover creates Link Secret\n')
link_secret_name = 'link_secret'
await anoncreds.prover_create_master_secret(prover_wallet_handle, link_secret_name)

# 5. Issuer creates a claim offer for the Prover.
print_log('\n5. Issuer create Cred Offer\n')
claim_offer_json = await anoncreds.issuer_create_claim_offer(issuer_wallet_handle, schema_json, issuer_did, prover_did)
print_log('Claim Offer: ')
pprint.pprint(json.loads(claim_offer_json))

# 6. Prover responds with a claim request bound to its link secret.
print_log('\n6. Prover creates and stores Cred Request\n')
claim_req_json = await anoncreds.prover_create_and_store_claim_req(prover_wallet_handle, prover_did, claim_offer_json, claim_def_json, link_secret_name)
print_log('Claim Request: ')
pprint.pprint(json.loads(claim_req_json))

# 7. Issuer signs the attribute values into a credential. Each attribute is
#    given as [raw_value, encoded_value].
print_log('\n7. Issuer creates Credential for received Cred Request\n')
claim_json = json.dumps({
    'sex': ['male', '5944657099558967239210949258394887428692050081607692519917050011144233115103'],
    'name': ['Alex', '1139481716457488690172217916278103335'],
    'height': ['175', '175'],
    'age': ['28', '28']
})
(_, claim_json) = await anoncreds.issuer_create_claim(issuer_wallet_handle, claim_req_json, claim_json, -1)

# 8. Prover verifies and stores the received credential.
print_log('\n8. Prover processes and stores received Credential\n')
await anoncreds.prover_store_claim(prover_wallet_handle, claim_json, None)
srottem/indy-sdk
docs/how-tos/negotiate-proof/python/step2.py
Python
apache-2.0
2,916
0.006859
import gdb

import pwndbg.decorators
import pwndbg.events
import pwndbg.gdbutils
import pwndbg.memoize

from pwndbg.color import disable_colors
from pwndbg.color import message

# Startup banner: list of registered GDB convenience functions and the
# number of loaded pwndbg commands.
funcs_list_str = ', '.join(message.notice('$' + f.name)
                           for f in pwndbg.gdbutils.functions.functions)

hint_lines = (
    'loaded %i commands. Type %s for a list.'
    % (len(pwndbg.commands.commands), message.notice('pwndbg [filter]')),
    'created %s gdb functions (can be used with print/break)' % funcs_list_str
)

for line in hint_lines:
    print(message.prompt('pwndbg: ') + message.system(line))

# Last-seen (inferior, thread) pair, used to detect target changes between
# prompts; None until the first prompt.
cur = None


def prompt_hook(*a):
    """Prompt hook: fire reload events on target change, then show context."""
    global cur

    pwndbg.decorators.first_prompt = True

    new = (gdb.selected_inferior(), gdb.selected_thread())

    if cur != new:
        # start=True only on the very first prompt (cur is None).
        pwndbg.events.after_reload(start=cur is None)
        cur = new

    if pwndbg.proc.alive and pwndbg.proc.thread_is_stopped:
        prompt_hook_on_stop(*a)


@pwndbg.memoize.reset_on_stop
def prompt_hook_on_stop(*a):
    # Memoized per stop, so the context panel prints only once per stop event.
    pwndbg.commands.context.context()


@pwndbg.config.Trigger([message.config_prompt_color, disable_colors])
def set_prompt():
    """Install the (optionally colored) pwndbg prompt string in GDB."""
    prompt = "pwndbg> "

    if not disable_colors:
        # SOH/STX tell readline which bytes are non-printing so line wrapping
        # stays correct around the color escapes.
        prompt = "\x02" + prompt + "\x01"  # STX + prompt + SOH
        prompt = message.prompt(prompt)
        prompt = "\x01" + prompt + "\x02"  # SOH + prompt + STX

    gdb.execute('set prompt %s' % prompt)


if pwndbg.events.before_prompt_event.is_real_event:
    gdb.prompt_hook = prompt_hook
else:
    # Old GDBs doesn't have gdb.events.before_prompt, so we will emulate it
    # using gdb.prompt_hook
    def extended_prompt_hook(*a):
        pwndbg.events.before_prompt_event.invoke_callbacks()
        return prompt_hook(*a)

    gdb.prompt_hook = extended_prompt_hook
pwndbg/pwndbg
pwndbg/prompt.py
Python
mit
1,730
0.001734
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .intangible import Intangible


class StructuredValue(Intangible):
    """StructuredValue.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: Point2D, NormalizedQuadrilateral

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param _type: Required. Constant filled by server.
    :type _type: str
    :ivar id: A String identifier.
    :vartype id: str
    :ivar read_link: The URL that returns this resource. To use the URL,
     append query parameters as appropriate and include the
     Ocp-Apim-Subscription-Key header.
    :vartype read_link: str
    :ivar web_search_url: The URL to Bing's search result for this item.
    :vartype web_search_url: str
    :ivar name: The name of the thing represented by this object.
    :vartype name: str
    :ivar url: The URL to get more information about the thing represented by
     this object.
    :vartype url: str
    :ivar image: An image of the item.
    :vartype image:
     ~azure.cognitiveservices.search.visualsearch.models.ImageObject
    :ivar description: A short description of the item.
    :vartype description: str
    :ivar alternate_name: An alias for the item.
    :vartype alternate_name: str
    :ivar bing_id: An ID that uniquely identifies this item.
    :vartype bing_id: str
    """

    # All server-populated attributes are read-only; only the _type
    # discriminator is required on outbound payloads.
    _validation = {
        '_type': {'required': True},
        'id': {'readonly': True},
        'read_link': {'readonly': True},
        'web_search_url': {'readonly': True},
        'name': {'readonly': True},
        'url': {'readonly': True},
        'image': {'readonly': True},
        'description': {'readonly': True},
        'alternate_name': {'readonly': True},
        'bing_id': {'readonly': True},
    }

    # Maps Python attribute names to wire (JSON) keys and msrest type names.
    _attribute_map = {
        '_type': {'key': '_type', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'read_link': {'key': 'readLink', 'type': 'str'},
        'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        'image': {'key': 'image', 'type': 'ImageObject'},
        'description': {'key': 'description', 'type': 'str'},
        'alternate_name': {'key': 'alternateName', 'type': 'str'},
        'bing_id': {'key': 'bingId', 'type': 'str'},
    }

    # Discriminator values used for polymorphic deserialization of
    # sub-classes.
    _subtype_map = {
        '_type': {'Point2D': 'Point2D', 'NormalizedQuadrilateral': 'NormalizedQuadrilateral'}
    }

    def __init__(self, **kwargs):
        super(StructuredValue, self).__init__(**kwargs)
        self._type = 'StructuredValue'
Azure/azure-sdk-for-python
sdk/cognitiveservices/azure-cognitiveservices-search-visualsearch/azure/cognitiveservices/search/visualsearch/models/structured_value.py
Python
mit
3,167
0.000316
""" Sample a specific geometry or set of geometries. """ import numpy as np import nomad.core.glbl as glbl import nomad.core.trajectory as trajectory import nomad.core.log as log def set_initial_coords(wfn): """Takes initial position and momentum from geometry specified in input""" coords = glbl.properties['init_coords'] ndim = coords.shape[-1] log.print_message('string',[' Initial coordinates taken from input file(s).\n']) for coord in coords: itraj = trajectory.Trajectory(glbl.properties['n_states'], ndim, width=glbl.properties['crd_widths'], mass=glbl.properties['crd_masses'], parent=0, kecoef=glbl.modules['integrals'].kecoef) # set position and momentum itraj.update_x(np.array(coord[0])) itraj.update_p(np.array(coord[1])) # add a single trajectory specified by geometry.dat wfn.add_trajectory(itraj)
mschuurman/FMSpy
nomad/initconds/explicit.py
Python
lgpl-3.0
1,004
0.003984
# coding: utf-8 def ugettext(message): '''返回原字符串 为了使用 _('') 方式标记字符串 ''' return message
gwind/YWeb
yweb/yweb/utils/i18n.py
Python
mit
142
0
# Copyright 2013 Mario Graff Guerrero

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from SimpleGP import GPMAE
import numpy as np


def test_gpmae():
    """Train GPMAE on a sampled quadratic and check the resulting fitness.

    Fits y = 0.2*x**2 - 0.3*x + 0.2 on 100 points in [-10, 10] with a fixed
    seed, then asserts the best individual's fitness (negative MAE, per the
    assertion bound) stays above -0.7906.
    """
    x = np.linspace(-10, 10, 100)
    pol = np.array([0.2, -0.3, 0.2])
    X = np.vstack((x**2, x, np.ones(x.shape[0])))
    y = (X.T * pol).sum(axis=1)
    x = x[:, np.newaxis]
    gp = GPMAE.init_cl(verbose=True, generations=30, seed=0,
                       max_length=1000).train(x, y)
    gp.run()
    fit = gp.fitness(gp.get_best())
    # Bug fix: "print fit" is Python 2-only syntax and is a SyntaxError on
    # Python 3; the call form prints identically on both.
    print(fit)
    assert fit >= -0.7906
mgraffg/simplegp
SimpleGP/tests/test_gpmae.py
Python
apache-2.0
1,069
0.000935
# Source Generated with Decompyle++
# File: session_recording.pyc (Python 2.5)

from __future__ import absolute_import
from pushbase.session_recording_component import FixedLengthSessionRecordingComponent


class SessionRecordingComponent(FixedLengthSessionRecordingComponent):
    """Session recording that triggers on record-button *release*.

    Deferring the trigger to the release lets a press that is still held be
    treated differently from a completed tap.
    """

    def __init__(self, *a, **k):
        # Bug fix: the decompiled source forwarded ``**a`` (the positional
        # argument tuple) as keyword arguments; the keyword dict is ``k``.
        super(SessionRecordingComponent, self).__init__(*a, **k)
        # If the button is already down at construction time, do not trigger
        # on its upcoming release.
        self.set_trigger_recording_on_release(not (self._record_button.is_pressed))

    def set_trigger_recording_on_release(self, trigger_recording):
        """Arm (True) or disarm (False) triggering on the next release."""
        self._should_trigger_recording = trigger_recording

    def _on_record_button_pressed(self):
        # Intentionally a no-op: recording starts on release, not on press.
        pass

    def _on_record_button_released(self):
        if self._should_trigger_recording:
            self._trigger_recording()
        # Re-arm for the next press/release cycle.
        self._should_trigger_recording = True
phatblat/AbletonLiveMIDIRemoteScripts
Push2/session_recording.py
Python
mit
842
0.014252
__all__ = ["md5", "size", "calculate", "scan"] from md5hash.md5hash import md5, size, calculate, scan
valsaven/md5hash
md5hash/__init__.py
Python
mit
103
0
""" Module to control libvirtd service. """ import re import logging import aexpect from avocado.utils import path from avocado.utils import process from avocado.utils import wait from virttest import libvirt_version from virttest import utils_split_daemons from . import remote as remote_old from . import utils_misc from .staging import service from .utils_gdb import GDB try: path.find_command("libvirtd") LIBVIRTD = "libvirtd" except path.CmdNotFoundError: LIBVIRTD = None class Libvirtd(object): """ Class to manage libvirtd service on host or guest. """ def __init__(self, service_name=None, session=None): """ Initialize an service object for libvirtd. :params service_name: Service name such as virtqemud or libvirtd. If service_name is None, all sub daemons will be operated when modular daemon environment is enabled. Otherwise,if service_name is a single string, only the given daemon/service will be operated. :params session: An session to guest or remote host. """ self.session = session if self.session: self.remote_runner = remote_old.RemoteRunner(session=self.session) runner = self.remote_runner.run else: runner = process.run self.daemons = [] self.service_list = [] if LIBVIRTD is None: logging.warning("Libvirtd service is not available in host, " "utils_libvirtd module will not function normally") self.service_name = "libvirtd" if not service_name else service_name if libvirt_version.version_compare(5, 6, 0, self.session): if utils_split_daemons.is_modular_daemon(session=self.session): if self.service_name in ["libvirtd", "libvirtd.service"]: self.service_list = ['virtqemud', 'virtproxyd', 'virtnetworkd', 'virtinterfaced', 'virtnodedevd', 'virtsecretd', 'virtstoraged', 'virtnwfilterd'] elif self.service_name == "libvirtd.socket": self.service_name = "virtqemud.socket" elif self.service_name in ["libvirtd-tcp.socket", "libvirtd-tls.socket"]: self.service_name = re.sub("libvirtd", "virtproxyd", self.service_name) else: self.service_name = re.sub("^virt.*d", 
"libvirtd", self.service_name) else: self.service_name = "libvirtd" if not self.service_list: self.service_list = [self.service_name] for serv in self.service_list: self.daemons.append(service.Factory.create_service(serv, run=runner)) def _wait_for_start(self, timeout=60): """ Wait n seconds for libvirt to start. Default is 10 seconds. """ def _check_start(): virsh_cmd = "virsh list" try: if self.session: self.session.cmd(virsh_cmd, timeout=2) else: process.run(virsh_cmd, timeout=2) return True except Exception: return False return utils_misc.wait_for(_check_start, timeout=timeout) def start(self, reset_failed=True): result = [] for daem_item in self.daemons: if reset_failed: daem_item.reset_failed() if not daem_item.start(): return False result.append(self._wait_for_start()) return all(result) def stop(self): result = [] for daem_item in self.daemons: result.append(daem_item.stop()) return all(result) def restart(self, reset_failed=True): result = [] for daem_item in self.daemons: if reset_failed: daem_item.reset_failed() if not daem_item.restart(): return False result.append(self._wait_for_start()) return all(result) def is_running(self): result = [] for daem_item in self.daemons: result.append(daem_item.status()) return all(result) class DaemonSocket(object): """ Class to manage libvirt/virtproxy tcp/tls socket on host or guest. """ def __init__(self, daemon_name, session=None): """ Initialize an service object for virt daemons. :param daemon_name: daemon name such as virtproxyd-tls.socket, libvirtd-tcp.socket,etc,. :param session: An session to guest or remote host. 
""" self.session = session if self.session: self.remote_runner = remote_old.RemoteRunner(session=self.session) self.runner = self.remote_runner.run else: self.runner = process.run self.daemon_name = daemon_name supported_daemon = ["libvirtd-tcp.socket", "libvirtd-tls.socket", "virtproxyd-tls.socket", "virtproxyd-tcp.socket"] if self.daemon_name not in supported_daemon: raise ValueError("Invalid daemon: %s" % self.daemon_name) self.daemon_service_inst = Libvirtd("virtproxyd", session=self.session) self.daemon_inst = Libvirtd(self.daemon_name, session=self.session) self.daemon_socket = Libvirtd("virtproxyd.socket", session=self.session) def stop(self): self.daemon_socket.stop() self.daemon_service_inst.stop() self.daemon_inst.stop() self.runner("systemctl daemon-reload") self.daemon_socket.start() def start(self): self.daemon_socket.stop() self.daemon_service_inst.stop() self.runner("systemctl daemon-reload") self.daemon_inst.start() self.daemon_service_inst.start() def restart(self, reset_failed=True): self.daemon_socket.stop() self.daemon_service_inst.stop() self.runner("systemctl daemon-reload") self.daemon_inst.restart() self.daemon_service_inst.start() self.daemon_inst._wait_for_start() class LibvirtdSession(object): """ Interaction daemon session by directly call the command. With gdb debugging feature can be optionally started. It is recommended to use the service in the modular daemons for initialization, because Libvirtd() class will switch to the corresponding service according to the environment, eg. If the value of "service_name" is "virtqemud", it will take "virtqemud" if the modular daemon is enabled and "libvirtd" if it's disabled. 
""" def __init__(self, gdb=False, logging_handler=None, logging_params=(), logging_pattern=r'.*', service_name=None): """ :param gdb: Whether call the session with gdb debugging support :param logging_handler: Callback function to handle logging :param logging_pattern: Regex for filtering specific log lines :param service_name: Service name such as virtqemud or libvirtd """ self.gdb = None self.tail = None self.running = False self.pid = None self.service_name = service_name self.bundle = {"stop-info": None} # Get an executable program to debug by GDB self.service_exec = Libvirtd( service_name=self.service_name).service_list[0] self.libvirtd_service = Libvirtd(service_name=self.service_exec) self.was_running = self.libvirtd_service.is_running() if self.was_running: logging.debug('Stopping %s service', self.service_exec) self.libvirtd_service.stop() self.logging_handler = logging_handler self.logging_params = logging_params self.logging_pattern = logging_pattern if gdb: self.gdb = GDB(self.service_exec) self.gdb.set_callback('stop', self._stop_callback, self.bundle) self.gdb.set_callback('start', self._start_callback, self.bundle) self.gdb.set_callback('termination', self._termination_callback) def _output_handler(self, line): """ Adapter output callback function. """ if self.logging_handler is not None: if re.match(self.logging_pattern, line): self.logging_handler(line, *self.logging_params) def _termination_handler(self, status): """ Helper aexpect terminaltion handler """ self.running = False self.exit_status = status self.pid = None def _termination_callback(self, gdb, status): """ Termination handler function triggered when libvirtd exited. :param gdb: Instance of the gdb session :param status: Return code of exited libvirtd session """ self.running = False self.exit_status = status self.pid = None def _stop_callback(self, gdb, info, params): """ Stop handler function triggered when gdb libvirtd stopped. 
:param gdb: Instance of the gdb session :param status: Return code of exited libvirtd session """ self.running = False params['stop-info'] = info def _start_callback(self, gdb, info, params): """ Stop handler function triggered when gdb libvirtd started. :param gdb: Instance of the gdb session :param status: Return code of exited libvirtd session """ self.running = True params['stop-info'] = None def set_callback(self, callback_type, callback_func, callback_params=None): """ Set a customized gdb callback function. """ if self.gdb: self.gdb.set_callback( callback_type, callback_func, callback_params) else: logging.error("Only gdb session supports setting callback") def start(self, arg_str='', wait_for_working=True): """ Start libvirtd session. :param arg_str: Argument passing to the session :param wait_for_working: Whether wait for libvirtd finish loading """ if self.gdb: self.gdb.run(arg_str=arg_str) self.pid = self.gdb.pid else: self.tail = aexpect.Tail( "%s %s" % (self.service_exec, arg_str), output_func=self._output_handler, termination_func=self._termination_handler, ) self.running = True if wait_for_working: self.wait_for_working() def cont(self): """ Continue a stopped libvirtd session. """ if self.gdb: self.gdb.cont() else: logging.error("Only gdb session supports continue") def kill(self): """ Kill the libvirtd session. """ if self.gdb: self.gdb.kill() else: self.tail.kill() def restart(self, arg_str='', wait_for_working=True): """ Restart the libvirtd session. :param arg_str: Argument passing to the session :param wait_for_working: Whether wait for libvirtd finish loading """ logging.debug("Restarting %s session", self.service_exec) self.kill() self.start(arg_str=arg_str, wait_for_working=wait_for_working) def wait_for_working(self, timeout=60): """ Wait for libvirtd to work. 
:param timeout: Max wait time """ logging.debug('Waiting for %s to work', self.service_exec) return utils_misc.wait_for( self.is_working, timeout=timeout, ) def back_trace(self): """ Get the backtrace from gdb session. """ if self.gdb: return self.gdb.back_trace() else: logging.warning('Can not get back trace without gdb') def insert_break(self, break_func): """ Insert a function breakpoint. :param break_func: Function at which breakpoint inserted """ if self.gdb: return self.gdb.insert_break(break_func) else: logging.warning('Can not insert breakpoint without gdb') def is_working(self): """ Check if libvirtd is start by return status of 'virsh list' """ virsh_cmd = "virsh list" try: process.run(virsh_cmd, timeout=2) return True except process.CmdError: return False def wait_for_stop(self, timeout=60, step=0.1): """ Wait for libvirtd to stop. :param timeout: Max wait time :param step: Checking interval """ logging.debug('Waiting for %s to stop', self.service_exec) if self.gdb: return self.gdb.wait_for_stop(timeout=timeout) else: return wait.wait_for( lambda: not self.running, timeout=timeout, step=step, ) def wait_for_termination(self, timeout=60): """ Wait for libvirtd gdb session to exit. :param timeout: Max wait time """ logging.debug('Waiting for %s to terminate', self.service_exec) if self.gdb: return self.gdb.wait_for_termination(timeout=timeout) else: logging.error("Only gdb session supports wait_for_termination.") def exit(self): """ Exit the libvirtd session. """ if self.gdb: self.gdb.exit() else: if self.tail: self.tail.close() if self.was_running: self.libvirtd_service.start() def deprecation_warning(): """ As the utils_libvirtd.libvirtd_xxx interfaces are deprecated, this function are printing the warning to user. 
""" logging.warning("This function was deprecated, Please use " "class utils_libvirtd.Libvirtd to manage " "libvirtd service.") def libvirtd_start(): libvirtd_instance = Libvirtd() deprecation_warning() return libvirtd_instance.start() def libvirtd_is_running(): libvirtd_instance = Libvirtd() deprecation_warning() return libvirtd_instance.is_running() def libvirtd_stop(): libvirtd_instance = Libvirtd() deprecation_warning() return libvirtd_instance.stop() def libvirtd_restart(): libvirtd_instance = Libvirtd() deprecation_warning() return libvirtd_instance.restart() def service_libvirtd_control(action, session=None): libvirtd_instance = Libvirtd(session=session) deprecation_warning() getattr(libvirtd_instance, action)() def unmark_storage_autostarted(): """ By removing this file libvirt start behavior at boot is simulated. """ cmd = "rm -rf /var/run/libvirt/storage/autostarted" process.run(cmd, ignore_status=True, shell=True)
sathnaga/avocado-vt
virttest/utils_libvirtd.py
Python
gpl-2.0
15,212
0.000197
# Copyright (c) 2016-present, Facebook, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ############################################################################## from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals
xzturn/caffe2
caffe2/python/rnn/__init__.py
Python
apache-2.0
821
0.001218
# coding=utf-8 import abc import logging import tempfile from bireus.client.download_service import AbstractDownloadService from bireus.client.notification_service import NotificationService from bireus.shared import * from bireus.shared.diff_head import DiffHead from bireus.shared.diff_item import DiffItem from bireus.shared.repository import ProtocolException logger = logging.getLogger(__name__) class PatchTask(abc.ABC): _patch_tasks = None def __init__(self, notification_service: NotificationService, download_service: AbstractDownloadService, repository_url: str, repo_path: Path, patch_file: Path): self._notification_service = notification_service self._download_service = download_service self._url = repository_url self._repo_path = repo_path self._patch_file = patch_file self._target_version = None def run(self) -> None: # unpack the patch into a temp folder temp_root = self._repo_path.joinpath(".bireus").joinpath("__temp__") temp_root.mkdir(parents=True, exist_ok=True) tempdir = tempfile.TemporaryDirectory(dir=str(temp_root)) unpack_archive(self._patch_file, tempdir.name) diff_head = DiffHead.load_json_file(Path(tempdir.name).joinpath('.bireus')) if diff_head.protocol != self.get_version(): logger.error(".bireus protocol version %s doesn't match patcher task version %s", diff_head.protocol, self.get_version()) self._notification_service.error(".bireus protocol version %s doesn't match patcher task version %s" % ( diff_head.protocol, self.get_version())) raise Exception(".bireus protocol version %s doesn't match patcher task version %s" % (diff_head.protocol, self.get_version())) self._target_version = diff_head.target_version # begin the patching recursion # note: a DiffHead's first and only item is the top folder itself self.patch(diff_head.items[0], self._repo_path, Path(tempdir.name), False) intermediate_folder = Path(self._repo_path.parent.joinpath(self._repo_path.name + ".patched")) relative_temp_folder = Path(tempdir.name).relative_to(self._repo_path) 
move_file(self._repo_path, intermediate_folder) try: move_file(intermediate_folder.joinpath(relative_temp_folder), self._repo_path) self._repo_path.joinpath(".bireus").unlink() # remove the patch descriptor move_file(intermediate_folder.joinpath(".bireus"), self._repo_path.joinpath(".bireus")) finally: remove_folder(intermediate_folder) @classmethod def get_factory(cls, protocol: int): if cls._patch_tasks is None: cls._patch_tasks = dict() for patch_task_version in PatchTask.__subclasses__(): cls._patch_tasks[patch_task_version.get_version()] = patch_task_version.create if protocol in cls._patch_tasks: return cls._patch_tasks[protocol] else: raise ProtocolException("Protocol version `%s` is not supported in this client version", protocol) @abc.abstractclassmethod def get_version(cls) -> int: pass @abc.abstractclassmethod def create(cls, notification_service: NotificationService, download_service: AbstractDownloadService, repository_url: str, repo_path: Path, patch_file: Path) -> 'PatchTask': """ Abstract factory function for dynamic patcher initialization same params as in constructor! """ pass @abc.abstractmethod def patch(self, diff: DiffItem, base_path: Path, patch_path: Path, inside_zip: bool = False) -> None: pass
Brutus5000/BiReUS
bireus/client/patch_tasks/base.py
Python
mit
3,803
0.003681
# -*- coding: utf-8 -*- from __future__ import absolute_import import logging import time import functools import random import inspect from ._compat import string_types from .client import Client from nsq import protocol from . import async logger = logging.getLogger(__name__) class Writer(Client): """ A high-level producer class built on top of the `Tornado IOLoop <http://tornadoweb.org>`_ supporting async publishing (``PUB`` & ``MPUB`` & ``DPUB``) of messages to ``nsqd`` over the TCP protocol. Example publishing a message repeatedly using a Tornado IOLoop periodic callback:: import nsq import tornado.ioloop import time def pub_message(): writer.pub('test', time.strftime('%H:%M:%S'), finish_pub) def finish_pub(conn, data): print(data) writer = nsq.Writer(['127.0.0.1:4150']) tornado.ioloop.PeriodicCallback(pub_message, 1000).start() nsq.run() Example publshing a message from a Tornado HTTP request handler:: import functools import tornado.httpserver import tornado.ioloop import tornado.options import tornado.web from nsq import Writer, Error from tornado.options import define, options class MainHandler(tornado.web.RequestHandler): @property def nsq(self): return self.application.nsq def get(self): topic = 'log' msg = 'Hello world' msg_cn = 'Hello 世界' self.nsq.pub(topic, msg) # pub self.nsq.mpub(topic, [msg, msg_cn]) # mpub self.nsq.dpub(topic, 60, msg) # dpub # customize callback callback = functools.partial(self.finish_pub, topic=topic, msg=msg) self.nsq.pub(topic, msg, callback=callback) self.write(msg) def finish_pub(self, conn, data, topic, msg): if isinstance(data, Error): # try to re-pub message again if pub failed self.nsq.pub(topic, msg) class Application(tornado.web.Application): def __init__(self, handlers, **settings): self.nsq = Writer(['127.0.0.1:4150']) super(Application, self).__init__(handlers, **settings) :param nsqd_tcp_addresses: a sequence with elements of the form 'address:port' corresponding to the ``nsqd`` instances this writer should 
publish to :param name: a string that is used for logging messages (defaults to first nsqd address) :param \*\*kwargs: passed to :class:`nsq.AsyncConn` initialization """ def __init__(self, nsqd_tcp_addresses, reconnect_interval=15.0, name=None, **kwargs): super(Writer, self).__init__(**kwargs) if not isinstance(nsqd_tcp_addresses, (list, set, tuple)): assert isinstance(nsqd_tcp_addresses, string_types) nsqd_tcp_addresses = [nsqd_tcp_addresses] assert nsqd_tcp_addresses self.name = name or nsqd_tcp_addresses[0] self.nsqd_tcp_addresses = nsqd_tcp_addresses self.conns = {} # Verify keyword arguments valid_args = inspect.getargspec(async.AsyncConn.__init__)[0] diff = set(kwargs) - set(valid_args) assert len(diff) == 0, 'Invalid keyword argument(s): %s' % list(diff) self.conn_kwargs = kwargs assert isinstance(reconnect_interval, (int, float)) self.reconnect_interval = reconnect_interval self.io_loop.add_callback(self._run) def _run(self): logger.info('starting writer...') self.connect() def pub(self, topic, msg, callback=None): self._pub('pub', topic, msg, callback=callback) def mpub(self, topic, msg, callback=None): if isinstance(msg, string_types): msg = [msg] assert isinstance(msg, (list, set, tuple)) self._pub('mpub', topic, msg, callback=callback) def dpub(self, topic, delay_ms, msg, callback=None): self._pub('dpub', topic, msg, delay_ms, callback=callback) def _pub(self, command, topic, msg, delay_ms=None, callback=None): if not callback: callback = functools.partial(self._finish_pub, command=command, topic=topic, msg=msg) if not self.conns: callback(None, protocol.SendError('no connections')) return conn = random.choice(list(self.conns.values())) conn.callback_queue.append(callback) cmd = getattr(protocol, command) if command == 'dpub': args = (topic, delay_ms, msg) else: args = (topic, msg) try: conn.send(cmd(*args)) except Exception: logger.exception('[%s] failed to send %s' % (conn.id, command)) conn.close() def _on_connection_error(self, conn, error, 
**kwargs): super(Writer, self)._on_connection_error(conn, error, **kwargs) while conn.callback_queue: callback = conn.callback_queue.pop(0) callback(conn, error) def _on_connection_response(self, conn, data=None, **kwargs): if conn.callback_queue: callback = conn.callback_queue.pop(0) callback(conn, data) def connect(self): for addr in self.nsqd_tcp_addresses: host, port = addr.split(':') self.connect_to_nsqd(host, int(port)) def connect_to_nsqd(self, host, port): assert isinstance(host, string_types) assert isinstance(port, int) conn = async.AsyncConn(host, port, **self.conn_kwargs) conn.on('identify', self._on_connection_identify) conn.on('identify_response', self._on_connection_identify_response) conn.on('auth', self._on_connection_auth) conn.on('auth_response', self._on_connection_auth_response) conn.on('error', self._on_connection_error) conn.on('response', self._on_connection_response) conn.on('close', self._on_connection_close) conn.on('ready', self._on_connection_ready) conn.on('heartbeat', self.heartbeat) if conn.id in self.conns: return logger.info('[%s] connecting to nsqd', conn.id) conn.connect() conn.callback_queue = [] def _on_connection_ready(self, conn, **kwargs): # re-check to make sure another connection didn't beat this one if conn.id in self.conns: logger.warning( '[%s] connected but another matching connection already exists', conn.id) conn.close() return self.conns[conn.id] = conn def _on_connection_close(self, conn, **kwargs): if conn.id in self.conns: del self.conns[conn.id] for callback in conn.callback_queue: try: callback(conn, protocol.ConnectionClosedError()) except Exception: logger.exception('[%s] uncaught exception in callback', conn.id) logger.warning('[%s] connection closed', conn.id) logger.info('[%s] attempting to reconnect in %0.2fs', conn.id, self.reconnect_interval) reconnect_callback = functools.partial(self.connect_to_nsqd, host=conn.host, port=conn.port) self.io_loop.add_timeout(time.time() + self.reconnect_interval, 
reconnect_callback) def _finish_pub(self, conn, data, command, topic, msg): if isinstance(data, protocol.Error): logger.error('[%s] failed to %s (%s, %s), data is %s', conn.id if conn else 'NA', command, topic, msg, data)
virtuald/pynsq
nsq/writer.py
Python
mit
7,774
0.002059
import errno import os import pwd import shutil import sys from jinja2 import Environment, FileSystemLoader class TutorialEnv: LOCAL_MACHINE = ("Local Machine Condor Pool", "submit-host") USC_HPCC_CLUSTER = ("USC HPCC Cluster", "usc-hpcc") OSG_FROM_ISI = ("OSG from ISI submit node", "osg") XSEDE_BOSCO = ("XSEDE, with Bosco", "xsede-bosco") BLUEWATERS_GLITE = ("Bluewaters, with Glite", "bw-glite") TACC_WRANGLER = ("TACC Wrangler with Glite", "wrangler-glite") OLCF_TITAN = ("OLCF TITAN with Glite", "titan-glite") OLCF_SUMMIT_KUBERNETES_BOSCO = ( "OLCF Summit from Kubernetes using BOSCO", "summit-kub-bosco", ) class TutorialExample: PROCESS = ("Process", "process") PIPELINE = ("Pipeline", "pipeline") SPLIT = ("Split", "split") MERGE = ("Merge", "merge") EPA = ("EPA (requires R)", "r-epa") DIAMOND = ("Diamond", "diamond") CONTAINER = ("Population Modeling using Containers", "population") MPI = ("MPI Hello World", "mpi-hw") def choice(question, options, default): "Ask the user to choose from a short list of named options" while True: sys.stdout.write("{} ({}) [{}]: ".format(question, "/".join(options), default)) answer = sys.stdin.readline().strip() if len(answer) == 0: return default for opt in options: if answer == opt: return answer def yesno(question, default="y"): "Ask the user a yes/no question" while True: sys.stdout.write("{} (y/n) [{}]: ".format(question, default)) answer = sys.stdin.readline().strip().lower() if len(answer) == 0: answer = default if answer == "y": return True elif answer == "n": return False def query(question, default=None): "Ask the user a question and return the response" while True: if default: sys.stdout.write("{} [{}]: ".format(question, default)) else: sys.stdout.write("%s: " % question) answer = sys.stdin.readline().strip().replace(" ", "_") if answer == "": if default: return default else: return answer def optionlist(question, options, default=0): "Ask the user to choose from a list of options" for i, option in enumerate(options): 
print("%d: %s" % (i + 1, option[0])) while True: sys.stdout.write("%s (1-%d) [%d]: " % (question, len(options), default + 1)) answer = sys.stdin.readline().strip() if len(answer) == 0: return options[default][1] try: optno = int(answer) if optno > 0 and optno <= len(options): return options[optno - 1][1] except Exception: pass class Workflow: def __init__(self, workflowdir, sharedir): self.jinja = Environment(loader=FileSystemLoader(sharedir), trim_blocks=True) self.name = os.path.basename(workflowdir) self.workflowdir = workflowdir self.sharedir = sharedir self.properties = {} self.home = os.environ["HOME"] self.user = pwd.getpwuid(os.getuid())[0] self.tutorial = None self.generate_tutorial = False self.tutorial_setup = None self.compute_queue = "default" self.project = "MYPROJ123" sysname, _, _, _, machine = os.uname() if sysname == "Darwin": self.os = "MACOSX" else: # Probably Linux self.os = sysname.upper() self.arch = machine def copy_template(self, template, dest, mode=0o644): "Copy template to dest in workflowdir with mode" path = os.path.join(self.workflowdir, dest) t = self.jinja.get_template(template) t.stream(**self.__dict__).dump(path) os.chmod(path, mode) def copy_dir(self, src, dest): # self.mkdir(dest) if not src.startswith("/"): src = os.path.join(self.sharedir, src) try: dest = os.path.join(self.workflowdir, dest) shutil.copytree(src, dest) except OSError as exc: # python >2.5 if exc.errno == errno.ENOTDIR: shutil.copy(src, dest) else: raise def mkdir(self, path): "Make relative directory in workflowdir" path = os.path.join(self.workflowdir, path) if not os.path.exists(path): os.makedirs(path) def configure(self): # The tutorial is a special case if yesno("Do you want to generate a tutorial workflow?", "n"): self.config = "tutorial" self.daxgen = "tutorial" self.generate_tutorial = True # determine the environment to setup tutorial for self.tutorial_setup = optionlist( "What environment is tutorial to be setup for?", [ TutorialEnv.LOCAL_MACHINE, 
TutorialEnv.USC_HPCC_CLUSTER, TutorialEnv.OSG_FROM_ISI, TutorialEnv.XSEDE_BOSCO, TutorialEnv.BLUEWATERS_GLITE, TutorialEnv.TACC_WRANGLER, TutorialEnv.OLCF_TITAN, TutorialEnv.OLCF_SUMMIT_KUBERNETES_BOSCO, ], ) # figure out what example options to provide examples = [ TutorialExample.PROCESS, TutorialExample.PIPELINE, TutorialExample.SPLIT, TutorialExample.MERGE, TutorialExample.EPA, TutorialExample.CONTAINER, ] if self.tutorial_setup != "osg": examples.append(TutorialExample.DIAMOND) if self.tutorial_setup in [ "bw-glite", "wrangler-glite", "titan-glite", "summit-kub-bosco", ]: examples.append(TutorialExample.MPI) self.project = query( "What project your jobs should run under. For example on TACC there are like : TG-DDM160003 ?" ) self.tutorial = optionlist("What tutorial workflow do you want?", examples) self.setup_tutorial() return # Determine which DAX generator API to use self.daxgen = choice( "What DAX generator API do you want to use?", ["python", "perl", "java", "r"], "python", ) # Determine what kind of site catalog we need to generate self.config = optionlist( "What does your computing infrastructure look like?", [ ("Local Machine Condor Pool", "condorpool"), ("Remote Cluster using Globus GRAM", "globus"), ("Remote Cluster using CREAMCE", "creamce"), ("Local PBS Cluster with Glite", "glite"), ("Remote PBS Cluster with BOSCO and SSH", "bosco"), ], ) # Find out some information about the site self.sitename = query("What do you want to call your compute site?", "compute") self.os = choice( "What OS does your compute site have?", ["LINUX", "MACOSX"], self.os ) self.arch = choice( "What architecture does your compute site have?", ["x86_64", "x86"], self.arch, ) def setup_tutorial(self): """ Set up tutorial for pre-defined computing environments :return: """ if self.tutorial_setup is None: self.tutorial_setup = "submit-host" if self.tutorial_setup == "submit-host": self.sitename = "condorpool" elif self.tutorial_setup == "usc-hpcc": self.sitename = "usc-hpcc" 
self.config = "glite" self.compute_queue = "quick" # for running the whole workflow as mpi job self.properties["pegasus.job.aggregator"] = "mpiexec" elif self.tutorial_setup == "osg": self.sitename = "osg" self.os = "linux" if not yesno("Do you want to use Condor file transfers", "y"): self.staging_site = "isi_workflow" elif self.tutorial_setup == "xsede-bosco": self.sitename = "condorpool" elif self.tutorial_setup == "bw-glite": self.sitename = "bluewaters" self.config = "glite" self.compute_queue = "normal" elif self.tutorial_setup == "wrangler-glite": self.sitename = "wrangler" self.config = "glite" self.compute_queue = "normal" elif self.tutorial_setup == "titan-glite": self.sitename = "titan" self.config = "glite" self.compute_queue = "titan" elif self.tutorial_setup == "summit-kub-bosco": self.sitename = "summit" self.config = "bosco" self.compute_queue = "batch" return def generate(self): os.makedirs(self.workflowdir) if self.tutorial != "population": self.mkdir("input") self.mkdir("output") if self.generate_tutorial: self.copy_template("%s/tc.txt" % self.tutorial, "tc.txt") if self.tutorial == "r-epa": self.copy_template("%s/daxgen.R" % self.tutorial, "daxgen.R") elif self.tutorial != "mpi-hw": self.copy_template("%s/daxgen.py" % self.tutorial, "daxgen.py") if self.tutorial == "diamond": # Executables used by the diamond workflow self.mkdir("bin") self.copy_template( "diamond/transformation.py", "bin/preprocess", mode=0o755 ) self.copy_template( "diamond/transformation.py", "bin/findrange", mode=0o755 ) self.copy_template( "diamond/transformation.py", "bin/analyze", mode=0o755 ) # Diamond input file self.copy_template("diamond/f.a", "input/f.a") elif self.tutorial == "split": # Split workflow input file self.mkdir("bin") self.copy_template("split/pegasus.html", "input/pegasus.html") elif self.tutorial == "r-epa": # Executables used by the R-EPA workflow self.mkdir("bin") self.copy_template( "r-epa/epa-wrapper.sh", "bin/epa-wrapper.sh", mode=0o755 ) 
self.copy_template("r-epa/setupvar.R", "bin/setupvar.R", mode=0o755) self.copy_template( "r-epa/weighted.average.R", "bin/weighted.average.R", mode=0o755 ) self.copy_template( "r-epa/cumulative.percentiles.R", "bin/cumulative.percentiles.R", mode=0o755, ) elif self.tutorial == "population": self.copy_template("%s/Dockerfile" % self.tutorial, "Dockerfile") self.copy_template("%s/Singularity" % self.tutorial, "Singularity") self.copy_template( "%s/tc.txt.containers" % self.tutorial, "tc.txt.containers" ) self.copy_dir("%s/scripts" % self.tutorial, "scripts") self.copy_dir("%s/data" % self.tutorial, "input") # copy the mpi wrapper, c code and mpi elif self.tutorial == "mpi-hw": # copy the mpi wrapper, c code and mpi example # Executables used by the mpi-hw workflow self.mkdir("bin") self.copy_template( "%s/pegasus-mpi-hw.c" % self.tutorial, "pegasus-mpi-hw.c" ) self.copy_template("%s/Makefile" % self.tutorial, "Makefile") self.copy_template("%s/daxgen.py.template" % self.tutorial, "daxgen.py") self.copy_template( "%s/mpi-hello-world-wrapper" % self.tutorial, "bin/mpi-hello-world-wrapper", mode=0o755, ) self.copy_template("split/pegasus.html", "input/f.in") else: self.copy_template("tc.txt", "tc.txt") if self.daxgen == "python": self.copy_template("daxgen/daxgen.py", "daxgen.py") elif self.daxgen == "perl": self.copy_template("daxgen/daxgen.pl", "daxgen.pl") elif self.daxgen == "java": self.copy_template("daxgen/DAXGen.java", "DAXGen.java") elif self.daxgen == "r": self.copy_template("daxgen/daxgen.R", "daxgen.R") else: assert False self.copy_template("sites.xml", "sites.xml") self.copy_template("plan_dax.sh", "plan_dax.sh", mode=0o755) self.copy_template("plan_cluster_dax.sh", "plan_cluster_dax.sh", mode=0o755) self.copy_template("generate_dax.sh", "generate_dax.sh", mode=0o755) self.copy_template("README.md", "README.md") self.copy_template("rc.txt", "rc.txt") self.copy_template("pegasus.properties", "pegasus.properties") if self.tutorial == "diamond": if 
self.tutorial_setup == "wrangler-glite": self.copy_template( "pmc-wrapper.wrangler", "bin/pmc-wrapper", mode=0o755 ) elif self.tutorial_setup == "titan-glite": self.copy_template("pmc-wrapper.titan", "bin/pmc-wrapper", mode=0o755) elif self.tutorial_setup == "wrangler-glite": self.copy_template( "pmc-wrapper.wrangler", "bin/pmc-wrapper", mode=0o755 ) elif self.tutorial_setup == "summit-kub-bosco": self.copy_template("pmc-wrapper.summit", "bin/pmc-wrapper", mode=0o755) if self.generate_tutorial: sys.stdout.write( "Pegasus Tutorial setup for example workflow - %s for execution on %s in directory %s\n" % (self.tutorial, self.tutorial_setup, self.workflowdir) ) def usage(): print("Usage: %s WORKFLOW_DIR" % sys.argv[0]) def main(pegasus_share_dir): if len(sys.argv) != 2: usage() exit(1) if "-h" in sys.argv: usage() exit(1) workflowdir = sys.argv[1] if os.path.exists(workflowdir): print("ERROR: WORKFLOW_DIR '%s' already exists" % workflowdir) exit(1) workflowdir = os.path.abspath(workflowdir) sharedir = os.path.join(pegasus_share_dir, "init") w = Workflow(workflowdir, sharedir) w.configure() w.generate()
pegasus-isi/pegasus
packages/pegasus-python/src/Pegasus/init-old.py
Python
apache-2.0
14,973
0.001069
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    # Auto-generated South schema migration.
    # Forward direction: Graph.slug becomes an indexed SlugField(128),
    # Graph.name becomes a plain (unindexed) CharField(255).
    # Backward direction restores the previous field types and indexes.

    def forwards(self, orm):
        # Changing field 'Graph.slug'
        db.alter_column('muparse_graph', 'slug', self.gf('django.db.models.fields.SlugField')(max_length=128, null=True))
        # Adding index on 'Graph', fields ['slug']
        db.create_index('muparse_graph', ['slug'])

        # Changing field 'Graph.name'
        db.alter_column('muparse_graph', 'name', self.gf('django.db.models.fields.CharField')(max_length=255))
        # Removing index on 'Graph', fields ['name']
        db.delete_index('muparse_graph', ['name'])

    def backwards(self, orm):
        # Adding index on 'Graph', fields ['name']
        db.create_index('muparse_graph', ['name'])

        # Removing index on 'Graph', fields ['slug']
        db.delete_index('muparse_graph', ['slug'])

        # Changing field 'Graph.slug'
        db.alter_column('muparse_graph', 'slug', self.gf('django.db.models.fields.CharField')(max_length=128, null=True))

        # Changing field 'Graph.name'
        db.alter_column('muparse_graph', 'name', self.gf('django.db.models.fields.SlugField')(max_length=255))

    # Frozen ORM snapshot used by South to build the `orm` object passed to
    # forwards()/backwards(). Generated code — do not edit by hand.
    models = {
        'muparse.graph': {
            'Meta': {'ordering': "['name']", 'object_name': 'Graph'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['muparse.GraphCategory']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'muparse.graphcategory': {
            'Meta': {'ordering': "['name']", 'object_name': 'GraphCategory'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'muparse.node': {
            'Meta': {'ordering': "['name']", 'object_name': 'Node'},
            'graphs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['muparse.Graph']", 'null': 'True', 'through': "orm['muparse.NodeGraphs']", 'blank': 'True'}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['muparse.NodeGroup']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '512'})
        },
        'muparse.nodegraphs': {
            'Meta': {'object_name': 'NodeGraphs'},
            'baseurl': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
            'graph': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['muparse.Graph']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'node': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['muparse.Node']"}),
            'pageurl': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'muparse.nodegroup': {
            'Meta': {'ordering': "['name']", 'object_name': 'NodeGroup'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '512'})
        },
        'muparse.savedsearch': {
            'Meta': {'object_name': 'SavedSearch'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'display_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'graphs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['muparse.NodeGraphs']", 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        }
    }

    complete_apps = ['muparse']
grnet/mupy
muparse/migrations/0004_auto__chg_field_graph_slug__chg_field_graph_name.py
Python
gpl-3.0
4,828
0.007457
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import uuid

from oslo_db.sqlalchemy import models
from oslo_utils import timeutils
from sqlalchemy import BigInteger
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy.ext import declarative
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import Numeric
from sqlalchemy.orm import backref
from sqlalchemy.orm import composite
from sqlalchemy.orm import relationship
from sqlalchemy import String
from sqlalchemy import Text

import glance.artifacts as ga
from glance.common import semver_db
from glance import i18n
from oslo_log import log as os_logging

BASE = declarative.declarative_base()
LOG = os_logging.getLogger(__name__)
_LW = i18n._LW


class ArtifactBase(models.ModelBase, models.TimestampMixin):
    """Base class for Artifact Models.

    Adds InnoDB/utf8 table defaults, created_at/updated_at timestamp
    columns, and dict-like helpers shared by all artifact tables.
    """

    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    __table_initialized__ = False
    # Attributes that must not be set directly through the model's
    # dict-style interface.
    __protected_attributes__ = set([
        "created_at", "updated_at"])

    created_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False)
    updated_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False, onupdate=lambda: timeutils.utcnow())

    def save(self, session=None):
        """Persist this object, defaulting to the global DB session."""
        # Imported lazily to avoid a circular import with the db api module.
        from glance.db.sqlalchemy import api as db_api
        super(ArtifactBase, self).save(session or db_api.get_session())

    def keys(self):
        return self.__dict__.keys()

    def values(self):
        return self.__dict__.values()

    def items(self):
        return self.__dict__.items()

    def to_dict(self):
        """Return a plain dict of this row's column name/value pairs."""
        d = {}
        for c in self.__table__.columns:
            d[c.name] = self[c.name]
        return d


def _parse_property_type_value(prop, show_text_properties=True):
    """Return (type_name, value) for the first non-NULL typed column of *prop*.

    Artifact properties store their value in exactly one of several typed
    columns (int_value, string_value, ...); the returned type name is the
    column name with the '_value' suffix stripped. Returns (None, None)
    when no column is set (or only text_value is set and text properties
    are hidden).
    """
    columns = [
        'int_value',
        'string_value',
        'bool_value',
        'numeric_value']
    if show_text_properties:
        columns.append('text_value')

    for prop_type in columns:
        if getattr(prop, prop_type) is not None:
            return prop_type.rpartition('_')[0], getattr(prop, prop_type)

    return None, None


class Artifact(BASE, ArtifactBase):
    """Top-level artifact row with composite semver version columns."""

    __tablename__ = 'artifacts'
    __table_args__ = (
        Index('ix_artifact_name_and_version', 'name', 'version_prefix',
              'version_suffix'),
        Index('ix_artifact_type', 'type_name', 'type_version_prefix',
              'type_version_suffix'),
        Index('ix_artifact_state', 'state'),
        Index('ix_artifact_owner', 'owner'),
        Index('ix_artifact_visibility', 'visibility'),
        {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'})
    __protected_attributes__ = ArtifactBase.__protected_attributes__.union(
        set(['published_at', 'deleted_at']))

    id = Column(String(36), primary_key=True,
                default=lambda: str(uuid.uuid4()))
    name = Column(String(255), nullable=False)
    type_name = Column(String(255), nullable=False)
    # Semantic versions are split into prefix/suffix/meta columns and
    # exposed as a single DBVersion composite attribute.
    type_version_prefix = Column(BigInteger, nullable=False)
    type_version_suffix = Column(String(255))
    type_version_meta = Column(String(255))
    type_version = composite(semver_db.DBVersion, type_version_prefix,
                             type_version_suffix, type_version_meta)
    version_prefix = Column(BigInteger, nullable=False)
    version_suffix = Column(String(255))
    version_meta = Column(String(255))
    version = composite(semver_db.DBVersion, version_prefix,
                        version_suffix, version_meta)
    description = Column(Text)
    visibility = Column(String(32), nullable=False)
    state = Column(String(32), nullable=False)
    owner = Column(String(255), nullable=False)
    published_at = Column(DateTime)
    deleted_at = Column(DateTime)

    def to_dict(self, show_level=ga.Showlevel.BASIC,
                show_text_properties=True):
        """Serialize the artifact (plus tags/properties/blobs) to a dict.

        The raw version columns are replaced by their string composite
        forms. With show_level == Showlevel.NONE only the column data and
        tags are returned; otherwise properties and blobs are included.
        """
        d = super(Artifact, self).to_dict()

        # Expose the composite versions as strings instead of the six
        # underlying prefix/suffix/meta columns.
        d.pop('type_version_prefix')
        d.pop('type_version_suffix')
        d.pop('type_version_meta')
        d.pop('version_prefix')
        d.pop('version_suffix')
        d.pop('version_meta')
        d['type_version'] = str(self.type_version)
        d['version'] = str(self.version)
        tags = []
        for tag in self.tags:
            tags.append(tag.value)
        d['tags'] = tags
        if show_level == ga.Showlevel.NONE:
            return d

        properties = {}

        # sort properties — positional values must come out in order, and
        # the in-place sort mutates the loaded relationship list.
        self.properties.sort(key=lambda elem: (elem.name, elem.position))

        for prop in self.properties:
            proptype, propvalue = _parse_property_type_value(
                prop, show_text_properties)
            if proptype is None:
                continue
            if prop.position is not None:
                # A non-NULL position marks this row as one element of an
                # array-typed property.
                # make array
                for p in properties.keys():
                    if p == prop.name:
                        # add value to array
                        properties[p]['value'].append(dict(type=proptype,
                                                           value=propvalue))
                        break
                else:
                    # for-else: no existing entry matched, so create the
                    # array wrapper for this property name.
                    # create new array
                    p = dict(type='array',
                             value=[])
                    p['value'].append(dict(type=proptype,
                                           value=propvalue))
                    properties[prop.name] = p
            else:
                # make scalar
                properties[prop.name] = dict(type=proptype,
                                             value=propvalue)
        d['properties'] = properties

        blobs = {}
        # sort blobs (same in-place ordering trick as properties above)
        self.blobs.sort(key=lambda elem: elem.position)

        for blob in self.blobs:
            locations = []
            # sort locations
            blob.locations.sort(key=lambda elem: elem.position)
            for loc in blob.locations:
                locations.append(dict(value=loc.value,
                                      status=loc.status))
            # Blobs are grouped by name; each name maps to a list because a
            # blob may be stored as multiple items (item_key).
            if blob.name in blobs:
                blobs[blob.name].append(dict(size=blob.size,
                                             checksum=blob.checksum,
                                             locations=locations,
                                             item_key=blob.item_key))
            else:
                blobs[blob.name] = []
                blobs[blob.name].append(dict(size=blob.size,
                                             checksum=blob.checksum,
                                             locations=locations,
                                             item_key=blob.item_key))

        d['blobs'] = blobs

        return d


class ArtifactDependency(BASE, ArtifactBase):
    """Directed dependency edge between artifacts.

    source depends on dest; origin records the artifact that introduced
    the (possibly transitive) dependency, and is_direct distinguishes
    direct from transitive edges.
    """

    __tablename__ = 'artifact_dependencies'
    __table_args__ = (Index('ix_artifact_dependencies_source_id',
                            'artifact_source'),
                      Index('ix_artifact_dependencies_origin_id',
                            'artifact_origin'),
                      Index('ix_artifact_dependencies_dest_id',
                            'artifact_dest'),
                      Index('ix_artifact_dependencies_direct_dependencies',
                            'artifact_source', 'is_direct'),
                      {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'})

    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_source = Column(String(36), ForeignKey('artifacts.id'),
                             nullable=False)
    artifact_dest = Column(String(36), ForeignKey('artifacts.id'),
                           nullable=False)
    artifact_origin = Column(String(36), ForeignKey('artifacts.id'),
                             nullable=False)
    is_direct = Column(Boolean, nullable=False)
    position = Column(Integer)
    name = Column(String(36))

    # Deleting a source artifact cascades to its dependency rows.
    source = relationship('Artifact',
                          backref=backref('dependencies', cascade="all, "
                                                                  "delete"),
                          foreign_keys="ArtifactDependency.artifact_source")
    dest = relationship('Artifact',
                        foreign_keys="ArtifactDependency.artifact_dest")
    origin = relationship('Artifact',
                          foreign_keys="ArtifactDependency.artifact_origin")


class ArtifactTag(BASE, ArtifactBase):
    """Free-form string tag attached to an artifact."""

    __tablename__ = 'artifact_tags'
    __table_args__ = (Index('ix_artifact_tags_artifact_id', 'artifact_id'),
                      Index('ix_artifact_tags_artifact_id_tag_value',
                            'artifact_id', 'value'),
                      {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'},)

    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_id = Column(String(36), ForeignKey('artifacts.id'),
                         nullable=False)
    artifact = relationship(Artifact,
                            backref=backref('tags',
                                            cascade="all, delete-orphan"))
    value = Column(String(255), nullable=False)


class ArtifactProperty(BASE, ArtifactBase):
    """Typed key/value property of an artifact.

    Exactly one of the *_value columns is expected to be non-NULL; a
    non-NULL position marks the row as an element of an array property.
    See _parse_property_type_value().
    """

    __tablename__ = 'artifact_properties'
    __table_args__ = (
        Index('ix_artifact_properties_artifact_id', 'artifact_id'),
        Index('ix_artifact_properties_name', 'name'),
        {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'},)

    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_id = Column(String(36), ForeignKey('artifacts.id'),
                         nullable=False)
    artifact = relationship(Artifact,
                            backref=backref('properties',
                                            cascade="all, delete-orphan"))
    name = Column(String(255), nullable=False)
    string_value = Column(String(255))
    int_value = Column(Integer)
    numeric_value = Column(Numeric)
    bool_value = Column(Boolean)
    text_value = Column(Text)
    position = Column(Integer)


class ArtifactBlob(BASE, ArtifactBase):
    """Binary payload metadata (size/checksum) attached to an artifact."""

    __tablename__ = 'artifact_blobs'
    __table_args__ = (
        Index('ix_artifact_blobs_artifact_id', 'artifact_id'),
        Index('ix_artifact_blobs_name', 'name'),
        {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'},)

    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_id = Column(String(36), ForeignKey('artifacts.id'),
                         nullable=False)
    name = Column(String(255), nullable=False)
    item_key = Column(String(329))
    size = Column(BigInteger(), nullable=False)
    checksum = Column(String(32))
    position = Column(Integer)
    artifact = relationship(Artifact,
                            backref=backref('blobs',
                                            cascade="all, delete-orphan"))


class ArtifactBlobLocation(BASE, ArtifactBase):
    """Storage location (URI + status) for one artifact blob."""

    __tablename__ = 'artifact_blob_locations'
    __table_args__ = (Index('ix_artifact_blob_locations_blob_id',
                            'blob_id'),
                      {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'})

    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    blob_id = Column(String(36), ForeignKey('artifact_blobs.id'),
                     nullable=False)
    value = Column(Text, nullable=False)
    position = Column(Integer)
    status = Column(String(36), default='active', nullable=True)
    blob = relationship(ArtifactBlob,
                        backref=backref('locations',
                                        cascade="all, delete-orphan"))


def register_models(engine):
    """Create database tables for all models with the given engine."""
    models = (Artifact, ArtifactTag, ArtifactProperty,
              ArtifactBlob, ArtifactBlobLocation, ArtifactDependency)
    for model in models:
        model.metadata.create_all(engine)


def unregister_models(engine):
    """Drop database tables for all models with the given engine."""
    # Dropped in reverse dependency order so FK constraints are satisfied.
    models = (ArtifactDependency, ArtifactBlobLocation, ArtifactBlob,
              ArtifactProperty, ArtifactTag, Artifact)
    for model in models:
        model.metadata.drop_all(engine)
scripnichenko/glance
glance/db/sqlalchemy/models_artifacts.py
Python
apache-2.0
12,959
0
from django.shortcuts import render_to_response,RequestContext
from django.http import HttpResponse,HttpResponseRedirect
from urllib import urlencode
from time import sleep
from vertex.models import *
from django.core.exceptions import ObjectDoesNotExist
from login.views import authDetail


def home(request):
    """Render the signed-in user's home page.

    Unauthenticated requests are redirected to /login/ with the server
    message passed along as a ``server_message`` query parameter;
    authenticated requests render home.html with the user's vertex and
    its five most recently forwarded flows.
    """
    # FIX: the original called authDetail(request) up to three times per
    # request. Call it once and reuse the (is_auth, payload) result;
    # assumes authDetail is stable within a single request — element [1]
    # is a message string on failure and the client object on success.
    auth = authDetail(request)

    if not auth[0]:
        sleep(3)  # crude throttle before bouncing back to the login page
        query_str = urlencode({'server_message': auth[1]})
        return HttpResponseRedirect('/login/?' + query_str)

    client = auth[1]
    vertex = Vertex.objects.get(email=client.email)
    # Five most recently forwarded flows for the dashboard.
    flows = vertex.flow_set.order_by('-last_forward_date')[:5]
    return render_to_response(
        'home.html',
        {"USER_EMAIL": client.email,
         "login": True,
         'VERTEX_DETAIL': client,
         'flows': flows,
         'VERTEX_ID': client.id},
        context_instance=RequestContext(request))

# Create your views here.
bardia-heydarinejad/Graph
home/views.py
Python
mit
852
0.038732
import unittest

from efront import repo
from mock import Mock


class TestTarget(unittest.TestCase):
    """str(Target) prefixes the target name with the roots it was added to."""

    def setUp(self):
        target = repo.Target("myTarget")
        target.root_names = ["git", "svn"]
        self.target = target

    def _rendered(self, *roots):
        # Register each root on the target, then return its string form.
        for root in roots:
            self.target.add(root)
        return str(self.target)

    def test_print_svn(self):
        self.assertEqual(self._rendered("svn"), " svn myTarget")

    def test_print_git(self):
        self.assertEqual(self._rendered("git"), "git myTarget")

    def test_print_both(self):
        self.assertEqual(self._rendered("git", "svn"), "git svn myTarget")


if __name__ == '__main__':
    unittest.main()
sbergot/python
efront/test/testTarget.py
Python
bsd-3-clause
696
0.002874
# -*- coding: utf-8 -*-

from protoLib.getStuff import getDjangoModel
from protoExt.utils.utilsWeb import JsonError , doReturn
from . import validateRequest

import json

from django.contrib.admin.sites import site
from protoExt.utils.utilsBase import traceError


def _find_admin_action(modelAdmin, actionName):
    """Return the admin action callable named *actionName*, or None.

    Helper shared by doAdminAction / doAdminDetailAction; replaces the
    buggy for/if/break loops of the original implementation.
    """
    actions = getattr(modelAdmin, 'actions', None) or []
    return next((a for a in actions if a.__name__ == actionName), None)


def protoExecuteAction(request):
    """Execute a named action ("Ejecuta una opcion").

    Reads the action name, selected keys, parameters and action definition
    from the POST body, validates the selection against the action's
    selectionMode, resolves the target model/admin and dispatches to the
    matching handler.
    """
    cBase, message = validateRequest(request)
    if message:
        # NOTE(review): mirrors validateRequest's (value, error) contract;
        # callers are expected to handle the tuple form.
        return None, message

    cBase.actionName = request.POST.get('actionName', '')

    cBase.selectedKeys = request.POST.get('selectedKeys', '')
    cBase.selectedKeys = json.loads(cBase.selectedKeys)

    cBase.parameters = request.POST.get('parameters', [])
    cBase.parameters = json.loads(cBase.parameters)

    cBase.actionDef = request.POST.get('actionDef', {})
    cBase.actionDef = json.loads(cBase.actionDef)

    # Validate the selection size against the action's selection mode.
    selection_mode = cBase.actionDef.get('selectionMode', '')
    if selection_mode == 'optional':
        if len(cBase.selectedKeys) > 1:
            return JsonError('too many records selected')
    elif selection_mode != 'none' and len(cBase.selectedKeys) == 0:
        return JsonError('No record selected')

    # Resolve the model and its admin registration.
    try:
        cBase.model = getDjangoModel(cBase.viewEntity)
        cBase.modelAdmin = site._registry.get(cBase.model)
    except Exception:
        # FIX: was a bare ``except:``; narrowed so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        return JsonError('Model notFound')

    # details
    if selection_mode == 'details':
        cBase.detKeys = request.POST.get('detKeys', {})
        cBase.detKeys = json.loads(cBase.detKeys)
        return doAdminDetailAction(request, cBase)

    # elif cBase.actionDef.get('actionType', '') == 'wflow':
    #     return doWfAction( request, cBase )

    elif hasattr(cBase.modelAdmin, 'actions'):
        return doAdminAction(request, cBase)

    else:
        return JsonError('Action notFound')


def doAdminAction(request, cBase):
    """Run a site-level or ModelAdmin action over the selected records.

    The action receives (modelAdmin, request, queryset, parameters) and
    its return value is wrapped by doReturn().
    """
    # Prefer a globally registered site action; fall back to the
    # ModelAdmin's own ``actions`` list.
    try:
        action = site.get_action(cBase.actionName)
    except Exception:
        # FIX: was a bare ``except:``.
        action = None

    if action is None:
        action = _find_admin_action(cBase.modelAdmin, cBase.actionName)

    if action is None:
        return JsonError('Action notFound')

    Qs = cBase.model.objects.select_related()
    Qs = Qs.filter(pk__in=cBase.selectedKeys)

    try:
        returnObj = action(cBase.modelAdmin, request, Qs, cBase.parameters)
        return doReturn(returnObj)

    except Exception as e:
        traceError()
        return JsonError(str(e))


def doAdminDetailAction(request, cBase):
    """Run a detail-mode admin action with selected and detail keys.

    BUG FIX: the original for/if/break scan left ``action`` bound to the
    *last* entry of ``modelAdmin.actions`` when no name matched (silently
    executing the wrong action), and raised NameError when the list was
    empty. A sentinel-based lookup is used instead.
    """
    action = _find_admin_action(cBase.modelAdmin, cBase.actionName)
    if action is None:
        return JsonError('Action notFound')

    try:
        returnObj = action(cBase.modelAdmin, request, cBase.selectedKeys,
                           cBase.detKeys, cBase.parameters)
        return doReturn(returnObj)

    except Exception as e:
        return JsonError(str(e))


#TODO: Wf
from .prototypeWfActions import doWfAction

# ----------------------------------------
DarioGT/docker-carra
src/protoExt/views/protoActionAction.py
Python
mit
3,186
0.015066
class CSharpReference():
    """Record of a single reference to a C# symbol.

    Holds the referenced object together with the file name and line
    number where the reference occurs. Sentinel defaults (None / -1 /
    empty string) mean "not resolved yet".
    """

    def __init__(self):
        # File location of the reference.
        self.file_name = ''
        self.line_in_file = -1
        # The object being referenced (resolved later).
        self.reference_object = None
adrianogil/SublimeUnityIntel
unityparser/csharp/csharp_reference.py
Python
mit
147
0
"""OpenID Extension modules.""" __all__ = ['ax', 'pape', 'sreg'] from openid.extensions.draft import pape5 as pape
SohKai/ChronoLogger
web/flask/lib/python2.7/site-packages/openid/extensions/__init__.py
Python
mit
117
0
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ astroquery.solarsystem.jpl -------------------------- a collection of data services provided by JPL """ from .sbdb import * from .horizons import * from . import *
imbasimba/astroquery
astroquery/solarsystem/jpl/__init__.py
Python
bsd-3-clause
235
0
# Mix two WAV files sample-by-sample into a third file.
# NOTE: this is Python 2 code (print statement, 'Int16' dtype string).
__author__ = 'ray'

import wave
import numpy as np

wav_1_path = "origin.wav"
wav_2_path = "clap.wav"
wav_out_path = "mixed.wav"

wav_1 = wave.open(wav_1_path, 'rb')
wav_2 = wave.open(wav_2_path, 'rb')
wav_out = wave.open(wav_out_path, 'wb')

# Frame counts decide which input's parameters (channels/rate/width) the
# output inherits: the longer file wins.
len_1 = wav_1.getnframes()
len_2 = wav_2.getnframes()
if len_1>len_2:
    wav_out.setparams(wav_1.getparams())
else:
    wav_out.setparams(wav_2.getparams())

# Decode raw frames as 16-bit signed samples.
# NOTE(review): np.fromstring is deprecated in modern NumPy (frombuffer);
# also assumes both inputs really are 16-bit PCM — confirm.
signal_1 = np.fromstring(wav_1.readframes(-1), 'Int16')
signal_2 = np.fromstring(wav_2.readframes(-1), 'Int16')

# Add the overlapping region and append the tail of the longer signal.
# NOTE(review): int16 addition wraps on overflow in NumPy, so loud
# overlapping sections can glitch instead of clipping — confirm acceptable.
if len_1>len_2:
    signal_out = np.append(signal_1[:len_2]+signal_2, signal_1[len_2:]).tostring()
elif len_2>len_1:
    signal_out = np.append(signal_1+signal_2[:len_1], signal_2[len_1:]).tostring()
else:
    signal_out = (signal_1+signal_2).tostring()

wav_out.writeframes(signal_out)

wav_1.close()
wav_2.close()
wav_out.close()

print 'done!'
raybrshen/pattern_recognition
noise_detection/tools/mix_wav.py
Python
apache-2.0
864
0.005787
# -*- coding: utf-8 -*- # Copyright (C) 2015 ZetaOps Inc. # # This file is licensed under the GNU General Public License v3 # (GPLv3). See LICENSE.txt for details. """HITAP Nufus Guncelle Hitap'a personelin Nufus bilgilerinin guncellenmesini yapar. """ from ulakbus.services.personel.hitap.hitap_service import ZatoHitapService class HizmetNufusGuncelle(ZatoHitapService): """ HITAP Ekleme servisinden kalıtılmış Hizmet Nufus Bilgi Guncelleme servisi """ HAS_CHANNEL = True service_dict = { 'service_name': 'HizmetNufusUpdate', 'fields': { 'ad': 'ad', 'cinsiyet': 'cinsiyet', 'dogumTarihi': 'dogum_tarihi', 'durum': 'durum', 'emekliSicilNo': 'emekli_sicil_no', 'ilkSoyad': 'ilk_soy_ad', 'kurumSicili': 'kurum_sicil', 'maluliyetKod': 'maluliyet_kod', 'memuriyetBaslamaTarihi': 'memuriyet_baslama_tarihi', 'sebep': 'sebep', 'soyad': 'soyad', 'tckn': 'tckn', 'aciklama': 'aciklama', 'yetkiSeviyesi': 'yetki_seviyesi', 'kurumaBaslamaTarihi': 'kuruma_baslama_tarihi', 'gorevTarihi6495': 'gorev_tarihi_6495', 'emekliSicil6495': 'emekli_sicil_6495' }, 'date_filter': ['dogum_tarihi', 'memuriyet_baslama_tarihi', 'kuruma_baslama_tarihi'], 'required_fields': ['tckn', 'ad', 'soyad', 'dogumTarihi', 'cinsiyet', 'emekliSicilNo', 'memuriyetBaslamaTarihi', 'durum', 'kurumSicili', 'maluliyetKod', 'sebep', 'yetkiSeviyesi'] }
zetaops/ulakbus
ulakbus/services/personel/hitap/hizmet_nufus_guncelle.py
Python
gpl-3.0
1,651
0.001821
# -*- coding: utf-8 -*- #----------------------------------------------------------------------------- # Copyright (c) 2013, PyInstaller Development Team. # # Distributed under the terms of the GNU General Public License with exception # for distributing bootloader. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- import sys from PyQt4 import Qt from PyQt4 import QtCore from PyQt4 import QtGui class MyDialog(QtGui.QDialog): def __init__(self): super(MyDialog, self).__init__() self.label = Qt.QLabel( u"Press <ESC> to exit. Some non-ascii chars: řčšěíáŘ", self) self.setWindowTitle("Hello World from PyQt4") #self.resize(500, 300) self.show() def sizeHint(self): return self.label.sizeHint() def keyPressEvent(self, event): if event.key() == QtCore.Qt.Key_Escape: self.close() def main(): app = Qt.QApplication(sys.argv) read_formats = ', '.join([unicode(format).lower() \ for format in QtGui.QImageReader.supportedImageFormats()]) print("Qt4 plugin paths: " + unicode(list(app.libraryPaths()))) print("Qt4 image read support: " + read_formats) print('Qt4 Libraries path: ' + unicode(QtCore.QLibraryInfo.location(QtCore.QLibraryInfo.LibrariesPath))) ex = MyDialog() app.exec_() if __name__ == "__main__": main()
bl4ckdu5t/registron
tests/interactive/test_pyqt4.py
Python
mit
1,490
0.00472
# Flask REST API for browsing/marking TV shows, seasons and episodes.
# All routes require a base64-encoded device id in the Authorization header.

from functools import wraps
from flask import Flask, request, jsonify, g
import base64

import libs.db_connector as dbconn
import libs.db_query as dao
import libs.json_keys as jkey
import libs.json_builder as jparse
import config.api_config as apiconf

# region constants
DEBUG = True
LOCAL_NETWORK = "0.0.0.0"
V = apiconf.VERSION  # API version segment used in every route path
# endregion

app = Flask(__name__)


# region getter
def db(autocommit=True):
    """Return the per-request DB connection, creating it on first use.

    NOTE: the connection is cached on flask.g, so the ``autocommit`` flag
    only takes effect for the call that first opens the connection.
    """
    if not hasattr(g, 'db'):
        g.db = dbconn.connect(autocommit)
    return g.db
# endregion


# region lifecycle
@app.teardown_request
def teardown_request(exception):
    # Close the request-scoped connection opened by db(), if any.
    if hasattr(g, 'db'):
        dbconn.disconnect(g.db)
# endregion


# region decorator
def requires_authorization(f):
    """Route decorator: reject requests without a whitelisted device id.

    The Authorization header carries a base64-encoded device id that must
    appear in apiconf.AUTHORIZED_DEVICE_IDS.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        b64_device_auth_id = request.headers.get('authorization')
        if not b64_device_auth_id:
            return unauthorized()
        try:
            device_auth_id = base64.b64decode(b64_device_auth_id)
            if device_auth_id in apiconf.AUTHORIZED_DEVICE_IDS:
                return f(*args, **kwargs)
        except TypeError as e:
            # NOTE(review): on Python 3 malformed base64 raises
            # binascii.Error (a ValueError), not TypeError — confirm the
            # target runtime, otherwise bad input becomes a 500.
            pass
        return unauthorized()
    return wrapper
# endregion


# region routes
@app.route("/api/" + V + "/shows")
@requires_authorization
def get_all_shows():
    # Optional ?search= substring filter.
    search = request.args.get('search', '')
    rows = dao.get_all_shows(db(), search)
    return response(jparse.parse_shows(rows))


@app.route("/api/" + V + "/shows/<id_show>")
@requires_authorization
def get_show(id_show):
    """Show detail: seasons for multi-season shows, episodes otherwise."""
    rows = dao.get_show_info(db(), id_show)
    tv_json = jparse.parse_show_info(rows)
    if tv_json[jkey.SEASON_COUNT] > 1:
        rows = dao.get_show_seasons(db(), id_show)
        jparse.add_parsed_seasons(tv_json, rows)
    else:
        rows = dao.get_show_episodes(db(), id_show)
        jparse.add_parsed_episodes(tv_json, rows)
    return response(tv_json)


@app.route("/api/" + V + "/seasons/<id_season>")
@requires_authorization
def get_season(id_season):
    rows = dao.get_season(db(), id_season)
    season_json = jparse.parse_season(rows)
    e_rows = dao.get_season_episodes(db(), id_season)
    jparse.add_parsed_episodes(season_json, e_rows)
    return response(season_json)


@app.route("/api/" + V + "/episodes/<id_episode>")
@requires_authorization
def get_episode(id_episode):
    rows = dao.get_episode(db(), id_episode)
    return response(jparse.parse_episode(rows))


@app.route("/api/" + V + "/episodes/recent")
@requires_authorization
def get_recently_played():
    # Paged list of recently played episodes (?offset=&amount=).
    offset = int(request.args.get('offset', 0))
    amount = int(request.args.get('amount', 20))
    rows = dao.get_recently_played(db(), offset, amount)
    return response(jparse.parse_recently_played(rows))


# The mark_* endpoints use db(False): autocommit off, the dao layer
# commits explicitly (but see the caching caveat on db() above).
@app.route("/api/" + V + "/shows/mark", methods=['POST'])
@requires_authorization
def mark_shows():
    data = request.get_json()
    success = dao.mark_shows(db(False), data)
    return response({"success": success})


@app.route("/api/" + V + "/seasons/mark", methods=['POST'])
@requires_authorization
def mark_seasons():
    data = request.get_json()
    success = dao.mark_seasons(db(False), data)
    return response({"success": success})


@app.route("/api/" + V + "/episodes/mark", methods=['POST'])
@requires_authorization
def mark_episodes():
    data = request.get_json()
    success = dao.mark_episodes(db(False), data)
    return response({"success": success})
# endregion


# region return type
def error(err='unknown error'):
    # 400 with a JSON error payload.
    return jsonify(error=err), 400


def response(data):
    # 200 with a JSON payload.
    return jsonify(data), 200


def unauthorized():
    # 401 plain-text response used by requires_authorization.
    return 'You are not authorized to perform this action', 401
# endregion


if __name__ == "__main__":
    app.run(debug=DEBUG, host=LOCAL_NETWORK)
Python3Development/KodiAPI
api.py
Python
gpl-3.0
3,710
0
# import needed models from django.db import models from django.utils import timezone from django.contrib.auth.models import User # Create your models here. # create user object class Person(User): internal_id = models.CharField(max_length=25, null=True, blank=True) verified = models.NullBooleanField(default=False) approval_date = models.DateTimeField(null=True, blank=True) # create list object class List(models.Model): name = models.CharField('List Name', max_length=50) created_date = models.DateTimeField(auto_now_add=True) modified_date = models.DateTimeField(auto_now=True) links = models.ManyToManyField("Link") def __str__(self): return self.name # create link object class Link(models.Model): name = models.CharField('Link Name', max_length=50) created_date = models.DateTimeField(auto_now_add=True) modified_date = models.DateTimeField(auto_now=True) tags = models.TextField(null=True, blank=True) def __str__(self): return self.name
prakashksinha/playground
bookmarks/models.py
Python
apache-2.0
1,100
0.012727
# -*- coding: utf-8 -*-
'''
Small Dictionary

Translates a handful of SeleniumBase keywords into the supported
languages. Each method takes the language name (e.g. "English",
"Spanish") and returns the translated word; an unknown language raises
KeyError, exactly as a dict lookup does.
'''


class SD:
    def translate_in(language):
        return {
            "English": "in",
            "Chinese": "在",
            "Dutch": "in",
            "French": "dans",
            "Italian": "nel",
            "Japanese": "に",
            "Korean": "에",
            "Portuguese": "no",
            "Russian": "в",
            "Spanish": "en",
        }[language]

    def translate_assert(language):
        return {
            "English": "ASSERT",
            "Chinese": "断言",
            "Dutch": "CONTROLEREN",
            "French": "VÉRIFIER",
            "Italian": "VERIFICARE",
            "Japanese": "検証",
            "Korean": "확인",
            "Portuguese": "VERIFICAR",
            "Russian": "ПОДТВЕРДИТЬ",
            "Spanish": "VERIFICAR",
        }[language]

    def translate_assert_text(language):
        return {
            "English": "ASSERT TEXT",
            "Chinese": "断言文本",
            "Dutch": "CONTROLEREN TEKST",
            "French": "VÉRIFIER TEXTE",
            "Italian": "VERIFICARE TESTO",
            "Japanese": "テキストを確認する",
            "Korean": "텍스트 확인",
            "Portuguese": "VERIFICAR TEXTO",
            "Russian": "ПОДТВЕРДИТЬ ТЕКСТ",
            "Spanish": "VERIFICAR TEXTO",
        }[language]

    def translate_assert_exact_text(language):
        return {
            "English": "ASSERT EXACT TEXT",
            "Chinese": "确切断言文本",
            "Dutch": "CONTROLEREN EXACTE TEKST",
            "French": "VÉRIFIER EXACTEMENT TEXTE",
            "Italian": "VERIFICARE TESTO ESATTO",
            "Japanese": "正確なテキストを確認する",
            "Korean": "정확한 텍스트를 확인하는",
            "Portuguese": "VERIFICAR TEXTO EXATO",
            "Russian": "ПОДТВЕРДИТЬ ТЕКСТ ТОЧНО",
            "Spanish": "VERIFICAR TEXTO EXACTO",
        }[language]

    def translate_assert_link_text(language):
        return {
            "English": "ASSERT LINK TEXT",
            "Chinese": "断言链接文本",
            "Dutch": "CONTROLEREN LINKTEKST",
            "French": "VÉRIFIER TEXTE DU LIEN",
            "Italian": "VERIFICARE TESTO DEL COLLEGAMENTO",
            "Japanese": "リンクテキストを確認する",
            "Korean": "링크 텍스트 확인",
            "Portuguese": "VERIFICAR TEXTO DO LINK",
            "Russian": "ПОДТВЕРДИТЬ ССЫЛКУ",
            "Spanish": "VERIFICAR TEXTO DEL ENLACE",
        }[language]

    def translate_assert_title(language):
        return {
            "English": "ASSERT TITLE",
            "Chinese": "断言标题",
            "Dutch": "CONTROLEREN TITEL",
            "French": "VÉRIFIER TITRE",
            "Italian": "VERIFICARE TITOLO",
            "Japanese": "タイトルを確認",
            "Korean": "제목 확인",
            "Portuguese": "VERIFICAR TÍTULO",
            "Russian": "ПОДТВЕРДИТЬ НАЗВАНИЕ",
            "Spanish": "VERIFICAR TÍTULO",
        }[language]

    def translate_assert_no_404_errors(language):
        return {
            "English": "ASSERT NO 404 ERRORS",
            "Chinese": "检查断开的链接",
            "Dutch": "CONTROLEREN OP GEBROKEN LINKS",
            "French": "VÉRIFIER LES LIENS ROMPUS",
            "Italian": "VERIFICARE I COLLEGAMENTI",
            "Japanese": "リンク切れを確認する",
            "Korean": "끊어진 링크 확인",
            "Portuguese": "VERIFICAR SE HÁ LINKS QUEBRADOS",
            "Russian": "ПРОВЕРИТЬ ОШИБКИ 404",
            "Spanish": "VERIFICAR SI HAY ENLACES ROTOS",
        }[language]

    def translate_assert_no_js_errors(language):
        return {
            "English": "ASSERT NO JS ERRORS",
            "Chinese": "检查JS错误",
            "Dutch": "CONTROLEREN OP JS FOUTEN",
            "French": "VÉRIFIER LES ERREURS JS",
            "Italian": "CONTROLLA ERRORI JS",
            "Japanese": "JSエラーを確認する",
            "Korean": "JS 오류 확인",
            "Portuguese": "VERIFICAR SE HÁ ERROS JS",
            "Russian": "ПРОВЕРИТЬ ОШИБКИ JS",
            "Spanish": "VERIFICAR SI HAY ERRORES JS",
        }[language]
mdmintz/SeleniumBase
seleniumbase/fixtures/words.py
Python
mit
4,708
0
# -*- coding: utf-8 - # # This file is part of couchdbkit released under the MIT license. # See the NOTICE for more information. """ properties used by Document object """ import decimal import datetime import re import time try: from collections import MutableSet, Iterable def is_iterable(c): return isinstance(c, Iterable) support_setproperty = True except ImportError: support_setproperty = False from couchdbkit.exceptions import BadValueError __all__ = ['ALLOWED_PROPERTY_TYPES', 'Property', 'StringProperty', 'IntegerProperty', 'DecimalProperty', 'BooleanProperty', 'FloatProperty', 'DateTimeProperty', 'DateProperty', 'TimeProperty', 'DictProperty', 'StringDictProperty', 'ListProperty', 'StringListProperty', 'dict_to_json', 'list_to_json', 'value_to_json', 'MAP_TYPES_PROPERTIES', 'value_to_python', 'dict_to_python', 'list_to_python', 'convert_property', 'value_to_property', 'LazyDict', 'LazyList'] if support_setproperty: __all__ += ['SetProperty', 'LazySet'] ALLOWED_PROPERTY_TYPES = set([ basestring, str, unicode, bool, int, long, float, datetime.datetime, datetime.date, datetime.time, decimal.Decimal, dict, list, set, type(None) ]) re_date = re.compile('^(\d{4})\D?(0[1-9]|1[0-2])\D?([12]\d|0[1-9]|3[01])$') re_time = re.compile('^([01]\d|2[0-3])\D?([0-5]\d)\D?([0-5]\d)?\D?(\d{3})?$') re_datetime = re.compile('^(\d{4})\D?(0[1-9]|1[0-2])\D?([12]\d|0[1-9]|3[01])(\D?([01]\d|2[0-3])\D?([0-5]\d)\D?([0-5]\d)?\D?(\d{3})?([zZ]|([\+-])([01]\d|2[0-3])\D?([0-5]\d)?)?)?$') re_decimal = re.compile('^(\d+)\.(\d+)$') class Property(object): """ Property base which all other properties inherit.""" creation_counter = 0 def __init__(self, verbose_name=None, name=None, default=None, required=False, validators=None, choices=None): """ Default constructor for a property. 
:param verbose_name: str, verbose name of field, could be use for description :param name: str, name of field :param default: default value :param required: True if field is required, default is False :param validators: list of callable or callable, field validators function that are executed when document is saved. """ self.verbose_name = verbose_name self.name = name self.default = default self.required = required self.validators = validators self.choices = choices self.creation_counter = Property.creation_counter Property.creation_counter += 1 def __property_config__(self, document_class, property_name): self.document_class = document_class if self.name is None: self.name = property_name def __property_init__(self, document_instance, value): """ method used to set value of the property when we create the document. Don't check required. """ if value is not None: value = self.to_json(self.validate(value, required=False)) document_instance._doc[self.name] = value def __get__(self, document_instance, document_class): if document_instance is None: return self value = document_instance._doc.get(self.name) if value is not None: value = self._to_python(value) return value def __set__(self, document_instance, value): value = self.validate(value, required=False) document_instance._doc[self.name] = self._to_json(value) def __delete__(self, document_instance): pass def default_value(self): """ return default value """ default = self.default if callable(default): default = default() return default def validate(self, value, required=True): """ validate value """ if required and self.empty(value): if self.required: raise BadValueError("Property %s is required." 
% self.name) else: if self.choices and value is not None: if isinstance(self.choices, list): choice_list = self.choices if isinstance(self.choices, dict): choice_list = self.choices.keys() if isinstance(self.choices, tuple): choice_list = [key for (key, name) in self.choices] if value not in choice_list: raise BadValueError('Property %s is %r; must be one of %r' % ( self.name, value, choice_list)) if self.validators: if isinstance(self.validators, (list, tuple,)): for validator in self.validators: if callable(validator): validator(value) elif callable(self.validators): self.validators(value) return value def empty(self, value): """ test if value is empty """ return (not value and value != 0) or value is None def _to_python(self, value): if value == None: return value return self.to_python(value) def _to_json(self, value): if value == None: return value return self.to_json(value) def to_python(self, value): """ convert to python type """ return unicode(value) def to_json(self, value): """ convert to json, Converted value is saved in couchdb. """ return self.to_python(value) data_type = None class StringProperty(Property): """ string property str or unicode property *Value type*: unicode """ to_python = unicode def validate(self, value, required=True): value = super(StringProperty, self).validate(value, required=required) if value is None: return value if not isinstance(value, basestring): raise BadValueError( 'Property %s must be unicode or str instance, not a %s' % (self.name, type(value).__name__)) return value data_type = unicode class IntegerProperty(Property): """ Integer property. 
map to int *Value type*: int """ to_python = int def empty(self, value): return value is None def validate(self, value, required=True): value = super(IntegerProperty, self).validate(value, required=required) if value is None: return value if value is not None and not isinstance(value, (int, long,)): raise BadValueError( 'Property %s must be %s or long instance, not a %s' % (self.name, type(self.data_type).__name__, type(value).__name__)) return value data_type = int LongProperty = IntegerProperty class FloatProperty(Property): """ Float property, map to python float *Value type*: float """ to_python = float data_type = float def validate(self, value, required=True): value = super(FloatProperty, self).validate(value, required=required) if value is None: return value if not isinstance(value, float): raise BadValueError( 'Property %s must be float instance, not a %s' % (self.name, type(value).__name__)) return value Number = FloatProperty class BooleanProperty(Property): """ Boolean property, map to python bool *ValueType*: bool """ to_python = bool data_type = bool def validate(self, value, required=True): value = super(BooleanProperty, self).validate(value, required=required) if value is None: return value if value is not None and not isinstance(value, bool): raise BadValueError( 'Property %s must be bool instance, not a %s' % (self.name, type(value).__name__)) return value def empty(self, value): """test if boolean is empty""" return value is None class DecimalProperty(Property): """ Decimal property, map to Decimal python object *ValueType*: decimal.Decimal """ data_type = decimal.Decimal def to_python(self, value): return decimal.Decimal(value) def to_json(self, value): return unicode(value) class DateTimeProperty(Property): """DateTime property. It convert iso3339 string to python and vice-versa. Map to datetime.datetime object. 
*ValueType*: datetime.datetime """ def __init__(self, verbose_name=None, auto_now=False, auto_now_add=False, **kwds): super(DateTimeProperty, self).__init__(verbose_name, **kwds) self.auto_now = auto_now self.auto_now_add = auto_now_add def validate(self, value, required=True): value = super(DateTimeProperty, self).validate(value, required=required) if value is None: return value if value and not isinstance(value, self.data_type): raise BadValueError('Property %s must be a %s, current is %s' % (self.name, self.data_type.__name__, type(value).__name__)) return value def default_value(self): if self.auto_now or self.auto_now_add: return self.now() return Property.default_value(self) def to_python(self, value): if isinstance(value, basestring): try: value = value.split('.', 1)[0] # strip out microseconds value = value[0:19] # remove timezone value = datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S') except ValueError, e: raise ValueError('Invalid ISO date/time %r [%s]' % (value, str(e))) return value def to_json(self, value): if self.auto_now: value = self.now() if value is None: return value return value.replace(microsecond=0).isoformat() + 'Z' data_type = datetime.datetime @staticmethod def now(): return datetime.datetime.utcnow() class DateProperty(DateTimeProperty): """ Date property, like DateTime property but only for Date. Map to datetime.date object *ValueType*: datetime.date """ data_type = datetime.date @staticmethod def now(): return datetime.datetime.now().date() def to_python(self, value): if isinstance(value, basestring): try: value = datetime.date(*time.strptime(value, '%Y-%m-%d')[:3]) except ValueError, e: raise ValueError('Invalid ISO date %r [%s]' % (value, str(e))) return value def to_json(self, value): if value is None: return value return value.isoformat() class TimeProperty(DateTimeProperty): """ Date property, like DateTime property but only for time. 
Map to datetime.time object *ValueType*: datetime.time """ data_type = datetime.time @staticmethod def now(self): return datetime.datetime.now().time() def to_python(self, value): if isinstance(value, basestring): try: value = value.split('.', 1)[0] # strip out microseconds value = datetime.time(*time.strptime(value, '%H:%M:%S')[3:6]) except ValueError, e: raise ValueError('Invalid ISO time %r [%s]' % (value, str(e))) return value def to_json(self, value): if value is None: return value return value.replace(microsecond=0).isoformat() class DictProperty(Property): """ A property that stores a dict of things""" def __init__(self, verbose_name=None, default=None, required=False, **kwds): """ :args verbose_name: Optional verbose name. :args default: Optional default value; if omitted, an empty list is used. :args**kwds: Optional additional keyword arguments, passed to base class. Note that the only permissible value for 'required' is True. """ if default is None: default = {} Property.__init__(self, verbose_name, default=default, required=required, **kwds) data_type = dict def validate(self, value, required=True): value = super(DictProperty, self).validate(value, required=required) if value and value is not None: if not isinstance(value, dict): raise BadValueError('Property %s must be a dict' % self.name) value = self.validate_dict_contents(value) return value def validate_dict_contents(self, value): try: value = validate_dict_content(value) except BadValueError: raise BadValueError( 'Items of %s dict must all be in %s' % (self.name, ALLOWED_PROPERTY_TYPES)) return value def default_value(self): """Default value for list. Because the property supplied to 'default' is a static value, that value must be shallow copied to prevent all fields with default values from sharing the same instance. Returns: Copy of the default value. 
""" value = super(DictProperty, self).default_value() if value is None: value = {} return dict(value) def to_python(self, value): return LazyDict(value) def to_json(self, value): return value_to_json(value) class StringDictProperty(DictProperty): def to_python(self, value): return LazyDict(value, item_type=basestring) def validate_dict_contents(self, value): try: value = validate_dict_content(value, basestring) except BadValueError: raise BadValueError( 'Items of %s dict must all be in %s' % (self.name, basestring)) return value class ListProperty(Property): """A property that stores a list of things. """ def __init__(self, verbose_name=None, default=None, required=False, item_type=None, **kwds): """Construct ListProperty. :args verbose_name: Optional verbose name. :args default: Optional default value; if omitted, an empty list is used. :args**kwds: Optional additional keyword arguments, passed to base class. """ if default is None: default = [] if item_type is not None and item_type not in ALLOWED_PROPERTY_TYPES: raise ValueError('item_type %s not in %s' % (item_type, ALLOWED_PROPERTY_TYPES)) self.item_type = item_type Property.__init__(self, verbose_name, default=default, required=required, **kwds) data_type = list def validate(self, value, required=True): value = super(ListProperty, self).validate(value, required=required) if value and value is not None: if not isinstance(value, list): raise BadValueError('Property %s must be a list' % self.name) value = self.validate_list_contents(value) return value def validate_list_contents(self, value): value = validate_list_content(value, item_type=self.item_type) try: value = validate_list_content(value, item_type=self.item_type) except BadValueError: raise BadValueError( 'Items of %s list must all be in %s' % (self.name, ALLOWED_PROPERTY_TYPES)) return value def default_value(self): """Default value for list. 
Because the property supplied to 'default' is a static value, that value must be shallow copied to prevent all fields with default values from sharing the same instance. Returns: Copy of the default value. """ value = super(ListProperty, self).default_value() if value is None: value = [] return list(value) def to_python(self, value): return LazyList(value, item_type=self.item_type) def to_json(self, value): return value_to_json(value, item_type=self.item_type) class StringListProperty(ListProperty): """ shorthand for list that should containe only unicode""" def __init__(self, verbose_name=None, default=None, required=False, **kwds): super(StringListProperty, self).__init__(verbose_name=verbose_name, default=default, required=required, item_type=basestring, **kwds) # dict proxy class LazyDict(dict): """ object to make sure we keep updated of dict in _doc. We just override a dict and maintain change in doc reference (doc[keyt] obviously). if init_vals is specified, doc is overwritten with the dict given. Otherwise, the values already in doc are used. 
""" def __init__(self, doc, item_type=None, init_vals=None): dict.__init__(self) self.item_type = item_type self.doc = doc if init_vals is None: self._wrap() else: for key, value in init_vals.items(): self[key] = value def _wrap(self): for key, json_value in self.doc.items(): if isinstance(json_value, dict): value = LazyDict(json_value, item_type=self.item_type) elif isinstance(json_value, list): value = LazyList(json_value, item_type=self.item_type) else: value = value_to_python(json_value, self.item_type) dict.__setitem__(self, key, value) def __setitem__(self, key, value): if isinstance(value, dict): self.doc[key] = {} value = LazyDict(self.doc[key], item_type=self.item_type, init_vals=value) elif isinstance(value, list): self.doc[key] = [] value = LazyList(self.doc[key], item_type=self.item_type, init_vals=value) else: self.doc.update({key: value_to_json(value, item_type=self.item_type) }) super(LazyDict, self).__setitem__(key, value) def __delitem__(self, key): del self.doc[key] super(LazyDict, self).__delitem__(key) def pop(self, key, *args): default = len(args) == 1 if default: self.doc.pop(key, args[-1]) return super(LazyDict, self).pop(key, args[-1]) self.doc.pop(key) return super(LazyDict, self).pop(key) def setdefault(self, key, default): if key in self: return self[key] self.doc.setdefault(key, value_to_json(default, item_type=self.item_type)) super(LazyDict, self).setdefault(key, default) return default def update(self, value): for k, v in value.items(): self[k] = v def popitem(self, value): new_value = super(LazyDict, self).popitem(value) self.doc.popitem(value_to_json(value, item_type=self.item_type)) return new_value def clear(self): self.doc.clear() super(LazyDict, self).clear() class LazyList(list): """ object to make sure we keep update of list in _doc. We just override a list and maintain change in doc reference (doc[index] obviously). if init_vals is specified, doc is overwritten with the list given. 
Otherwise, the values already in doc are used. """ def __init__(self, doc, item_type=None, init_vals=None): list.__init__(self) self.item_type = item_type self.doc = doc if init_vals is None: # just wrap the current values self._wrap() else: # initialize this list and the underlying list # with the values given. del self.doc[:] for item in init_vals: self.append(item) def _wrap(self): for json_value in self.doc: if isinstance(json_value, dict): value = LazyDict(json_value, item_type=self.item_type) elif isinstance(json_value, list): value = LazyList(json_value, item_type=self.item_type) else: value = value_to_python(json_value, self.item_type) list.append(self, value) def __delitem__(self, index): del self.doc[index] list.__delitem__(self, index) def __setitem__(self, index, value): if isinstance(value, dict): self.doc[index] = {} value = LazyDict(self.doc[index], item_type=self.item_type, init_vals=value) elif isinstance(value, list): self.doc[index] = [] value = LazyList(self.doc[index], item_type=self.item_type, init_vals=value) else: self.doc[index] = value_to_json(value, item_type=self.item_type) list.__setitem__(self, index, value) def __delslice__(self, i, j): del self.doc[i:j] list.__delslice__(self, i, j) def __getslice__(self, i, j): return LazyList(self.doc[i:j], self.item_type) def __setslice__(self, i, j, seq): self.doc[i:j] = (value_to_json(v, item_type=self.item_type) for v in seq) list.__setslice__(self, i, j, seq) def __contains__(self, value): jvalue = value_to_json(value) for m in self.doc: if m == jvalue: return True return False def append(self, *args, **kwargs): if args: assert len(args) == 1 value = args[0] else: value = kwargs index = len(self) if isinstance(value, dict): self.doc.append({}) value = LazyDict(self.doc[index], item_type=self.item_type, init_vals=value) elif isinstance(value, list): self.doc.append([]) value = LazyList(self.doc[index], item_type=self.item_type, init_vals=value) else: self.doc.append(value_to_json(value, 
item_type=self.item_type)) super(LazyList, self).append(value) def extend(self, x): self.doc.extend( [value_to_json(v, item_type=self.item_type) for v in x]) super(LazyList, self).extend(x) def index(self, x, *args): x = value_to_json(x, item_type=self.item_type) return self.doc.index(x) def insert(self, i, x): self.__setslice__(i, i, [x]) def pop(self, i=-1): del self.doc[i] v = super(LazyList, self).pop(i) return value_to_python(v, item_type=self.item_type) def remove(self, x): del self[self.index(x)] def sort(self, cmp=None, key=None, reverse=False): self.doc.sort(cmp, key, reverse) list.sort(self, cmp, key, reverse) def reverse(self): self.doc.reverse() list.reverse(self) if support_setproperty: class SetProperty(Property): """A property that stores a Python set as a list of unique elements. Note that Python set operations like union that return a set object do not alter list that will be stored with the next save, while operations like update that change a set object in-place do keep the list in sync. """ def __init__(self, verbose_name=None, default=None, required=None, item_type=None, **kwds): """Construct SetProperty. :args verbose_name: Optional verbose name. :args default: Optional default value; if omitted, an empty set is used. :args required: True if field is required, default is False. :args item_type: Optional data type of items that set contains. Used to assist with JSON serialization/deserialization when data is stored/retireved. :args **kwds: Optional additional keyword arguments, passed to base class. 
""" if default is None: default = set() if item_type is not None and item_type not in ALLOWED_PROPERTY_TYPES: raise ValueError('item_type %s not in %s' % (item_type, ALLOWED_PROPERTY_TYPES)) self.item_type = item_type super(SetProperty, self).__init__( verbose_name=verbose_name, default=default, required=required, **kwds) data_type = set def validate(self, value, required=True): value = super(SetProperty, self).validate(value, required=required) if value and value is not None: if not isinstance(value, MutableSet): raise BadValueError('Property %s must be a set' % self.name) value = self.validate_set_contents(value) return value def validate_set_contents(self, value): try: value = validate_set_content(value, item_type=self.item_type) except BadValueError: raise BadValueError( 'Items of %s set must all be in %s' % (self.name, ALLOWED_PROPERTY_TYPES)) return value def default_value(self): """Return default value for set. Because the property supplied to 'default' is a static value, that value must be shallow copied to prevent all fields with default values from sharing the same instance. Returns: Copy of the default value. """ value = super(SetProperty, self).default_value() if value is None: return set() return value.copy() def to_python(self, value): return LazySet(value, item_type=self.item_type) def to_json(self, value): return value_to_json(value, item_type=self.item_type) class LazySet(MutableSet): """Object to make sure that we keep set and _doc synchronized. We sub-class MutableSet and maintain changes in doc. Note that methods like union that return a set object do not alter _doc, while methods like update that change a set object in-place do keep _doc in sync. 
""" def _map_named_operation(opname): fn = getattr(MutableSet, opname) if hasattr(fn, 'im_func'): fn = fn.im_func def method(self, other, fn=fn): if not isinstance(other, MutableSet): other = self._from_iterable(other) return fn(self, other) return method issubset = _map_named_operation('__le__') issuperset = _map_named_operation('__ge__') symmetric_difference = _map_named_operation('__xor__') def __init__(self, doc, item_type=None): self.item_type = item_type self.doc = doc self.elements = set(value_to_python(value, self.item_type) for value in self.doc) def __repr__(self): return '%s(%r)' % (type(self).__name__, list(self)) @classmethod def _from_iterable(cls, it): return cls(it) def __iand__(self, iterator): for value in (self.elements - iterator): self.elements.discard(value) return self def __iter__(self): return iter(element for element in self.elements) def __len__(self): return len(self.elements) def __contains__(self, item): return item in self.elements def __xor__(self, other): if not isinstance(other, MutableSet): if not is_iterable(other): return NotImplemented other = self._from_iterable(other) return (self.elements - other) | (other - self.elements) def __gt__(self, other): if not isinstance(other, MutableSet): return NotImplemented return other < self.elements def __ge__(self, other): if not isinstance(other, MutableSet): return NotImplemented return other <= self.elements def __ne__(self, other): return not (self.elements == other) def add(self, value): self.elements.add(value) if value not in self.doc: self.doc.append(value_to_json(value, item_type=self.item_type)) def copy(self): return self.elements.copy() def difference(self, other, *args): return self.elements.difference(other, *args) def difference_update(self, other, *args): for value in other: self.discard(value) for arg in args: self.difference_update(arg) def discard(self, value): self.elements.discard(value) try: self.doc.remove(value) except ValueError: pass def intersection(self, other, 
*args): return self.elements.intersection(other, *args) def intersection_update(self, other, *args): if not isinstance(other, MutableSet): other = set(other) for value in self.elements - other: self.discard(value) for arg in args: self.intersection_update(arg) def symmetric_difference_update(self, other): if not isinstance(other, MutableSet): other = set(other) for value in other: if value in self.elements: self.discard(value) else: self.add(value) def union(self, other, *args): return self.elements.union(other, *args) def update(self, other, *args): self.elements.update(other, *args) for element in self.elements: if element not in self.doc: self.doc.append( value_to_json(element, item_type=self.item_type)) # some mapping MAP_TYPES_PROPERTIES = { decimal.Decimal: DecimalProperty, datetime.datetime: DateTimeProperty, datetime.date: DateProperty, datetime.time: TimeProperty, str: StringProperty, unicode: StringProperty, bool: BooleanProperty, int: IntegerProperty, long: LongProperty, float: FloatProperty, list: ListProperty, dict: DictProperty } if support_setproperty: MAP_TYPES_PROPERTIES[set] = SetProperty def convert_property(value): """ convert a value to json from Property._to_json """ if type(value) in MAP_TYPES_PROPERTIES: prop = MAP_TYPES_PROPERTIES[type(value)]() value = prop.to_json(value) return value def value_to_property(value): """ Convert value in a Property object """ if type(value) in MAP_TYPES_PROPERTIES: prop = MAP_TYPES_PROPERTIES[type(value)]() return prop else: return value # utilities functions def validate_list_content(value, item_type=None): """ validate type of values in a list """ return [validate_content(item, item_type=item_type) for item in value] def validate_dict_content(value, item_type=None): """ validate type of values in a dict """ return dict([(k, validate_content(v, item_type=item_type)) for k, v in value.iteritems()]) def validate_set_content(value, item_type=None): """ validate type of values in a set """ return 
set(validate_content(item, item_type=item_type) for item in value) def validate_content(value, item_type=None): """ validate a value. test if value is in supported types """ if isinstance(value, list): value = validate_list_content(value, item_type=item_type) elif isinstance(value, dict): value = validate_dict_content(value, item_type=item_type) elif item_type is not None and not isinstance(value, item_type): raise BadValueError( 'Items must all be in %s' % item_type) elif type(value) not in ALLOWED_PROPERTY_TYPES: raise BadValueError( 'Items must all be in %s' % (ALLOWED_PROPERTY_TYPES)) return value def dict_to_json(value, item_type=None): """ convert a dict to json """ return dict([(k, value_to_json(v, item_type=item_type)) for k, v in value.iteritems()]) def list_to_json(value, item_type=None): """ convert a list to json """ return [value_to_json(item, item_type=item_type) for item in value] def value_to_json(value, item_type=None): """ convert a value to json using appropriate regexp. For Dates we use ISO 8601. Decimal are converted to string. """ if isinstance(value, datetime.datetime) and is_type_ok(item_type, datetime.datetime): value = value.replace(microsecond=0).isoformat() + 'Z' elif isinstance(value, datetime.date) and is_type_ok(item_type, datetime.date): value = value.isoformat() elif isinstance(value, datetime.time) and is_type_ok(item_type, datetime.time): value = value.replace(microsecond=0).isoformat() elif isinstance(value, decimal.Decimal) and is_type_ok(item_type, decimal.Decimal): value = unicode(value) elif isinstance(value, (list, MutableSet)): value = list_to_json(value, item_type) elif isinstance(value, dict): value = dict_to_json(value, item_type) return value def is_type_ok(item_type, value_type): return item_type is None or item_type == value_type def value_to_python(value, item_type=None): """ convert a json value to python type using regexp. values converted have been put in json via `value_to_json` . 
""" data_type = None if isinstance(value, basestring): if re_date.match(value) and is_type_ok(item_type, datetime.date): data_type = datetime.date elif re_time.match(value) and is_type_ok(item_type, datetime.time): data_type = datetime.time elif re_datetime.match(value) and is_type_ok(item_type, datetime.datetime): data_type = datetime.datetime elif re_decimal.match(value) and is_type_ok(item_type, decimal.Decimal): data_type = decimal.Decimal if data_type is not None: prop = MAP_TYPES_PROPERTIES[data_type]() try: #sometimes regex fail so return value value = prop.to_python(value) except: pass elif isinstance(value, (list, MutableSet)): value = list_to_python(value, item_type=item_type) elif isinstance(value, dict): value = dict_to_python(value, item_type=item_type) return value def list_to_python(value, item_type=None): """ convert a list of json values to python list """ return [value_to_python(item, item_type=item_type) for item in value] def dict_to_python(value, item_type=None): """ convert a json object values to python dict """ return dict([(k, value_to_python(v, item_type=item_type)) for k, v in value.iteritems()])
benoitc/couchdbkit
couchdbkit/schema/properties.py
Python
mit
35,449
0.003836
#!/usr/bin/env python u""" notation3.py - Standalone Notation3 Parser Derived from CWM, the Closed World Machine Authors of the original suite: * Dan Connolly <@@> * Tim Berners-Lee <@@> * Yosi Scharf <@@> * Joseph M. Reagle Jr. <reagle@w3.org> * Rich Salz <rsalz@zolera.com> http://www.w3.org/2000/10/swap/notation3.py Copyright 2000-2007, World Wide Web Consortium. Copyright 2001, MIT. Copyright 2001, Zolera Systems Inc. License: W3C Software License http://www.w3.org/Consortium/Legal/copyright-software Modified by Sean B. Palmer Copyright 2007, Sean B. Palmer. Modified to work with rdflib by Gunnar Aastrand Grimnes Copyright 2010, Gunnar A. Grimnes """ # Python standard libraries import types import sys import os import re import codecs import warnings from decimal import Decimal from uuid import uuid4 from rdflib.term import URIRef, BNode, Literal, Variable, _XSD_PFX, _unique_id from rdflib.graph import QuotedGraph, ConjunctiveGraph, Graph from rdflib import py3compat b = py3compat.b __all__ = ['BadSyntax', 'N3Parser', 'TurtleParser', "splitFragP", "join", "base", "runNamespace", "uniqueURI", "hexify"] from rdflib.parser import Parser def splitFragP(uriref, punct=0): """split a URI reference before the fragment Punctuation is kept. e.g. >>> splitFragP("abc#def") ('abc', '#def') >>> splitFragP("abcdef") ('abcdef', '') """ i = uriref.rfind("#") if i >= 0: return uriref[:i], uriref[i:] else: return uriref, '' @py3compat.format_doctest_out def join(here, there): """join an absolute URI and URI reference (non-ascii characters are supported/doctested; haven't checked the details of the IRI spec though) ``here`` is assumed to be absolute. ``there`` is URI reference. >>> join('http://example/x/y/z', '../abc') 'http://example/x/abc' Raise ValueError if there uses relative path syntax but here has no hierarchical path. 
>>> join('mid:foo@example', '../foo') # doctest: +NORMALIZE_WHITESPACE Traceback (most recent call last): raise ValueError(here) ValueError: Base <mid:foo@example> has no slash after colon - with relative '../foo'. >>> join('http://example/x/y/z', '') 'http://example/x/y/z' >>> join('mid:foo@example', '#foo') 'mid:foo@example#foo' We grok IRIs >>> len(%(u)s'Andr\\xe9') 5 >>> join('http://example.org/', %(u)s'#Andr\\xe9') %(u)s'http://example.org/#Andr\\xe9' """ # assert(here.find("#") < 0), \ # "Base may not contain hash: '%s'" % here # why must caller splitFrag? slashl = there.find('/') colonl = there.find(':') # join(base, 'foo:/') -- absolute if colonl >= 0 and (slashl < 0 or colonl < slashl): return there bcolonl = here.find(':') assert(bcolonl >= 0), \ "Base uri '%s' is not absolute" % here # else it's not absolute path, frag = splitFragP(there) if not path: return here + frag # join('mid:foo@example', '../foo') bzzt if here[bcolonl + 1:bcolonl + 2] != '/': raise ValueError( ("Base <%s> has no slash after " "colon - with relative '%s'.") % (here, there)) if here[bcolonl + 1:bcolonl + 3] == '//': bpath = here.find('/', bcolonl + 3) else: bpath = bcolonl + 1 # join('http://xyz', 'foo') if bpath < 0: bpath = len(here) here = here + '/' # join('http://xyz/', '//abc') => 'http://abc' if there[:2] == '//': return here[:bcolonl + 1] + there # join('http://xyz/', '/abc') => 'http://xyz/abc' if there[:1] == '/': return here[:bpath] + there slashr = here.rfind('/') while 1: if path[:2] == './': path = path[2:] if path == '.': path = '' elif path[:3] == '../' or path == '..': path = path[3:] i = here.rfind('/', bpath, slashr) if i >= 0: here = here[:i + 1] slashr = i else: break return here[:slashr + 1] + path + frag def base(): """The base URI for this process - the Web equiv of cwd Relative or abolute unix-standard filenames parsed relative to this yeild the URI of the file. 
If we had a reliable way of getting a computer name, we should put it in the hostname just to prevent ambiguity """ # return "file://" + hostname + os.getcwd() + "/" return "file://" + _fixslash(os.getcwd()) + "/" def _fixslash(s): """ Fix windowslike filename to unixlike - (#ifdef WINDOWS)""" s = s.replace("\\", "/") if s[0] != "/" and s[1] == ":": s = s[2:] # @@@ Hack when drive letter present return s CONTEXT = 0 PRED = 1 SUBJ = 2 OBJ = 3 PARTS = PRED, SUBJ, OBJ ALL4 = CONTEXT, PRED, SUBJ, OBJ SYMBOL = 0 FORMULA = 1 LITERAL = 2 LITERAL_DT = 21 LITERAL_LANG = 22 ANONYMOUS = 3 XMLLITERAL = 25 Logic_NS = "http://www.w3.org/2000/10/swap/log#" NODE_MERGE_URI = Logic_NS + "is" # Pseudo-property indicating node merging forSomeSym = Logic_NS + "forSome" forAllSym = Logic_NS + "forAll" RDF_type_URI = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" RDF_NS_URI = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" OWL_NS = "http://www.w3.org/2002/07/owl#" DAML_sameAs_URI = OWL_NS + "sameAs" parsesTo_URI = Logic_NS + "parsesTo" RDF_spec = "http://www.w3.org/TR/REC-rdf-syntax/" List_NS = RDF_NS_URI # From 20030808 _Old_Logic_NS = "http://www.w3.org/2000/10/swap/log.n3#" N3_first = (SYMBOL, List_NS + "first") N3_rest = (SYMBOL, List_NS + "rest") N3_li = (SYMBOL, List_NS + "li") N3_nil = (SYMBOL, List_NS + "nil") N3_List = (SYMBOL, List_NS + "List") N3_Empty = (SYMBOL, List_NS + "Empty") runNamespaceValue = None def runNamespace(): "Return a URI suitable as a namespace for run-local objects" # @@@ include hostname (privacy?) (hash it?) 
global runNamespaceValue if runNamespaceValue is None: runNamespaceValue = join(base(), _unique_id()) + '#' return runNamespaceValue nextu = 0 def uniqueURI(): "A unique URI" global nextu nextu += 1 # return runNamespace() + "u_" + `nextu` return runNamespace() + "u_" + str(nextu) tracking = False chatty_flag = 50 # from why import BecauseOfData, becauseSubexpression def BecauseOfData(*args, **kargs): # print args, kargs pass def becauseSubexpression(*args, **kargs): # print args, kargs pass N3_forSome_URI = forSomeSym N3_forAll_URI = forAllSym # Magic resources we know about ADDED_HASH = "#" # Stop where we use this in case we want to remove it! # This is the hash on namespace URIs RDF_type = (SYMBOL, RDF_type_URI) DAML_sameAs = (SYMBOL, DAML_sameAs_URI) LOG_implies_URI = "http://www.w3.org/2000/10/swap/log#implies" BOOLEAN_DATATYPE = _XSD_PFX + "boolean" DECIMAL_DATATYPE = _XSD_PFX + "decimal" DOUBLE_DATATYPE = _XSD_PFX + "double" FLOAT_DATATYPE = _XSD_PFX + "float" INTEGER_DATATYPE = _XSD_PFX + "integer" option_noregen = 0 # If set, do not regenerate genids on output # @@ I18n - the notname chars need extending for well known unicode non-text # characters. The XML spec switched to assuming unknown things were name # characaters. # _namechars = string.lowercase + string.uppercase + string.digits + '_-' _notQNameChars = \ "\t\r\n !\"#$&'()*,+/;<=>?@[\\]^`{|}~" # else valid qname :-/ _notKeywordsChars = _notQNameChars + "." 
_notNameChars = _notQNameChars + ":" # Assume anything else valid name :-/ _rdfns = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#' hexChars = 'ABCDEFabcdef0123456789' escapeChars = "(_~.-!$&'()*+,;=/?#@%)" # valid for \ escapes in localnames def unicodeExpand(m): try: return unichr(int(m.group(1), 16)) except: raise Exception("Invalid unicode code point: " + m.group(1)) if py3compat.narrow_build: def unicodeExpand(m): try: return unichr(int(m.group(1), 16)) except ValueError: warnings.warn( 'Encountered a unicode char > 0xFFFF in a narrow python build. ' 'Trying to degrade gracefully, but this can cause problems ' 'later when working with the string:\n%s' % m.group(0)) return codecs.decode(m.group(0), 'unicode_escape') unicodeEscape4 = re.compile( r'\\u([0-9a-fA-F]{4})') unicodeEscape8 = re.compile( r'\\U([0-9a-fA-F]{8})') N3CommentCharacter = "#" # For unix script # ! compatabilty ########################################## Parse string to sink # # Regular expressions: eol = re.compile( r'[ \t]*(#[^\n]*)?\r?\n') # end of line, poss. w/comment eof = re.compile( r'[ \t]*(#[^\n]*)?$') # end of file, poss. 
w/comment ws = re.compile(r'[ \t]*') # Whitespace not including NL signed_integer = re.compile(r'[-+]?[0-9]+') # integer integer_syntax = re.compile(r'[-+]?[0-9]+') decimal_syntax = re.compile(r'[-+]?[0-9]*\.[0-9]+') exponent_syntax = re.compile(r'[-+]?(?:[0-9]+\.[0-9]*(?:e|E)[-+]?[0-9]+|'+ r'\.[0-9](?:e|E)[-+]?[0-9]+|'+ r'[0-9]+(?:e|E)[-+]?[0-9]+)') digitstring = re.compile(r'[0-9]+') # Unsigned integer interesting = re.compile(r"""[\\\r\n\"\']""") langcode = re.compile(r'[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*') class SinkParser: def __init__(self, store, openFormula=None, thisDoc="", baseURI=None, genPrefix="", why=None, turtle=False): """ note: namespace names should *not* end in # ; the # will get added during qname processing """ self._bindings = {} if thisDoc != "": assert ':' in thisDoc, "Document URI not absolute: <%s>" % thisDoc self._bindings[""] = thisDoc + "#" # default self._store = store if genPrefix: store.setGenPrefix(genPrefix) # pass it on self._thisDoc = thisDoc self.lines = 0 # for error handling self.startOfLine = 0 # For calculating character number self._genPrefix = genPrefix self.keywords = ['a', 'this', 'bind', 'has', 'is', 'of', 'true', 'false'] self.keywordsSet = 0 # Then only can others be considerd qnames self._anonymousNodes = {} # Dict of anon nodes already declared ln: Term self._variables = {} self._parentVariables = {} self._reason = why # Why the parser was asked to parse this self.turtle = turtle # raise exception when encountering N3 extensions # Turtle allows single or double quotes around strings, whereas N3 # only allows double quotes. 
self.string_delimiters = ('"', "'") if turtle else ('"',) self._reason2 = None # Why these triples # was: diag.tracking if tracking: self._reason2 = BecauseOfData( store.newSymbol(thisDoc), because=self._reason) if baseURI: self._baseURI = baseURI else: if thisDoc: self._baseURI = thisDoc else: self._baseURI = None assert not self._baseURI or ':' in self._baseURI if not self._genPrefix: if self._thisDoc: self._genPrefix = self._thisDoc + "#_g" else: self._genPrefix = uniqueURI() if openFormula is None: if self._thisDoc: self._formula = store.newFormula(thisDoc + "#_formula") else: self._formula = store.newFormula() else: self._formula = openFormula self._context = self._formula self._parentContext = None def here(self, i): """String generated from position in file This is for repeatability when refering people to bnodes in a document. This has diagnostic uses less formally, as it should point one to which bnode the arbitrary identifier actually is. It gives the line and character number of the '[' charcacter or path character which introduced the blank node. The first blank node is boringly _L1C1. It used to be used only for tracking, but for tests in general it makes the canonical ordering of bnodes repeatable.""" return "%s_L%iC%i" % (self._genPrefix, self.lines, i - self.startOfLine + 1) def formula(self): return self._formula def loadStream(self, stream): return self.loadBuf(stream.read()) # Not ideal def loadBuf(self, buf): """Parses a buffer and returns its top level formula""" self.startDoc() self.feed(buf) return self.endDoc() # self._formula def feed(self, octets): """Feed an octet stream tothe parser if BadSyntax is raised, the string passed in the exception object is the remainder after any statements have been parsed. 
So if there is more data to feed to the parser, it should be straightforward to recover.""" if not isinstance(octets, unicode): s = octets.decode('utf-8') # NB already decoded, so \ufeff if len(s) > 0 and s[0] == codecs.BOM_UTF8.decode('utf-8'): s = s[1:] else: s = octets i = 0 while i >= 0: j = self.skipSpace(s, i) if j < 0: return i = self.directiveOrStatement(s, j) if i < 0: #print("# next char: %s" % s[j]) self.BadSyntax(s, j, "expected directive or statement") def directiveOrStatement(self, argstr, h): i = self.skipSpace(argstr, h) if i < 0: return i # EOF if self.turtle: j = self.sparqlDirective(argstr, i) if j >= 0: return j j = self.directive(argstr, i) if j >= 0: return self.checkDot(argstr, j) j = self.statement(argstr, i) if j >= 0: return self.checkDot(argstr, j) return j # @@I18N # _namechars = string.lowercase + string.uppercase + string.digits + '_-' def tok(self, tok, argstr, i, colon=False): """Check for keyword. Space must have been stripped on entry and we must not be at end of file. if colon, then keyword followed by colon is ok (@prefix:<blah> is ok, rdf:type shortcut a must be followed by ws) """ assert tok[0] not in _notNameChars # not for punctuation if argstr[i:i + 1] == "@": i = i + 1 else: if tok not in self.keywords: return -1 # No, this has neither keywords declaration nor "@" if (argstr[i:i + len(tok)] == tok and ( argstr[i + len(tok)] in _notKeywordsChars) or (colon and argstr[i+len(tok)] == ':')): i = i + len(tok) return i else: return -1 def sparqlTok(self, tok, argstr, i): """Check for SPARQL keyword. Space must have been stripped on entry and we must not be at end of file. 
Case insensitive and not preceeded by @ """ assert tok[0] not in _notNameChars # not for punctuation if (argstr[i:i + len(tok)].lower() == tok.lower() and (argstr[i + len(tok)] in _notQNameChars)): i = i + len(tok) return i else: return -1 def directive(self, argstr, i): j = self.skipSpace(argstr, i) if j < 0: return j # eof res = [] j = self.tok('bind', argstr, i) # implied "#". Obsolete. if j > 0: self.BadSyntax(argstr, i, "keyword bind is obsolete: use @prefix") j = self.tok('keywords', argstr, i) if j > 0: if self.turtle: self.BadSyntax(argstr, i, "Found 'keywords' when in Turtle mode.") i = self.commaSeparatedList(argstr, j, res, self.bareWord) if i < 0: self.BadSyntax(argstr, i, "'@keywords' needs comma separated list of words") self.setKeywords(res[:]) return i j = self.tok('forAll', argstr, i) if j > 0: if self.turtle: self.BadSyntax(argstr, i, "Found 'forAll' when in Turtle mode.") i = self.commaSeparatedList(argstr, j, res, self.uri_ref2) if i < 0: self.BadSyntax(argstr, i, "Bad variable list after @forAll") for x in res: # self._context.declareUniversal(x) if x not in self._variables or x in self._parentVariables: self._variables[x] = self._context.newUniversal(x) return i j = self.tok('forSome', argstr, i) if j > 0: if self.turtle: self.BadSyntax(argstr, i, "Found 'forSome' when in Turtle mode.") i = self. 
commaSeparatedList(argstr, j, res, self.uri_ref2) if i < 0: self.BadSyntax(argstr, i, "Bad variable list after @forSome") for x in res: self._context.declareExistential(x) return i j = self.tok('prefix', argstr, i, colon=True) # no implied "#" if j >= 0: t = [] i = self.qname(argstr, j, t) if i < 0: self.BadSyntax(argstr, j, "expected qname after @prefix") j = self.uri_ref2(argstr, i, t) if j < 0: self.BadSyntax(argstr, i, "expected <uriref> after @prefix _qname_") ns = self.uriOf(t[1]) if self._baseURI: ns = join(self._baseURI, ns) elif ":" not in ns: self.BadSyntax(argstr, j, "With no base URI, cannot use " + "relative URI in @prefix <" + ns + ">") assert ':' in ns # must be absolute self._bindings[t[0][0]] = ns self.bind(t[0][0], hexify(ns)) return j j = self.tok('base', argstr, i) # Added 2007/7/7 if j >= 0: t = [] i = self.uri_ref2(argstr, j, t) if i < 0: self.BadSyntax(argstr, j, "expected <uri> after @base ") ns = self.uriOf(t[0]) if self._baseURI: ns = join(self._baseURI, ns) else: self.BadSyntax(argstr, j, "With no previous base URI, cannot use " + "relative URI in @base <" + ns + ">") assert ':' in ns # must be absolute self._baseURI = ns return i return -1 # Not a directive, could be something else. def sparqlDirective(self, argstr, i): """ turtle and trig support BASE/PREFIX without @ and without terminating . 
""" j = self.skipSpace(argstr, i) if j < 0: return j # eof j = self.sparqlTok('PREFIX', argstr, i) if j >= 0: t = [] i = self.qname(argstr, j, t) if i < 0: self.BadSyntax(argstr, j, "expected qname after @prefix") j = self.uri_ref2(argstr, i, t) if j < 0: self.BadSyntax(argstr, i, "expected <uriref> after @prefix _qname_") ns = self.uriOf(t[1]) if self._baseURI: ns = join(self._baseURI, ns) elif ":" not in ns: self.BadSyntax(argstr, j, "With no base URI, cannot use " + "relative URI in @prefix <" + ns + ">") assert ':' in ns # must be absolute self._bindings[t[0][0]] = ns self.bind(t[0][0], hexify(ns)) return j j = self.sparqlTok('BASE', argstr, i) if j >= 0: t = [] i = self.uri_ref2(argstr, j, t) if i < 0: self.BadSyntax(argstr, j, "expected <uri> after @base ") ns = self.uriOf(t[0]) if self._baseURI: ns = join(self._baseURI, ns) else: self.BadSyntax(argstr, j, "With no previous base URI, cannot use " + "relative URI in @base <" + ns + ">") assert ':' in ns # must be absolute self._baseURI = ns return i return -1 # Not a directive, could be something else. def bind(self, qn, uri): assert isinstance( uri, types.StringType), "Any unicode must be %x-encoded already" if qn == "": self._store.setDefaultNamespace(uri) else: self._store.bind(qn, uri) def setKeywords(self, k): "Takes a list of strings" if k is None: self.keywordsSet = 0 else: self.keywords = k self.keywordsSet = 1 def startDoc(self): # was: self._store.startDoc() self._store.startDoc(self._formula) def endDoc(self): """Signal end of document and stop parsing. 
returns formula""" self._store.endDoc(self._formula) # don't canonicalize yet return self._formula def makeStatement(self, quadruple): # $$$$$$$$$$$$$$$$$$$$$ # print "# Parser output: ", `quadruple` self._store.makeStatement(quadruple, why=self._reason2) def statement(self, argstr, i): r = [] i = self.object( argstr, i, r) # Allow literal for subject - extends RDF if i < 0: return i j = self.property_list(argstr, i, r[0]) if j < 0: self.BadSyntax( argstr, i, "expected propertylist") return j def subject(self, argstr, i, res): return self.item(argstr, i, res) def verb(self, argstr, i, res): """ has _prop_ is _prop_ of a = _prop_ >- prop -> <- prop -< _operator_""" j = self.skipSpace(argstr, i) if j < 0: return j # eof r = [] j = self.tok('has', argstr, i) if j >= 0: if self.turtle: self.BadSyntax(argstr, i, "Found 'has' keyword in Turtle mode") i = self.prop(argstr, j, r) if i < 0: self.BadSyntax(argstr, j, "expected property after 'has'") res.append(('->', r[0])) return i j = self.tok('is', argstr, i) if j >= 0: if self.turtle: self.BadSyntax(argstr, i, "Found 'is' keyword in Turtle mode") i = self.prop(argstr, j, r) if i < 0: self.BadSyntax(argstr, j, "expected <property> after 'is'") j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax(argstr, i, "End of file found, expected property after 'is'") i = j j = self.tok('of', argstr, i) if j < 0: self.BadSyntax(argstr, i, "expected 'of' after 'is' <prop>") res.append(('<-', r[0])) return j j = self.tok('a', argstr, i) if j >= 0: res.append(('->', RDF_type)) return j if argstr[i:i + 2] == "<=": if self.turtle: self.BadSyntax(argstr, i, "Found '<=' in Turtle mode. 
") res.append(('<-', self._store.newSymbol(Logic_NS + "implies"))) return i + 2 if argstr[i:i + 1] == "=": if self.turtle: self.BadSyntax(argstr, i, "Found '=' in Turtle mode") if argstr[i + 1:i + 2] == ">": res.append(('->', self._store.newSymbol(Logic_NS + "implies"))) return i + 2 res.append(('->', DAML_sameAs)) return i + 1 if argstr[i:i + 2] == ":=": if self.turtle: self.BadSyntax(argstr, i, "Found ':=' in Turtle mode") # patch file relates two formulae, uses this @@ really? res.append(('->', Logic_NS + "becomes")) return i + 2 j = self.prop(argstr, i, r) if j >= 0: res.append(('->', r[0])) return j if argstr[i:i + 2] == ">-" or argstr[i:i + 2] == "<-": self.BadSyntax(argstr, j, ">- ... -> syntax is obsolete.") return -1 def prop(self, argstr, i, res): return self.item(argstr, i, res) def item(self, argstr, i, res): return self.path(argstr, i, res) def blankNode(self, uri=None): return self._store.newBlankNode(self._context, uri, why=self._reason2) def path(self, argstr, i, res): """Parse the path production. """ j = self.nodeOrLiteral(argstr, i, res) if j < 0: return j # nope while argstr[j:j + 1] in "!^": # no spaces, must follow exactly (?) ch = argstr[j:j + 1] subj = res.pop() obj = self.blankNode(uri=self.here(j)) j = self.node(argstr, j + 1, res) if j < 0: self.BadSyntax(argstr, j, "EOF found in middle of path syntax") pred = res.pop() if ch == "^": # Reverse traverse self.makeStatement((self._context, pred, obj, subj)) else: self.makeStatement((self._context, pred, subj, obj)) res.append(obj) return j def anonymousNode(self, ln): """Remember or generate a term for one of these _: anonymous nodes""" term = self._anonymousNodes.get(ln, None) if term is not None: return term term = self._store.newBlankNode(self._context, why=self._reason2) self._anonymousNodes[ln] = term return term def node(self, argstr, i, res, subjectAlready=None): """Parse the <node> production. Space is now skipped once at the beginning instead of in multipe calls to self.skipSpace(). 
""" subj = subjectAlready j = self.skipSpace(argstr, i) if j < 0: return j # eof i = j ch = argstr[i:i + 1] # Quick 1-character checks first: if ch == "[": bnodeID = self.here(i) j = self.skipSpace(argstr, i + 1) if j < 0: self.BadSyntax(argstr, i, "EOF after '['") # Hack for "is" binding name to anon node if argstr[j:j + 1] == "=": if self.turtle: self.BadSyntax(argstr, j, "Found '[=' or '[ =' when in turtle mode.") i = j + 1 objs = [] j = self.objectList(argstr, i, objs) if j >= 0: subj = objs[0] if len(objs) > 1: for obj in objs: self.makeStatement((self._context, DAML_sameAs, subj, obj)) j = self.skipSpace(argstr, j) if j < 0: self.BadSyntax(argstr, i, "EOF when objectList expected after [ = ") if argstr[j:j + 1] == ";": j = j + 1 else: self.BadSyntax(argstr, i, "objectList expected after [= ") if subj is None: subj = self.blankNode(uri=bnodeID) i = self.property_list(argstr, j, subj) if i < 0: self.BadSyntax(argstr, j, "property_list expected") j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax(argstr, i, "EOF when ']' expected after [ <propertyList>") if argstr[j:j + 1] != "]": self.BadSyntax(argstr, j, "']' expected") res.append(subj) return j + 1 if not self.turtle and ch == "{": # if self.turtle: # self.BadSyntax(argstr, i, # "found '{' while in Turtle mode, Formulas not supported!") ch2 = argstr[i + 1:i + 2] if ch2 == '$': # a set i += 1 j = i + 1 List = [] first_run = True while 1: i = self.skipSpace(argstr, j) if i < 0: self.BadSyntax(argstr, i, "needed '$}', found end.") if argstr[i:i + 2] == '$}': j = i + 2 break if not first_run: if argstr[i:i + 1] == ',': i += 1 else: self.BadSyntax( argstr, i, "expected: ','") else: first_run = False item = [] j = self.item( argstr, i, item) # @@@@@ should be path, was object if j < 0: self.BadSyntax(argstr, i, "expected item in set or '$}'") List.append(self._store.intern(item[0])) res.append(self._store.newSet(List, self._context)) return j else: # parse a formula j = i + 1 oldParentContext = 
self._parentContext self._parentContext = self._context parentAnonymousNodes = self._anonymousNodes grandParentVariables = self._parentVariables self._parentVariables = self._variables self._anonymousNodes = {} self._variables = self._variables.copy() reason2 = self._reason2 self._reason2 = becauseSubexpression if subj is None: subj = self._store.newFormula() self._context = subj while 1: i = self.skipSpace(argstr, j) if i < 0: self.BadSyntax( argstr, i, "needed '}', found end.") if argstr[i:i + 1] == "}": j = i + 1 break j = self.directiveOrStatement(argstr, i) if j < 0: self.BadSyntax( argstr, i, "expected statement or '}'") self._anonymousNodes = parentAnonymousNodes self._variables = self._parentVariables self._parentVariables = grandParentVariables self._context = self._parentContext self._reason2 = reason2 self._parentContext = oldParentContext res.append(subj.close()) # No use until closed return j if ch == "(": thing_type = self._store.newList ch2 = argstr[i + 1:i + 2] if ch2 == '$': thing_type = self._store.newSet i += 1 j = i + 1 List = [] while 1: i = self.skipSpace(argstr, j) if i < 0: self.BadSyntax( argstr, i, "needed ')', found end.") if argstr[i:i + 1] == ')': j = i + 1 break item = [] j = self.item( argstr, i, item) # @@@@@ should be path, was object if j < 0: self.BadSyntax(argstr, i, "expected item in list or ')'") List.append(self._store.intern(item[0])) res.append(thing_type(List, self._context)) return j j = self.tok('this', argstr, i) # This context if j >= 0: self.BadSyntax(argstr, i, "Keyword 'this' was ancient N3. Now use " + "@forSome and @forAll keywords.") # booleans j = self.tok('true', argstr, i) if j >= 0: res.append(True) return j j = self.tok('false', argstr, i) if j >= 0: res.append(False) return j if subj is None: # If this can be a named node, then check for a name. 
j = self.uri_ref2(argstr, i, res) if j >= 0: return j return -1 def property_list(self, argstr, i, subj): """Parse property list Leaves the terminating punctuation in the buffer """ while 1: while 1: # skip repeat ; j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax(argstr, i, "EOF found when expected verb in property list") if argstr[j]!=';': break i = j+1 if argstr[j:j + 2] == ":-": if self.turtle: self.BadSyntax(argstr, j, "Found in ':-' in Turtle mode") i = j + 2 res = [] j = self.node(argstr, i, res, subj) if j < 0: self.BadSyntax(argstr, i, "bad {} or () or [] node after :- ") i = j continue i = j v = [] j = self.verb(argstr, i, v) if j <= 0: return i # void but valid objs = [] i = self.objectList(argstr, j, objs) if i < 0: self.BadSyntax(argstr, j, "objectList expected") for obj in objs: dira, sym = v[0] if dira == '->': self.makeStatement((self._context, sym, subj, obj)) else: self.makeStatement((self._context, sym, obj, subj)) j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax(argstr, j, "EOF found in list of objects") if argstr[i:i + 1] != ";": return i i = i + 1 # skip semicolon and continue def commaSeparatedList(self, argstr, j, res, what): """return value: -1 bad syntax; >1 new position in argstr res has things found appended """ i = self.skipSpace(argstr, j) if i < 0: self.BadSyntax(argstr, i, "EOF found expecting comma sep list") if argstr[i] == ".": return j # empty list is OK i = what(argstr, i, res) if i < 0: return -1 while 1: j = self.skipSpace(argstr, i) if j < 0: return j # eof ch = argstr[j:j + 1] if ch != ",": if ch != ".": return -1 return j # Found but not swallowed "." i = what(argstr, j + 1, res) if i < 0: self.BadSyntax(argstr, i, "bad list content") def objectList(self, argstr, i, res): i = self.object(argstr, i, res) if i < 0: return -1 while 1: j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax(argstr, j, "EOF found after object") if argstr[j:j + 1] != ",": return j # Found something else! 
i = self.object(argstr, j + 1, res) if i < 0: return i def checkDot(self, argstr, i): j = self.skipSpace(argstr, i) if j < 0: return j # eof if argstr[j:j + 1] == ".": return j + 1 # skip if argstr[j:j + 1] == "}": return j # don't skip it if argstr[j:j + 1] == "]": return j self.BadSyntax(argstr, j, "expected '.' or '}' or ']' at end of statement") def uri_ref2(self, argstr, i, res): """Generate uri from n3 representation. Note that the RDF convention of directly concatenating NS and local name is now used though I prefer inserting a '#' to make the namesapces look more like what XML folks expect. """ qn = [] j = self.qname(argstr, i, qn) if j >= 0: pfx, ln = qn[0] if pfx is None: assert 0, "not used?" ns = self._baseURI + ADDED_HASH else: try: ns = self._bindings[pfx] except KeyError: if pfx == "_": # Magic prefix 2001/05/30, can be changed res.append(self.anonymousNode(ln)) return j if not self.turtle and pfx == "": ns = join(self._baseURI or "", "#") else: self.BadSyntax(argstr, i, "Prefix \"%s:\" not bound" % (pfx)) symb = self._store.newSymbol(ns + ln) if symb in self._variables: res.append(self._variables[symb]) else: res.append(symb) # @@@ "#" CONVENTION return j i = self.skipSpace(argstr, i) if i < 0: return -1 if argstr[i] == "?": v = [] j = self.variable(argstr, i, v) if j > 0: # Forget varibles as a class, only in context. res.append(v[0]) return j return -1 elif argstr[i] == "<": i = i + 1 st = i while i < len(argstr): if argstr[i] == ">": uref = argstr[st:i] # the join should dealt with "": # expand unicode escapes uref = unicodeEscape8.sub(unicodeExpand, uref) uref = unicodeEscape4.sub(unicodeExpand, uref) if self._baseURI: uref = join(self._baseURI, uref) # was: uripath.join else: assert ":" in uref, \ "With no base URI, cannot deal with relative URIs" if argstr[i - 1:i] == "#" and not uref[-1:] == "#": uref = uref + \ "#" # She meant it! Weirdness in urlparse? 
symb = self._store.newSymbol(uref) if symb in self._variables: res.append(self._variables[symb]) else: res.append(symb) return i + 1 i = i + 1 self.BadSyntax(argstr, j, "unterminated URI reference") elif self.keywordsSet: v = [] j = self.bareWord(argstr, i, v) if j < 0: return -1 # Forget varibles as a class, only in context. if v[0] in self.keywords: self.BadSyntax(argstr, i, 'Keyword "%s" not allowed here.' % v[0]) res.append(self._store.newSymbol(self._bindings[""] + v[0])) return j else: return -1 def skipSpace(self, argstr, i): """Skip white space, newlines and comments. return -1 if EOF, else position of first non-ws character""" while 1: m = eol.match(argstr, i) if m is None: break self.lines = self.lines + 1 i = m.end() # Point to first character unmatched self.startOfLine = i m = ws.match(argstr, i) if m is not None: i = m.end() m = eof.match(argstr, i) if m is not None: return -1 return i def variable(self, argstr, i, res): """ ?abc -> variable(:abc) """ j = self.skipSpace(argstr, i) if j < 0: return -1 if argstr[j:j + 1] != "?": return -1 j = j + 1 i = j if argstr[j] in "0123456789-": self.BadSyntax(argstr, j, "Varible name can't start with '%s'" % argstr[j]) while i < len(argstr) and argstr[i] not in _notKeywordsChars: i = i + 1 if self._parentContext is None: varURI = self._store.newSymbol(self._baseURI + "#" + argstr[j:i]) if varURI not in self._variables: self._variables[varURI] = self._context.newUniversal( varURI, why=self._reason2) res.append(self._variables[varURI]) return i # @@ was: # self.BadSyntax(argstr, j, # "Can't use ?xxx syntax for variable in outermost level: %s" # % argstr[j-1:i]) varURI = self._store.newSymbol(self._baseURI + "#" + argstr[j:i]) if varURI not in self._parentVariables: self._parentVariables[varURI] = self._parentContext.newUniversal( varURI, why=self._reason2) res.append(self._parentVariables[varURI]) return i def bareWord(self, argstr, i, res): """ abc -> :abc """ j = self.skipSpace(argstr, i) if j < 0: return -1 if 
argstr[j] in "0123456789-" or argstr[j] in _notKeywordsChars: return -1 i = j while i < len(argstr) and argstr[i] not in _notKeywordsChars: i = i + 1 res.append(argstr[j:i]) return i def qname(self, argstr, i, res): """ xyz:def -> ('xyz', 'def') If not in keywords and keywordsSet: def -> ('', 'def') :def -> ('', 'def') """ i = self.skipSpace(argstr, i) if i < 0: return -1 c = argstr[i] if c in "0123456789-+.": return -1 if c not in _notNameChars: ln = c i = i + 1 while i < len(argstr): c = argstr[i] if c not in _notNameChars: ln = ln + c i = i + 1 else: break if argstr[i - 1] == ".": # qname cannot end with "." ln = ln[:-1] if not ln: return -1 i -= 1 else: # First character is non-alpha ln = '' # Was: None - TBL (why? useful?) if i < len(argstr) and argstr[i] == ':': pfx = ln # bnodes names have different rules if pfx == '_': allowedChars = _notNameChars else: allowedChars = _notQNameChars i = i + 1 lastslash = False # start = i # TODO first char . ln = '' while i < len(argstr): c = argstr[i] if not lastslash and c == '\\': lastslash = True i += 1 elif lastslash or c not in allowedChars: if lastslash: if c not in escapeChars: raise BadSyntax(self._thisDoc, self.line, argstr, i, "illegal escape "+c) elif c=='%': if argstr[i+1] not in hexChars or argstr[i+2] not in hexChars: raise BadSyntax(self._thisDoc, self.line, argstr, i, "illegal hex escape "+c) ln = ln + c i = i + 1 lastslash = False else: break if lastslash: raise BadSyntax( self._thisDoc, self.line, argstr, i, "qname cannot end with \\") if argstr[i-1]=='.': # localname cannot end in . 
ln = ln[:-1] if not ln: return -1 i -= 1 res.append((pfx, ln)) return i else: # delimiter was not ":" if ln and self.keywordsSet and ln not in self.keywords: res.append(('', ln)) return i return -1 def object(self, argstr, i, res): j = self.subject(argstr, i, res) if j >= 0: return j else: j = self.skipSpace(argstr, i) if j < 0: return -1 else: i = j if argstr[i] in self.string_delimiters: if argstr[i:i + 3] == argstr[i] * 3: delim = argstr[i] * 3 else: delim = argstr[i] i = i + len(delim) j, s = self.strconst(argstr, i, delim) res.append(self._store.newLiteral(s)) return j else: return -1 def nodeOrLiteral(self, argstr, i, res): j = self.node(argstr, i, res) startline = self.lines # Remember where for error messages if j >= 0: return j else: j = self.skipSpace(argstr, i) if j < 0: return -1 else: i = j ch = argstr[i] if ch in "-+0987654321.": m = exponent_syntax.match(argstr, i) if m: j = m.end() res.append(float(argstr[i:j])) return j m = decimal_syntax.match(argstr, i) if m: j = m.end() res.append(Decimal(argstr[i:j])) return j m = integer_syntax.match(argstr, i) if m: j = m.end() res.append(long(argstr[i:j])) return j # return -1 ## or fall through? if argstr[i] in self.string_delimiters: if argstr[i:i + 3] == argstr[i] * 3: delim = argstr[i] * 3 else: delim = argstr[i] i = i + len(delim) dt = None j, s = self.strconst(argstr, i, delim) lang = None if argstr[j:j + 1] == "@": # Language? 
m = langcode.match(argstr, j + 1) if m is None: raise BadSyntax( self._thisDoc, startline, argstr, i, "Bad language code syntax on string " + "literal, after @") i = m.end() lang = argstr[j + 1:i] j = i if argstr[j:j + 2] == "^^": res2 = [] j = self.uri_ref2(argstr, j + 2, res2) # Read datatype URI dt = res2[0] res.append(self._store.newLiteral(s, dt, lang)) return j else: return -1 def uriOf(self, sym): if isinstance(sym, types.TupleType): return sym[1] # old system for --pipe # return sym.uriref() # cwm api return sym def strconst(self, argstr, i, delim): """parse an N3 string constant delimited by delim. return index, val """ delim1 = delim[0] delim2, delim3, delim4, delim5 = delim1 * 2, delim1 * 3, delim1 * 4, delim1 * 5 j = i ustr = u"" # Empty unicode string startline = self.lines # Remember where for error messages while j < len(argstr): if argstr[j] == delim1: if delim == delim1: # done when delim is " or ' i = j + 1 return i, ustr if delim == delim3: # done when delim is """ or ''' and, respectively ... if argstr[j:j + 5] == delim5: # ... we have "" or '' before i = j + 5 ustr = ustr + delim2 return i, ustr if argstr[j:j + 4] == delim4: # ... we have " or ' before i = j + 4 ustr = ustr + delim1 return i, ustr if argstr[j:j + 3] == delim3: # current " or ' is part of delim i = j + 3 return i, ustr # we are inside of the string and current char is " or ' j = j + 1 ustr = ustr + delim1 continue m = interesting.search(argstr, j) # was argstr[j:]. # Note for pos param to work, MUST be compiled ... re bug? 
assert m, "Quote expected in string at ^ in %s^%s" % ( argstr[j - 20:j], argstr[j:j + 20]) # at least need a quote i = m.start() try: ustr = ustr + argstr[j:i] except UnicodeError: err = "" for c in argstr[j:i]: err = err + (" %02x" % ord(c)) streason = sys.exc_info()[1].__str__() raise BadSyntax( self._thisDoc, startline, argstr, j, "Unicode error appending characters" + " %s to string, because\n\t%s" % (err, streason)) # print "@@@ i = ",i, " j=",j, "m.end=", m.end() ch = argstr[i] if ch == delim1: j = i continue elif ch in ('"', "'") and ch != delim1: ustr = ustr + ch j = i + 1 continue elif ch in "\r\n": if delim == delim1: raise BadSyntax( self._thisDoc, startline, argstr, i, "newline found in string literal") self.lines = self.lines + 1 ustr = ustr + ch j = i + 1 self.startOfLine = j elif ch == "\\": j = i + 1 ch = argstr[j:j + 1] # Will be empty if string ends if not ch: raise BadSyntax( self._thisDoc, startline, argstr, i, "unterminated string literal (2)") k = 'abfrtvn\\"'.find(ch) if k >= 0: uch = '\a\b\f\r\t\v\n\\"'[k] ustr = ustr + uch j = j + 1 elif ch == "u": j, ch = self.uEscape(argstr, j + 1, startline) ustr = ustr + ch elif ch == "U": j, ch = self.UEscape(argstr, j + 1, startline) ustr = ustr + ch else: self.BadSyntax(argstr, i, "bad escape") self.BadSyntax(argstr, i, "unterminated string literal") def _unicodeEscape(self, argstr, i, startline, reg, n, prefix): if len(argstr)<i+n: raise BadSyntax( self._thisDoc, startline, argstr, i, "unterminated string literal(3)") try: return i+n, reg.sub(unicodeExpand, '\\'+prefix+argstr[i:i+n]) except: raise BadSyntax( self._thisDoc, startline, argstr, i, "bad string literal hex escape: "+argstr[i:i+n]) def uEscape(self, argstr, i, startline): return self._unicodeEscape(argstr, i, startline, unicodeEscape4, 4, 'u') def UEscape(self, argstr, i, startline): return self._unicodeEscape(argstr, i, startline, unicodeEscape8, 8, 'U') def BadSyntax(self, argstr, i, msg): raise BadSyntax(self._thisDoc, self.lines, 
argstr, i, msg) # If we are going to do operators then they should generate # [ is operator:plus of ( \1 \2 ) ] class BadSyntax(SyntaxError): def __init__(self, uri, lines, argstr, i, why): self._str = argstr.encode( 'utf-8') # Better go back to strings for errors self._i = i self._why = why self.lines = lines self._uri = uri def __str__(self): argstr = self._str i = self._i st = 0 if i > 60: pre = "..." st = i - 60 else: pre = "" if len(argstr) - i > 60: post = "..." else: post = "" return 'at line %i of <%s>:\nBad syntax (%s) at ^ in:\n"%s%s^%s%s"' \ % (self.lines + 1, self._uri, self._why, pre, argstr[st:i], argstr[i:i + 60], post) @property def message(self): return str(self) ############################################################################### class Formula(object): number = 0 def __init__(self, parent): self.uuid = uuid4().hex self.counter = 0 Formula.number += 1 self.number = Formula.number self.existentials = {} self.universals = {} self.quotedgraph = QuotedGraph( store=parent.store, identifier=self.id()) def __str__(self): return '_:Formula%s' % self.number def id(self): return BNode('_:Formula%s' % self.number) def newBlankNode(self, uri=None, why=None): if uri is None: self.counter += 1 bn = BNode('f%sb%s' % (self.uuid, self.counter)) else: bn = BNode(uri.split('#').pop().replace('_', 'b')) return bn def newUniversal(self, uri, why=None): return Variable(uri.split('#').pop()) def declareExistential(self, x): self.existentials[x] = self.newBlankNode() def close(self): return self.quotedgraph r_hibyte = re.compile(r'([\x80-\xff])') class RDFSink(object): def __init__(self, graph): self.rootFormula = None self.counter = 0 self.graph = graph def newFormula(self): assert self.graph.store.formula_aware f = Formula(self.graph) return f def newGraph(self, identifier): return Graph(self.graph.store, identifier) def newSymbol(self, *args): return URIRef(args[0]) def newBlankNode(self, arg=None, uri=None, why=None): if isinstance(arg, Formula): return 
arg.newBlankNode(uri) elif isinstance(arg, Graph) or arg is None: self.counter += 1 bn = BNode('n' + str(self.counter)) else: bn = BNode(str(arg[0]).split('#').pop().replace('_', 'b')) return bn def newLiteral(self, s, dt, lang): if dt: return Literal(s, datatype=dt) else: return Literal(s, lang=lang) def newList(self, n, f): if not n: return self.newSymbol( 'http://www.w3.org/1999/02/22-rdf-syntax-ns#nil' ) a = self.newBlankNode(f) first = self.newSymbol( 'http://www.w3.org/1999/02/22-rdf-syntax-ns#first' ) rest = self.newSymbol( 'http://www.w3.org/1999/02/22-rdf-syntax-ns#rest') self.makeStatement((f, first, a, n[0])) self.makeStatement((f, rest, a, self.newList(n[1:], f))) return a def newSet(self, *args): return set(args) def setDefaultNamespace(self, *args): return ':'.join(repr(n) for n in args) def makeStatement(self, quadruple, why=None): f, p, s, o = quadruple if hasattr(p, 'formula'): raise Exception("Formula used as predicate") s = self.normalise(f, s) p = self.normalise(f, p) o = self.normalise(f, o) if f == self.rootFormula: # print s, p, o, '.' 
self.graph.add((s, p, o)) elif isinstance(f, Formula): f.quotedgraph.add((s, p, o)) else: f.add((s,p,o)) # return str(quadruple) def normalise(self, f, n): if isinstance(n, tuple): return URIRef(unicode(n[1])) if isinstance(n, bool): s = Literal(str(n).lower(), datatype=BOOLEAN_DATATYPE) return s if isinstance(n, int) or isinstance(n, long): s = Literal(unicode(n), datatype=INTEGER_DATATYPE) return s if isinstance(n, Decimal): value = str(n) if value == '-0': value = '0' s = Literal(value, datatype=DECIMAL_DATATYPE) return s if isinstance(n, float): s = Literal(str(n), datatype=DOUBLE_DATATYPE) return s if isinstance(f, Formula): if n in f.existentials: return f.existentials[n] # if isinstance(n, Var): # if f.universals.has_key(n): # return f.universals[n] # f.universals[n] = f.newBlankNode() # return f.universals[n] return n def intern(self, something): return something def bind(self, pfx, uri): pass # print pfx, ':', uri def startDoc(self, formula): self.rootFormula = formula def endDoc(self, formula): pass ################################################### # # Utilities # @py3compat.format_doctest_out def hexify(ustr): """Use URL encoding to return an ASCII string corresponding to the given UTF8 string >>> hexify("http://example/a b") %(b)s'http://example/a%%20b' """ # s1=ustr.encode('utf-8') s = "" for ch in ustr: # .encode('utf-8'): if ord(ch) > 126 or ord(ch) < 33: ch = "%%%02X" % ord(ch) else: ch = "%c" % ord(ch) s = s + ch return b(s) class TurtleParser(Parser): """ An RDFLib parser for Turtle See http://www.w3.org/TR/turtle/ """ def __init__(self): pass def parse(self, source, graph, encoding="utf-8", turtle=True): if encoding not in [None, "utf-8"]: raise Exception( ("N3/Turtle files are always utf-8 encoded, ", "I was passed: %s") % encoding) sink = RDFSink(graph) baseURI = graph.absolutize( source.getPublicId() or source.getSystemId() or "") p = SinkParser(sink, baseURI=baseURI, turtle=turtle) p.loadStream(source.getByteStream()) for prefix, namespace 
in p._bindings.items(): graph.bind(prefix, namespace) class N3Parser(TurtleParser): """ An RDFLib parser for Notation3 See http://www.w3.org/DesignIssues/Notation3.html """ def __init__(self): pass def parse(self, source, graph, encoding="utf-8"): # we're currently being handed a Graph, not a ConjunctiveGraph assert graph.store.context_aware # is this implied by formula_aware assert graph.store.formula_aware conj_graph = ConjunctiveGraph(store=graph.store) conj_graph.default_context = graph # TODO: CG __init__ should have a # default_context arg # TODO: update N3Processor so that it can use conj_graph as the sink conj_graph.namespace_manager = graph.namespace_manager TurtleParser.parse(self, source, conj_graph, encoding, turtle=False) def _test(): import doctest doctest.testmod() # if __name__ == '__main__': # _test() def main(): g = ConjunctiveGraph() sink = RDFSink(g) base_uri = 'file://' + os.path.join(os.getcwd(), sys.argv[1]) p = SinkParser(sink, baseURI=base_uri) p._bindings[''] = p._baseURI + '#' p.startDoc() f = open(sys.argv[1], 'rb') rdbytes = f.read() f.close() p.feed(rdbytes) p.endDoc() for t in g.quads((None, None, None)): print t if __name__ == '__main__': main() # ends
mr-niels-christensen/finna-be-octo-archer
briefme/src/main/rdflib/plugins/parsers/notation3.py
Python
gpl-2.0
61,070
0.002554
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for the private `_RestructuredDataset` transformation.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.data.experimental.ops import batching from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.data.util import nest from tensorflow.python.framework import dtypes from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.platform import test # TODO(b/117581999): Add eager specific test. 
class RestructuredDatasetTest(test_base.DatasetTestBase): @test_util.run_deprecated_v1 def testRestructureDataset(self): components = (array_ops.placeholder(dtypes.int32), (array_ops.placeholder(dtypes.int32, shape=[None]), array_ops.placeholder(dtypes.int32, shape=[20, 30]))) dataset = dataset_ops.Dataset.from_tensors(components) i32 = dtypes.int32 test_cases = [((i32, i32, i32), None), (((i32, i32), i32), None), ((i32, i32, i32), (None, None, None)), ((i32, i32, i32), ([17], [17], [20, 30]))] for new_types, new_shape_lists in test_cases: # pylint: disable=protected-access new = batching._RestructuredDataset(dataset, new_types, new_shape_lists) # pylint: enable=protected-access self.assertEqual(new_types, new.output_types) if new_shape_lists is not None: for expected_shape_list, shape in zip( nest.flatten(new_shape_lists), nest.flatten(new.output_shapes)): if expected_shape_list is None: self.assertIs(None, shape.ndims) else: self.assertEqual(expected_shape_list, shape.as_list()) fail_cases = [((i32, dtypes.int64, i32), None), ((i32, i32, i32, i32), None), ((i32, i32, i32), ((None, None), None)), ((i32, i32, i32), (None, None, None, None)), ((i32, i32, i32), (None, [None], [21, 30]))] for new_types, new_shape_lists in fail_cases: with self.assertRaises(ValueError): # pylint: disable=protected-access new = batching._RestructuredDataset(dataset, new_types, new_shape_lists) # pylint: enable=protected-access if __name__ == "__main__": test.main()
jendap/tensorflow
tensorflow/python/data/experimental/kernel_tests/restructured_dataset_test.py
Python
apache-2.0
3,111
0.004179
import widgy from widgy.models import Content from widgy.utils import update_context, render_to_string @widgy.register class Adaptive(Content): def render(self, context): template = 'widgy/adaptive/render.html' size = context.get('device_info') if size['type'] == 'tablet': template = 'widgy/adaptive/tablet.html' elif size['type'] == 'phone': template = 'widgy/adaptive/phone.html' with update_context(context, {'self': self}): return render_to_string(template, context)
zmetcalf/fusionbox-demo-project
adaptive/models.py
Python
gpl-3.0
558
0
# # The Python Imaging Library. # $Id: GifImagePlugin.py 2134 2004-10-06 08:55:20Z fredrik $ # # GIF file handling # # History: # 1995-09-01 fl Created # 1996-12-14 fl Added interlace support # 1996-12-30 fl Added animation support # 1997-01-05 fl Added write support, fixed local colour map bug # 1997-02-23 fl Make sure to load raster data in getdata() # 1997-07-05 fl Support external decoder (0.4) # 1998-07-09 fl Handle all modes when saving (0.5) # 1998-07-15 fl Renamed offset attribute to avoid name clash # 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) # 2001-04-17 fl Added palette optimization (0.7) # 2002-06-06 fl Added transparency support for save (0.8) # 2004-02-24 fl Disable interlacing for small images # # Copyright (c) 1997-2004 by Secret Labs AB # Copyright (c) 1995-2004 by Fredrik Lundh # # See the README file for information on usage and redistribution. # __version__ = "0.9" import Image, ImageFile, ImagePalette # -------------------------------------------------------------------- # Helpers def i16(c): return ord(c[0]) + (ord(c[1])<<8) def o16(i): return chr(i&255) + chr(i>>8&255) # -------------------------------------------------------------------- # Identify/read GIF files def _accept(prefix): return prefix[:6] in ["GIF87a", "GIF89a"] ## # Image plugin for GIF images. This plugin supports both GIF87 and # GIF89 images. 
class GifImageFile(ImageFile.ImageFile): format = "GIF" format_description = "Compuserve GIF" global_palette = None def data(self): s = self.fp.read(1) if s and ord(s): return self.fp.read(ord(s)) return None def _open(self): # Screen s = self.fp.read(13) if s[:6] not in ["GIF87a", "GIF89a"]: raise SyntaxError, "not a GIF file" self.info["version"] = s[:6] self.size = i16(s[6:]), i16(s[8:]) self.tile = [] flags = ord(s[10]) bits = (flags & 7) + 1 if flags & 128: # get global palette self.info["background"] = ord(s[11]) # check if palette contains colour indices p = self.fp.read(3<<bits) for i in range(0, len(p), 3): if not (chr(i/3) == p[i] == p[i+1] == p[i+2]): p = ImagePalette.raw("RGB", p) self.global_palette = self.palette = p break self.__fp = self.fp # FIXME: hack self.__rewind = self.fp.tell() self.seek(0) # get ready to read first frame def seek(self, frame): if frame == 0: # rewind self.__offset = 0 self.dispose = None self.__frame = -1 self.__fp.seek(self.__rewind) if frame != self.__frame + 1: raise ValueError, "cannot seek to frame %d" % frame self.__frame = frame self.tile = [] self.fp = self.__fp if self.__offset: # backup to last frame self.fp.seek(self.__offset) while self.data(): pass self.__offset = 0 if self.dispose: self.im = self.dispose self.dispose = None self.palette = self.global_palette while 1: s = self.fp.read(1) if not s or s == ";": break elif s == "!": # # extensions # s = self.fp.read(1) block = self.data() if ord(s) == 249: # # graphic control extension # flags = ord(block[0]) if flags & 1: self.info["transparency"] = ord(block[3]) self.info["duration"] = i16(block[1:3]) * 10 try: # disposal methods if flags & 8: # replace with background colour self.dispose = Image.core.fill("P", self.size, self.info["background"]) elif flags & 16: # replace with previous contents self.dispose = self.im.copy() except (AttributeError, KeyError): pass elif ord(s) == 255: # # application extension # self.info["extension"] = block, self.fp.tell() if 
block[:11] == "NETSCAPE2.0": self.info["loop"] = 1 # FIXME while self.data(): pass elif s == ",": # # local image # s = self.fp.read(9) # extent x0, y0 = i16(s[0:]), i16(s[2:]) x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:]) flags = ord(s[8]) interlace = (flags & 64) != 0 if flags & 128: bits = (flags & 7) + 1 self.palette =\ ImagePalette.raw("RGB", self.fp.read(3<<bits)) # image data bits = ord(self.fp.read(1)) self.__offset = self.fp.tell() self.tile = [("gif", (x0, y0, x1, y1), self.__offset, (bits, interlace))] break else: pass # raise IOError, "illegal GIF tag `%x`" % ord(s) if not self.tile: # self.__fp = None raise EOFError, "no more images in GIF file" self.mode = "L" if self.palette: self.mode = "P" def tell(self): return self.__frame # -------------------------------------------------------------------- # Write GIF files try: import _imaging_gif except ImportError: _imaging_gif = None RAWMODE = { "1": "L", "L": "L", "P": "P", } def _save(im, fp, filename): if _imaging_gif: # call external driver try: _imaging_gif.save(im, fp, filename) return except IOError: pass # write uncompressed file try: rawmode = RAWMODE[im.mode] imOut = im except KeyError: # convert on the fly (EXPERIMENTAL -- I'm not sure PIL # should automatically convert images on save...) if Image.getmodebase(im.mode) == "RGB": imOut = im.convert("P") rawmode = "P" else: imOut = im.convert("L") rawmode = "L" # header for s in getheader(imOut, im.encoderinfo): fp.write(s) flags = 0 try: interlace = im.encoderinfo["interlace"] except KeyError: interlace = 1 # workaround for @PIL153 if min(im.size) < 16: interlace = 0 if interlace: flags = flags | 64 try: transparency = im.encoderinfo["transparency"] except KeyError: pass else: # transparency extension block fp.write("!" 
+ chr(249) + # extension intro chr(4) + # length chr(1) + # transparency info present o16(0) + # duration chr(int(transparency)) # transparency index + chr(0)) # local image header fp.write("," + o16(0) + o16(0) + # bounding box o16(im.size[0]) + # size o16(im.size[1]) + chr(flags) + # flags chr(8)) # bits imOut.encoderconfig = (8, interlace) ImageFile._save(imOut, fp, [("gif", (0,0)+im.size, 0, rawmode)]) fp.write("\0") # end of image data fp.write(";") # end of file try: fp.flush() except: pass def _save_netpbm(im, fp, filename): # # If you need real GIF compression and/or RGB quantization, you # can use the external NETPBM/PBMPLUS utilities. See comments # below for information on how to enable this. import os file = im._dump() if im.mode != "RGB": os.system("ppmtogif %s >%s" % (file, filename)) else: os.system("ppmquant 256 %s | ppmtogif >%s" % (file, filename)) try: os.unlink(file) except: pass # -------------------------------------------------------------------- # GIF utilities def getheader(im, info=None): """Return a list of strings representing a GIF header""" optimize = info and info.get("optimize", 0) s = [ "GIF87a" + # magic o16(im.size[0]) + # size o16(im.size[1]) + chr(7 + 128) + # flags: bits + palette chr(0) + # background chr(0) # reserved/aspect ] if optimize: # minimize color palette i = 0 maxcolor = 0 for count in im.histogram(): if count: maxcolor = i i = i + 1 else: maxcolor = 256 # global palette if im.mode == "P": # colour palette s.append(im.im.getpalette("RGB")[:maxcolor*3]) else: # greyscale for i in range(maxcolor): s.append(chr(i) * 3) return s def getdata(im, offset = (0, 0), **params): """Return a list of strings representing this image. 
The first string is a local image header, the rest contains encoded image data.""" class collector: data = [] def write(self, data): self.data.append(data) im.load() # make sure raster data is available fp = collector() try: im.encoderinfo = params # local image header fp.write("," + o16(offset[0]) + # offset o16(offset[1]) + o16(im.size[0]) + # size o16(im.size[1]) + chr(0) + # flags chr(8)) # bits ImageFile._save(im, fp, [("gif", (0,0)+im.size, 0, RAWMODE[im.mode])]) fp.write("\0") # end of image data finally: del im.encoderinfo return fp.data # -------------------------------------------------------------------- # Registry Image.register_open(GifImageFile.format, GifImageFile, _accept) Image.register_save(GifImageFile.format, _save) Image.register_extension(GifImageFile.format, ".gif") Image.register_mime(GifImageFile.format, "image/gif") # # Uncomment the following line if you wish to use NETPBM/PBMPLUS # instead of the built-in "uncompressed" GIF encoder # Image.register_save(GifImageFile.format, _save_netpbm)
tkaitchuck/nupic
external/linux64/lib/python2.6/site-packages/PIL/GifImagePlugin.py
Python
gpl-3.0
10,996
0.003183
class Solution(object): def generateParenthesis(self, n): if n < 1: return [] parens=[] def generate(p, left, right): if left: generate(p + '(', left-1, right) if right > left: generate(p + ')', left, right-1) if not right: # Base Condition. parens.append(p), return parens output = generate('', n, n) return output n=3 obj = Solution() output = obj.generateParenthesis(n) print(output)
ravyg/algorithms
python/22_generateParentheses.py
Python
gpl-3.0
529
0.009452
''' Name: Weather Application Author: Redder04 Extra Requirements: Unirest, Mashape Key Unirest: http://unirest.io/ Mashape: https://www.mashape.com/ Description: This application will connect to a Mashape Weather API. The user will supply a City or State (I might add GPS Capabilites later) and send the request. The API will return JSON data with the weather data. Github: https://github.com/Redder/Weather-App-Python P.S: I tried to heavily code my project, any questions feel free to post on Github. P.S*2: Any "clear" commands can be changed to "cls" for windows ''' #Import all the libraries we need import unirest import json import os #Assign X to 1 for our loop (We can use a While True Loop too, and break in the end, but I used x =1 loop and was to lazy to change it, AS long as it works, BUT! Is the while true loop more efficient?) x = 1 #Prints Welcome Screen os.system('clear') print('================================') print('Welcome to the Weather App!') print('Press Enter to Continue!') print('================================') raw_input('') #While Loops begins, You can use While True loop too while x == 1: #UserValue equals What the user inputs, the city or state UserValue = raw_input('Please enter a City or State: ') #Replace Space with a plus sign(So we can pass it onto the url) UserValue = UserValue.replace(' ','+' ) #Make web request to the url(with url value attached) with the Mashape KEY and the content type response = unirest.get("https://george-vustrey-weather.p.mashape.com/api.php?location=" + UserValue, headers={ "X-Mashape-Key": "Mashape Key goes Here!!!", "Accept": "application/json" } ) #Assigned the JSON Data we recieved with the varible data data = json.loads(response.raw_body) #Try to extract data and apply to varibles try: DOW1 = data[0]["day_of_week"] DOW2 = data[1]["day_of_week"] DOW3 = data[2]["day_of_week"] DOW4 = data[3]["day_of_week"] DOW5 = data[4]["day_of_week"] DOW6 = data[5]["day_of_week"] DOW7 = data[6]["day_of_week"] H1 = 
data[0]["high"] H2 = data[1]["high"] H3 = data[2]["high"] H4 = data[3]["high"] H5 = data[4]["high"] H6 = data[5]["high"] H7 = data[6]["high"] L1 = data[0]["low"] L2 = data[1]["low"] L3 = data[2]["low"] L4 = data[3]["low"] L5 = data[4]["low"] L6 = data[5]["low"] L7 = data[6]["low"] C1 = data[0]["condition"] C2 = data[1]["condition"] C3 = data[2]["condition"] C4 = data[3]["condition"] C5 = data[4]["condition"] C6 = data[5]["condition"] C7 = data[6]["condition"] print('\n') print('================================') print(DOW1) print('Condition: ' + C1) print('High: ' + H1) print('Low: ' + L1) print('================================') print('\n') print('================================') print(DOW2) print('Condition: ' + C2) print('High: ' + H2) print('Low: ' + L2) print('================================') print('\n') print('================================') print(DOW3) print('Condition: ' + C3) print('High: ' + H3) print('Low: ' + L3) print('================================') print('\n') print('================================') print(DOW4) print('Condition: ' + C4) print('High: ' + H4) print('Low: ' + L4) print('================================') print('\n') print('================================') print(DOW5) print('Condition: ' + C5) print('High: ' + H5) print('Low: ' + L5) print('================================') print('\n') print('================================') print(DOW6) print('Condition: ' + C6) print('High: ' + H6) print('Low: ' + L6) print('================================') print('\n') print('================================') print(DOW7) print('Condition: ' + C7) print('High: ' + H7) print('Low: ' + L7) print('================================') print('\n') raw_input('') pass #If the data does not exist, it may be due to the user inputting something thats not a city or state, OR any error with the API except KeyError, e: #Clear Screen and show error message that we get from the API os.system('clear') print('Error ' + str(data[0]['code']) + ':' + ' ' 
+ data[0]['message']) raw_input('') #Clear Screen and ask user if they want to quit or perform a search again os.system('clear') print('Would you like to search again? or Quit?') print('1: Search again') print('2: Quit') ans = input('') #If the quit, then x = 2 which breaks out of the loop, if Search again then do nothing and the Loop will restart if ans == 2: x = 2
Redder/Weather-App-Python
Weather.py
Python
mit
4,583
0.031857
""" Oracle database backend for Django. Requires cx_Oracle: http://cx-oracle.sourceforge.net/ """ from __future__ import unicode_literals import decimal import re import platform import sys import warnings def _setup_environment(environ): # Cygwin requires some special voodoo to set the environment variables # properly so that Oracle will see them. if platform.system().upper().startswith('CYGWIN'): try: import ctypes except ImportError as e: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("Error loading ctypes: %s; " "the Oracle backend requires ctypes to " "operate correctly under Cygwin." % e) kernel32 = ctypes.CDLL('kernel32') for name, value in environ: kernel32.SetEnvironmentVariableA(name, value) else: import os os.environ.update(environ) _setup_environment([ # Oracle takes client-side character set encoding from the environment. ('NLS_LANG', '.UTF8'), # This prevents unicode from getting mangled by getting encoded into the # potentially non-unicode database character set. ('ORA_NCHAR_LITERAL_REPLACE', 'TRUE'), ]) try: import cx_Oracle as Database except ImportError as e: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("Error loading cx_Oracle module: %s" % e) try: import pytz except ImportError: pytz = None from django.db import utils from django.db.backends import * from django.db.backends.oracle.client import DatabaseClient from django.db.backends.oracle.creation import DatabaseCreation from django.db.backends.oracle.introspection import DatabaseIntrospection from django.db.backends.oracle.schema import DatabaseSchemaEditor from django.utils.encoding import force_bytes, force_text DatabaseError = Database.DatabaseError IntegrityError = Database.IntegrityError # Check whether cx_Oracle was compiled with the WITH_UNICODE option if cx_Oracle is pre-5.1. This will # also be True for cx_Oracle 5.1 and in Python 3.0. 
See #19606 if int(Database.version.split('.', 1)[0]) >= 5 and \ (int(Database.version.split('.', 2)[1]) >= 1 or not hasattr(Database, 'UNICODE')): convert_unicode = force_text else: convert_unicode = force_bytes class DatabaseFeatures(BaseDatabaseFeatures): empty_fetchmany_value = () needs_datetime_string_cast = False interprets_empty_strings_as_nulls = True uses_savepoints = True has_select_for_update = True has_select_for_update_nowait = True can_return_id_from_insert = True allow_sliced_subqueries = False supports_subqueries_in_group_by = False supports_transactions = True supports_timezones = False has_zoneinfo_database = pytz is not None supports_bitwise_or = False can_defer_constraint_checks = True ignores_nulls_in_unique_constraints = False has_bulk_insert = True supports_tablespaces = True supports_sequence_reset = False supports_combined_alters = False max_index_name_length = 30 nulls_order_largest = True requires_literal_defaults = True connection_persists_old_columns = True nulls_order_largest = True class DatabaseOperations(BaseDatabaseOperations): compiler_module = "django.db.backends.oracle.compiler" def autoinc_sql(self, table, column): # To simulate auto-incrementing primary keys in Oracle, we have to # create a sequence and a trigger. 
sq_name = self._get_sequence_name(table) tr_name = self._get_trigger_name(table) tbl_name = self.quote_name(table) col_name = self.quote_name(column) sequence_sql = """ DECLARE i INTEGER; BEGIN SELECT COUNT(*) INTO i FROM USER_CATALOG WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE'; IF i = 0 THEN EXECUTE IMMEDIATE 'CREATE SEQUENCE "%(sq_name)s"'; END IF; END; /""" % locals() trigger_sql = """ CREATE OR REPLACE TRIGGER "%(tr_name)s" BEFORE INSERT ON %(tbl_name)s FOR EACH ROW WHEN (new.%(col_name)s IS NULL) BEGIN SELECT "%(sq_name)s".nextval INTO :new.%(col_name)s FROM dual; END; /""" % locals() return sequence_sql, trigger_sql def cache_key_culling_sql(self): return """ SELECT cache_key FROM (SELECT cache_key, rank() OVER (ORDER BY cache_key) AS rank FROM %s) WHERE rank = %%s + 1 """ def date_extract_sql(self, lookup_type, field_name): if lookup_type == 'week_day': # TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday. return "TO_CHAR(%s, 'D')" % field_name else: # http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) def date_interval_sql(self, sql, connector, timedelta): """ Implements the interval functionality for expressions format for Oracle: (datefield + INTERVAL '3 00:03:20.000000' DAY(1) TO SECOND(6)) """ minutes, seconds = divmod(timedelta.seconds, 60) hours, minutes = divmod(minutes, 60) days = str(timedelta.days) day_precision = len(days) fmt = "(%s %s INTERVAL '%s %02d:%02d:%02d.%06d' DAY(%d) TO SECOND(6))" return fmt % (sql, connector, days, hours, minutes, seconds, timedelta.microseconds, day_precision) def date_trunc_sql(self, lookup_type, field_name): # http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084 if lookup_type in ('year', 'month'): return "TRUNC(%s, '%s')" % (field_name, lookup_type.upper()) else: return "TRUNC(%s)" % field_name # Oracle crashes with "ORA-03113: end-of-file on communication channel" # if the 
time zone name is passed in parameter. Use interpolation instead. # https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ # This regexp matches all time zone names from the zoneinfo database. _tzname_re = re.compile(r'^[\w/:+-]+$') def _convert_field_to_tz(self, field_name, tzname): if not self._tzname_re.match(tzname): raise ValueError("Invalid time zone name: %s" % tzname) # Convert from UTC to local time, returning TIMESTAMP WITH TIME ZONE. result = "(FROM_TZ(%s, '0:00') AT TIME ZONE '%s')" % (field_name, tzname) # Extracting from a TIMESTAMP WITH TIME ZONE ignore the time zone. # Convert to a DATETIME, which is called DATE by Oracle. There's no # built-in function to do that; the easiest is to go through a string. result = "TO_CHAR(%s, 'YYYY-MM-DD HH24:MI:SS')" % result result = "TO_DATE(%s, 'YYYY-MM-DD HH24:MI:SS')" % result # Re-convert to a TIMESTAMP because EXTRACT only handles the date part # on DATE values, even though they actually store the time part. return "CAST(%s AS TIMESTAMP)" % result def datetime_extract_sql(self, lookup_type, field_name, tzname): if settings.USE_TZ: field_name = self._convert_field_to_tz(field_name, tzname) if lookup_type == 'week_day': # TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday. 
sql = "TO_CHAR(%s, 'D')" % field_name else: # http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) return sql, [] def datetime_trunc_sql(self, lookup_type, field_name, tzname): if settings.USE_TZ: field_name = self._convert_field_to_tz(field_name, tzname) # http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084 if lookup_type in ('year', 'month'): sql = "TRUNC(%s, '%s')" % (field_name, lookup_type.upper()) elif lookup_type == 'day': sql = "TRUNC(%s)" % field_name elif lookup_type == 'hour': sql = "TRUNC(%s, 'HH24')" % field_name elif lookup_type == 'minute': sql = "TRUNC(%s, 'MI')" % field_name else: sql = field_name # Cast to DATE removes sub-second precision. return sql, [] def convert_values(self, value, field): if isinstance(value, Database.LOB): value = value.read() if field and field.get_internal_type() == 'TextField': value = force_text(value) # Oracle stores empty strings as null. We need to undo this in # order to adhere to the Django convention of using the empty # string instead of null, but only if the field accepts the # empty string. if value is None and field and field.empty_strings_allowed: value = '' # Convert 1 or 0 to True or False elif value in (1, 0) and field and field.get_internal_type() in ('BooleanField', 'NullBooleanField'): value = bool(value) # Force floats to the correct type elif value is not None and field and field.get_internal_type() == 'FloatField': value = float(value) # Convert floats to decimals elif value is not None and field and field.get_internal_type() == 'DecimalField': value = util.typecast_decimal(field.format_number(value)) # cx_Oracle always returns datetime.datetime objects for # DATE and TIMESTAMP columns, but Django wants to see a # python datetime.date, .time, or .datetime. We use the type # of the Field to determine which to cast to, but it's not # always available. 
# As a workaround, we cast to date if all the time-related # values are 0, or to time if the date is 1/1/1900. # This could be cleaned a bit by adding a method to the Field # classes to normalize values from the database (the to_python # method is used for validation and isn't what we want here). elif isinstance(value, Database.Timestamp): if field and field.get_internal_type() == 'DateTimeField': pass elif field and field.get_internal_type() == 'DateField': value = value.date() elif field and field.get_internal_type() == 'TimeField' or (value.year == 1900 and value.month == value.day == 1): value = value.time() elif value.hour == value.minute == value.second == value.microsecond == 0: value = value.date() return value def deferrable_sql(self): return " DEFERRABLE INITIALLY DEFERRED" def drop_sequence_sql(self, table): return "DROP SEQUENCE %s;" % self.quote_name(self._get_sequence_name(table)) def fetch_returned_insert_id(self, cursor): return int(cursor._insert_id_var.getvalue()) def field_cast_sql(self, db_type, internal_type): if db_type and db_type.endswith('LOB'): return "DBMS_LOB.SUBSTR(%s)" else: return "%s" def last_executed_query(self, cursor, sql, params): # http://cx-oracle.sourceforge.net/html/cursor.html#Cursor.statement # The DB API definition does not define this attribute. statement = cursor.statement if statement and six.PY2 and not isinstance(statement, unicode): statement = statement.decode('utf-8') # Unlike Psycopg's `query` and MySQLdb`'s `_last_executed`, CxOracle's # `statement` doesn't contain the query parameters. refs #20010. 
return super(DatabaseOperations, self).last_executed_query(cursor, statement, params) def last_insert_id(self, cursor, table_name, pk_name): sq_name = self._get_sequence_name(table_name) cursor.execute('SELECT "%s".currval FROM dual' % sq_name) return cursor.fetchone()[0] def lookup_cast(self, lookup_type): if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'): return "UPPER(%s)" return "%s" def max_in_list_size(self): return 1000 def max_name_length(self): return 30 def prep_for_iexact_query(self, x): return x def process_clob(self, value): if value is None: return '' return force_text(value.read()) def quote_name(self, name): # SQL92 requires delimited (quoted) names to be case-sensitive. When # not quoted, Oracle has case-insensitive behavior for identifiers, but # always defaults to uppercase. # We simplify things by making Oracle identifiers always uppercase. if not name.startswith('"') and not name.endswith('"'): name = '"%s"' % util.truncate_name(name.upper(), self.max_name_length()) # Oracle puts the query text into a (query % args) construct, so % signs # in names need to be escaped. The '%%' will be collapsed back to '%' at # that stage so we aren't really making the name longer here. 
name = name.replace('%', '%%') return name.upper() def quote_parameter(self, value): if isinstance(value, (datetime.date, datetime.time, datetime.datetime)): return "'%s'" % value elif isinstance(value, six.string_types): return repr(value) elif isinstance(value, bool): return "1" if value else "0" else: return str(value) def random_function_sql(self): return "DBMS_RANDOM.RANDOM" def regex_lookup_9(self, lookup_type): raise NotImplementedError("Regexes are not supported in Oracle before version 10g.") def regex_lookup_10(self, lookup_type): if lookup_type == 'regex': match_option = "'c'" else: match_option = "'i'" return 'REGEXP_LIKE(%%s, %%s, %s)' % match_option def regex_lookup(self, lookup_type): # If regex_lookup is called before it's been initialized, then create # a cursor to initialize it and recur. self.connection.cursor() return self.connection.ops.regex_lookup(lookup_type) def return_insert_id(self): return "RETURNING %s INTO %%s", (InsertIdVar(),) def savepoint_create_sql(self, sid): return convert_unicode("SAVEPOINT " + self.quote_name(sid)) def savepoint_rollback_sql(self, sid): return convert_unicode("ROLLBACK TO SAVEPOINT " + self.quote_name(sid)) def sql_flush(self, style, tables, sequences, allow_cascade=False): # Return a list of 'TRUNCATE x;', 'TRUNCATE y;', # 'TRUNCATE z;'... style SQL statements if tables: # Oracle does support TRUNCATE, but it seems to get us into # FK referential trouble, whereas DELETE FROM table works. sql = ['%s %s %s;' % ( style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'), style.SQL_FIELD(self.quote_name(table)) ) for table in tables] # Since we've just deleted all the rows, running our sequence # ALTER code will reset the sequence to 0. 
sql.extend(self.sequence_reset_by_name_sql(style, sequences)) return sql else: return [] def sequence_reset_by_name_sql(self, style, sequences): sql = [] for sequence_info in sequences: sequence_name = self._get_sequence_name(sequence_info['table']) table_name = self.quote_name(sequence_info['table']) column_name = self.quote_name(sequence_info['column'] or 'id') query = _get_sequence_reset_sql() % {'sequence': sequence_name, 'table': table_name, 'column': column_name} sql.append(query) return sql def sequence_reset_sql(self, style, model_list): from django.db import models output = [] query = _get_sequence_reset_sql() for model in model_list: for f in model._meta.local_fields: if isinstance(f, models.AutoField): table_name = self.quote_name(model._meta.db_table) sequence_name = self._get_sequence_name(model._meta.db_table) column_name = self.quote_name(f.column) output.append(query % {'sequence': sequence_name, 'table': table_name, 'column': column_name}) # Only one AutoField is allowed per model, so don't # continue to loop break for f in model._meta.many_to_many: if not f.rel.through: table_name = self.quote_name(f.m2m_db_table()) sequence_name = self._get_sequence_name(f.m2m_db_table()) column_name = self.quote_name('id') output.append(query % {'sequence': sequence_name, 'table': table_name, 'column': column_name}) return output def start_transaction_sql(self): return '' def tablespace_sql(self, tablespace, inline=False): if inline: return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace) else: return "TABLESPACE %s" % self.quote_name(tablespace) def value_to_db_datetime(self, value): if value is None: return None # Oracle doesn't support tz-aware datetimes if timezone.is_aware(value): if settings.USE_TZ: value = value.astimezone(timezone.utc).replace(tzinfo=None) else: raise ValueError("Oracle backend does not support timezone-aware datetimes when USE_TZ is False.") return six.text_type(value) def value_to_db_time(self, value): if value is None: return 
None if isinstance(value, six.string_types): return datetime.datetime.strptime(value, '%H:%M:%S') # Oracle doesn't support tz-aware times if timezone.is_aware(value): raise ValueError("Oracle backend does not support timezone-aware times.") return datetime.datetime(1900, 1, 1, value.hour, value.minute, value.second, value.microsecond) def year_lookup_bounds_for_date_field(self, value): first = '%s-01-01' second = '%s-12-31' return [first % value, second % value] def year_lookup_bounds_for_datetime_field(self, value): # The default implementation uses datetime objects for the bounds. # This must be overridden here, to use a formatted date (string) as # 'second' instead -- cx_Oracle chops the fraction-of-second part # off of datetime objects, leaving almost an entire second out of # the year under the default implementation. bounds = super(DatabaseOperations, self).year_lookup_bounds_for_datetime_field(value) if settings.USE_TZ: bounds = [b.astimezone(timezone.utc).replace(tzinfo=None) for b in bounds] return [b.isoformat(b' ') for b in bounds] def combine_expression(self, connector, sub_expressions): "Oracle requires special cases for %% and & operators in query expressions" if connector == '%%': return 'MOD(%s)' % ','.join(sub_expressions) elif connector == '&': return 'BITAND(%s)' % ','.join(sub_expressions) elif connector == '|': raise NotImplementedError("Bit-wise or is not supported in Oracle.") return super(DatabaseOperations, self).combine_expression(connector, sub_expressions) def _get_sequence_name(self, table): name_length = self.max_name_length() - 3 return '%s_SQ' % util.truncate_name(table, name_length).upper() def _get_trigger_name(self, table): name_length = self.max_name_length() - 3 return '%s_TR' % util.truncate_name(table, name_length).upper() def bulk_insert_sql(self, fields, num_values): items_sql = "SELECT %s FROM DUAL" % ", ".join(["%s"] * len(fields)) return " UNION ALL ".join([items_sql] * num_values) class 
_UninitializedOperatorsDescriptor(object): def __get__(self, instance, owner): # If connection.operators is looked up before a connection has been # created, transparently initialize connection.operators to avert an # AttributeError. if instance is None: raise AttributeError("operators not available as class attribute") # Creating a cursor will initialize the operators. instance.cursor().close() return instance.__dict__['operators'] class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'oracle' operators = _UninitializedOperatorsDescriptor() _standard_operators = { 'exact': '= %s', 'iexact': '= UPPER(%s)', 'contains': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'icontains': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'endswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'istartswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'iendswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", } _likec_operators = _standard_operators.copy() _likec_operators.update({ 'contains': "LIKEC %s ESCAPE '\\'", 'icontains': "LIKEC UPPER(%s) ESCAPE '\\'", 'startswith': "LIKEC %s ESCAPE '\\'", 'endswith': "LIKEC %s ESCAPE '\\'", 'istartswith': "LIKEC UPPER(%s) ESCAPE '\\'", 'iendswith': "LIKEC UPPER(%s) ESCAPE '\\'", }) Database = Database def __init__(self, *args, **kwargs): super(DatabaseWrapper, self).__init__(*args, **kwargs) self.features = DatabaseFeatures(self) use_returning_into = self.settings_dict["OPTIONS"].get('use_returning_into', True) self.features.can_return_id_from_insert = use_returning_into self.ops = DatabaseOperations(self) self.client = DatabaseClient(self) self.creation = DatabaseCreation(self) self.introspection = DatabaseIntrospection(self) 
self.validation = BaseDatabaseValidation(self) def _connect_string(self): settings_dict = self.settings_dict if not settings_dict['HOST'].strip(): settings_dict['HOST'] = 'localhost' if settings_dict['PORT'].strip(): dsn = Database.makedsn(settings_dict['HOST'], int(settings_dict['PORT']), settings_dict['NAME']) else: dsn = settings_dict['NAME'] return "%s/%s@%s" % (settings_dict['USER'], settings_dict['PASSWORD'], dsn) def get_connection_params(self): conn_params = self.settings_dict['OPTIONS'].copy() if 'use_returning_into' in conn_params: del conn_params['use_returning_into'] return conn_params def get_new_connection(self, conn_params): conn_string = convert_unicode(self._connect_string()) return Database.connect(conn_string, **conn_params) def init_connection_state(self): cursor = self.create_cursor() # Set the territory first. The territory overrides NLS_DATE_FORMAT # and NLS_TIMESTAMP_FORMAT to the territory default. When all of # these are set in single statement it isn't clear what is supposed # to happen. cursor.execute("ALTER SESSION SET NLS_TERRITORY = 'AMERICA'") # Set oracle date to ansi date format. This only needs to execute # once when we create a new connection. We also set the Territory # to 'AMERICA' which forces Sunday to evaluate to a '1' in # TO_CHAR(). cursor.execute( "ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'" " NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'" + (" TIME_ZONE = 'UTC'" if settings.USE_TZ else '')) cursor.close() if 'operators' not in self.__dict__: # Ticket #14149: Check whether our LIKE implementation will # work for this connection or we need to fall back on LIKEC. # This check is performed only once per DatabaseWrapper # instance per thread, since subsequent connections will use # the same settings. 
cursor = self.create_cursor() try: cursor.execute("SELECT 1 FROM DUAL WHERE DUMMY %s" % self._standard_operators['contains'], ['X']) except DatabaseError: self.operators = self._likec_operators else: self.operators = self._standard_operators cursor.close() # There's no way for the DatabaseOperations class to know the # currently active Oracle version, so we do some setups here. # TODO: Multi-db support will need a better solution (a way to # communicate the current version). if self.oracle_version is not None and self.oracle_version <= 9: self.ops.regex_lookup = self.ops.regex_lookup_9 else: self.ops.regex_lookup = self.ops.regex_lookup_10 try: self.connection.stmtcachesize = 20 except: # Django docs specify cx_Oracle version 4.3.1 or higher, but # stmtcachesize is available only in 4.3.2 and up. pass def create_cursor(self): return FormatStylePlaceholderCursor(self.connection) def _commit(self): if self.connection is not None: try: return self.connection.commit() except Database.DatabaseError as e: # cx_Oracle 5.0.4 raises a cx_Oracle.DatabaseError exception # with the following attributes and values: # code = 2091 # message = 'ORA-02091: transaction rolled back # 'ORA-02291: integrity constraint (TEST_DJANGOTEST.SYS # _C00102056) violated - parent key not found' # We convert that particular case to our IntegrityError exception x = e.args[0] if hasattr(x, 'code') and hasattr(x, 'message') \ and x.code == 2091 and 'ORA-02291' in x.message: six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) raise def schema_editor(self, *args, **kwargs): "Returns a new instance of this backend's SchemaEditor" return DatabaseSchemaEditor(self, *args, **kwargs) # Oracle doesn't support savepoint commits. Ignore them. def _savepoint_commit(self, sid): pass def _set_autocommit(self, autocommit): self.connection.autocommit = autocommit def check_constraints(self, table_names=None): """ To check constraints, we set constraints to immediate. 
Then, when, we're done we must ensure they are returned to deferred. """ self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE') self.cursor().execute('SET CONSTRAINTS ALL DEFERRED') def is_usable(self): try: if hasattr(self.connection, 'ping'): # Oracle 10g R2 and higher self.connection.ping() else: # Use a cx_Oracle cursor directly, bypassing Django's utilities. self.connection.cursor().execute("SELECT 1 FROM DUAL") except DatabaseError: return False else: return True @cached_property def oracle_version(self): with self.temporary_connection(): version = self.connection.version try: return int(version.split('.')[0]) except ValueError: return None class OracleParam(object): """ Wrapper object for formatting parameters for Oracle. If the string representation of the value is large enough (greater than 4000 characters) the input size needs to be set as CLOB. Alternatively, if the parameter has an `input_size` attribute, then the value of the `input_size` attribute will be used instead. Otherwise, no input size will be set for the parameter when executing the query. """ def __init__(self, param, cursor, strings_only=False): # With raw SQL queries, datetimes can reach this function # without being converted by DateTimeField.get_db_prep_value. if settings.USE_TZ and isinstance(param, datetime.datetime): if timezone.is_naive(param): warnings.warn("Oracle received a naive datetime (%s)" " while time zone support is active." % param, RuntimeWarning) default_timezone = timezone.get_default_timezone() param = timezone.make_aware(param, default_timezone) param = param.astimezone(timezone.utc).replace(tzinfo=None) # Oracle doesn't recognize True and False correctly in Python 3. # The conversion done below works both in 2 and 3. 
if param is True: param = "1" elif param is False: param = "0" if hasattr(param, 'bind_parameter'): self.force_bytes = param.bind_parameter(cursor) elif isinstance(param, six.memoryview): self.force_bytes = param else: self.force_bytes = convert_unicode(param, cursor.charset, strings_only) if hasattr(param, 'input_size'): # If parameter has `input_size` attribute, use that. self.input_size = param.input_size elif isinstance(param, six.string_types) and len(param) > 4000: # Mark any string param greater than 4000 characters as a CLOB. self.input_size = Database.CLOB else: self.input_size = None class VariableWrapper(object): """ An adapter class for cursor variables that prevents the wrapped object from being converted into a string when used to instanciate an OracleParam. This can be used generally for any other object that should be passed into Cursor.execute as-is. """ def __init__(self, var): self.var = var def bind_parameter(self, cursor): return self.var def __getattr__(self, key): return getattr(self.var, key) def __setattr__(self, key, value): if key == 'var': self.__dict__[key] = value else: setattr(self.var, key, value) class InsertIdVar(object): """ A late-binding cursor variable that can be passed to Cursor.execute as a parameter, in order to receive the id of the row created by an insert statement. """ def bind_parameter(self, cursor): param = cursor.cursor.var(Database.NUMBER) cursor._insert_id_var = param return param class FormatStylePlaceholderCursor(object): """ Django uses "format" (e.g. '%s') style placeholders, but Oracle uses ":var" style. This fixes it -- but note that if you want to use a literal "%s" in a query, you'll need to use "%%s". We also do automatic conversion between Unicode on the Python side and UTF-8 -- for talking to Oracle -- in here. """ charset = 'utf-8' def __init__(self, connection): self.cursor = connection.cursor() # Necessary to retrieve decimal values without rounding error. 
self.cursor.numbersAsStrings = True # Default arraysize of 1 is highly sub-optimal. self.cursor.arraysize = 100 def _format_params(self, params): try: return dict((k, OracleParam(v, self, True)) for k, v in params.items()) except AttributeError: return tuple(OracleParam(p, self, True) for p in params) def _guess_input_sizes(self, params_list): # Try dict handling; if that fails, treat as sequence if hasattr(params_list[0], 'keys'): sizes = {} for params in params_list: for k, value in params.items(): if value.input_size: sizes[k] = value.input_size self.setinputsizes(**sizes) else: # It's not a list of dicts; it's a list of sequences sizes = [None] * len(params_list[0]) for params in params_list: for i, value in enumerate(params): if value.input_size: sizes[i] = value.input_size self.setinputsizes(*sizes) def _param_generator(self, params): # Try dict handling; if that fails, treat as sequence if hasattr(params, 'items'): return dict((k, v.force_bytes) for k, v in params.items()) else: return [p.force_bytes for p in params] def _fix_for_params(self, query, params): # cx_Oracle wants no trailing ';' for SQL statements. For PL/SQL, it # it does want a trailing ';' but not a trailing '/'. However, these # characters must be included in the original query in case the query # is being passed to SQL*Plus. 
if query.endswith(';') or query.endswith('/'): query = query[:-1] if params is None: params = [] query = convert_unicode(query, self.charset) elif hasattr(params, 'keys'): # Handle params as dict args = dict((k, ":%s" % k) for k in params.keys()) query = convert_unicode(query % args, self.charset) else: # Handle params as sequence args = [(':arg%d' % i) for i in range(len(params))] query = convert_unicode(query % tuple(args), self.charset) return query, self._format_params(params) def execute(self, query, params=None): query, params = self._fix_for_params(query, params) self._guess_input_sizes([params]) try: return self.cursor.execute(query, self._param_generator(params)) except Database.DatabaseError as e: # cx_Oracle <= 4.4.0 wrongly raises a DatabaseError for ORA-01400. if hasattr(e.args[0], 'code') and e.args[0].code == 1400 and not isinstance(e, IntegrityError): six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) raise def executemany(self, query, params=None): if not params: # No params given, nothing to do return None # uniform treatment for sequences and iterables params_iter = iter(params) query, firstparams = self._fix_for_params(query, next(params_iter)) # we build a list of formatted params; as we're going to traverse it # more than once, we can't make it lazy by using a generator formatted = [firstparams] + [self._format_params(p) for p in params_iter] self._guess_input_sizes(formatted) try: return self.cursor.executemany(query, [self._param_generator(p) for p in formatted]) except Database.DatabaseError as e: # cx_Oracle <= 4.4.0 wrongly raises a DatabaseError for ORA-01400. 
if hasattr(e.args[0], 'code') and e.args[0].code == 1400 and not isinstance(e, IntegrityError): six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) raise def fetchone(self): row = self.cursor.fetchone() if row is None: return row return _rowfactory(row, self.cursor) def fetchmany(self, size=None): if size is None: size = self.arraysize return tuple(_rowfactory(r, self.cursor) for r in self.cursor.fetchmany(size)) def fetchall(self): return tuple(_rowfactory(r, self.cursor) for r in self.cursor.fetchall()) def var(self, *args): return VariableWrapper(self.cursor.var(*args)) def arrayvar(self, *args): return VariableWrapper(self.cursor.arrayvar(*args)) def __getattr__(self, attr): if attr in self.__dict__: return self.__dict__[attr] else: return getattr(self.cursor, attr) def __iter__(self): return CursorIterator(self.cursor) class CursorIterator(six.Iterator): """Cursor iterator wrapper that invokes our custom row factory.""" def __init__(self, cursor): self.cursor = cursor self.iter = iter(cursor) def __iter__(self): return self def __next__(self): return _rowfactory(next(self.iter), self.cursor) def _rowfactory(row, cursor): # Cast numeric values as the appropriate Python type based upon the # cursor description, and convert strings to unicode. casted = [] for value, desc in zip(row, cursor.description): if value is not None and desc[1] is Database.NUMBER: precision, scale = desc[4:6] if scale == -127: if precision == 0: # NUMBER column: decimal-precision floating point # This will normally be an integer from a sequence, # but it could be a decimal value. if '.' in value: value = decimal.Decimal(value) else: value = int(value) else: # FLOAT column: binary-precision floating point. # This comes from FloatField columns. value = float(value) elif precision > 0: # NUMBER(p,s) column: decimal-precision fixed point. # This comes from IntField and DecimalField columns. 
                if scale == 0:
                    # NUMBER(p, 0): integral value.
                    value = int(value)
                else:
                    # NUMBER(p, s) with s > 0: exact decimal value.
                    value = decimal.Decimal(value)
            elif '.' in value:
                # No type information. This normally comes from a
                # mathematical expression in the SELECT list. Guess int
                # or Decimal based on whether it has a decimal point.
                value = decimal.Decimal(value)
            else:
                value = int(value)
        # datetimes are returned as TIMESTAMP, except the results
        # of "dates" queries, which are returned as DATETIME.
        elif desc[1] in (Database.TIMESTAMP, Database.DATETIME):
            # Confirm that dt is naive before overwriting its tzinfo.
            if settings.USE_TZ and value is not None and timezone.is_naive(value):
                value = value.replace(tzinfo=timezone.utc)
        elif desc[1] in (Database.STRING, Database.FIXED_CHAR,
                         Database.LONG_STRING):
            value = to_unicode(value)
        casted.append(value)
    return tuple(casted)


def to_unicode(s):
    """
    Convert strings to Unicode objects (and return all other data types
    unchanged).
    """
    if isinstance(s, six.string_types):
        return force_text(s)
    return s


def _get_sequence_reset_sql():
    # TODO: colorize this SQL code with style.SQL_KEYWORD(), etc.
    # PL/SQL block template that advances sequence %(sequence)s until it has
    # caught up with MAX(%(column)s) in %(table)s.  Callers interpolate the
    # %(...)s placeholders before executing.
    return """
DECLARE
    table_value integer;
    seq_value integer;
BEGIN
    SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s;
    SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences
           WHERE sequence_name = '%(sequence)s';
    WHILE table_value > seq_value LOOP
        SELECT "%(sequence)s".nextval INTO seq_value FROM dual;
    END LOOP;
END;
/"""
adambrenecki/django
django/db/backends/oracle/base.py
Python
bsd-3-clause
40,557
0.001381
#!/usr/bin/env python

# game.py
#
# Copyright (C) 2013, 2014 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU General Public License v2
#
# Core snake game logic: per-frame update, apple handling, snake movement
# and collision checks.  All state lives in module-level globals shared
# with the gameloop/stage modules.

import stage
import gameloop
import math
import random
import config
import gamestate as gs

# Module-level game state.
direction = (0, 0)  # current movement vector (dx, dy)
lastPos = (0, 0)    # cell vacated by the tail on the last move
snake = []          # list of (x, y) cells, head first
speed = 1           # cells moved per frame
apples = []         # list of (x, y) apple positions
grow = config.initial_size - 1  # tail segments still to be appended
score = 0
lives = 1
livesMax = 1


def update():
    """Advance the game by one frame: move, then eat, then collide."""
    moveSnake()
    checkCatch()
    checkPositionAllowed()


def checkCatch():
    """Eat any apple that the snake's head is currently on."""
    if not len(snake) or not len(apples):
        return
    # NOTE(review): eatApple() pops from `apples` (and appends a fresh one)
    # while this loop iterates over it; safe only if at most one apple can
    # occupy the head's cell -- spawnApple appears to guarantee uniqueness,
    # but confirm.
    for i, apple in enumerate(apples):
        if (snake[0][0]) == apple[0] and (snake[0][1]) == apple[1]:
            eatApple(i)


def eatApple(i):
    """Remove apple `i`, respawn a replacement, update grow/score counters.

    The gamestate (gs.state) updates are deliberately best-effort: failures
    are swallowed so a broken stats store never kills the game.
    """
    global grow, score

    apples.pop(i)
    spawnApple()
    grow += config.food_values['apple']
    score_value = 1
    score += score_value

    # adjust total score
    try:
        gs.state['total_score'] += score_value
    except Exception:
        pass

    # adjust highest score
    try:
        if score > gs.state['highest_score']:
            gs.state['highest_score'] = score
    except Exception:
        pass

    # adjust total number of apples
    try:
        gs.state['total_number_of_apples'] += 1
    except Exception:
        pass


def moveSnake():
    """Shift the snake one step in `direction`.

    The head advances by speed * direction; every other segment takes the
    pre-move position of the segment ahead of it (`last_unchanged` carries
    that position through the loop).  When `grow` is non-zero the vacated
    tail cell is re-appended as a new segment.
    """
    global grow, lastPos

    last_unchanged = None
    lastPos = (snake[len(snake) - 1][0], snake[len(snake) - 1][1])

    for i, part in enumerate(snake):
        if i == 0:
            x = part[0] + speed * direction[0]
            y = part[1] + speed * direction[1]
        else:
            x = last_unchanged[0]
            y = last_unchanged[1]

        # remember this segment's pre-move position for the next segment
        last_unchanged = (snake[i][0], snake[i][1])
        snake[i] = (x, y)

    if grow:
        snake.append(last_unchanged)
        grow -= 1

    # adjust longest snake (best-effort stats update, failures ignored)
    try:
        if len(snake) > gs.state['longest_snake']:
            gs.state['longest_snake'] = len(snake)
    except Exception:
        pass

    # adjust total length
    # NOTE(review): increments by 1 per frame, not by the snake's length --
    # reads more like "total distance travelled"; confirm intent.
    try:
        gs.state['total_length'] += 1
    except Exception:
        pass


def getGameArea():
    """Return the number of cells in the playing field (width * height)."""
    w = math.fabs(stage.boundaries['right'] - stage.boundaries['left'])
    h = math.fabs(stage.boundaries['top'] - stage.boundaries['bottom'])
    return int(math.floor(w * h))


def reset():
    """Reset snake, direction, apples and growth for a new round.

    NOTE(review): `score` is declared global here but never reassigned in
    this function, so the score carries over across resets -- confirm that
    is intended.
    """
    global direction, snake, apples_count, apples, score, grow

    direction = (1, 0)
    snake = [(0, 0)]
    gameloop.frame = 1
    apples_count = 1
    apples = []
    grow = config.initial_size - 1

    # one apple plus one extra per `apple_domain` cells of playing area
    apples_count += int(math.floor(getGameArea() / config.apple_domain))
    for i in range(0, apples_count):
        spawnApple()


def spawnApple():
    """Place a new apple on a random free in-bounds cell.

    NOTE(review): retries by recursing until a free cell is found; on a
    crowded board this can recurse deeply.  The len(apples) >= getGameArea()
    guard bounds only the apple count, not cells occupied by the snake.
    """
    if len(apples) >= getGameArea():
        return

    x = random.randrange(stage.boundaries['left'], stage.boundaries['right'])
    y = random.randrange(stage.boundaries['top'], stage.boundaries['bottom'])

    position_free = True

    for apple in apples:
        if apple[0] == x and apple[1] == y:
            position_free = False

    for part in snake:
        if part[0] == x and part[1] == y:
            position_free = False

    if position_free and not isOutOfBoundaries(x, y):
        apples.append((x, y))
    else:
        spawnApple()


def isOutOfBoundaries(x, y):
    """Return True if (x, y) lies outside the playing field.

    The right/bottom edges are exclusive (hence the ``- 1``).
    """
    if x < stage.boundaries['left'] or x > stage.boundaries['right'] - 1:
        return True
    elif y < stage.boundaries['top'] or y > stage.boundaries['bottom'] - 1:
        return True
    return False


def checkPositionAllowed():
    """End the round when the head hits the body or leaves the field.

    NOTE(review): the body scan uses range(1, len(snake) - 1) and therefore
    never tests the LAST tail segment -- possibly deliberate (the tail
    vacates its cell each frame), possibly an off-by-one; confirm.
    """
    global lives

    collides_with_body = False
    x = snake[0][0]
    y = snake[0][1]

    for i in range(1, len(snake) - 1):
        if x == snake[i][0] and y == snake[i][1]:
            collides_with_body = True
            break

    if (collides_with_body or isOutOfBoundaries(x, y)):
        gameloop.reset()
        lives -= 1
        if lives == 0:
            lives = livesMax
            gameloop.state = 2
alexaverill/make-snake
snake/game.py
Python
gpl-2.0
3,912
0.000256
# A funny, but common thing you'll see in python scipts is that if __name__ ... # block below # To start off, just run this script and see what happens. # Then run the test and note that it fails in a curious way! print "I was run - maybe by a test?" if __name__ == '__main__': # The problem is that this variable needs to be defined OUTSIDE the if # __name__ block. Can you move it above where it will be picked up by the # test? # Don't forget to fix the indentation! module_var = "I am totally defined" print "I'm being run directly" print "And module_var is:", module_var
marwahaha/python-fundamentals
challenges/04-Functions/B_script_conventions.py
Python
apache-2.0
610
0
import json
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from apps.admin.utils.exception_handling import ExceptionHandler
from apps.commission.models import Commission
from apps.seller.models.product import Product
from settings.people import support_team
import re
from apps.communication.views.email_class import Email


def estimate(request):
    """Return a JSON price estimate for a customized product.

    Scales the base product's price and weight by the volume ratio implied
    by the requested dimensions (GET ``length``/``width``) and quantity,
    then pads the weight by 5% + 100g so borderline cases land in the next
    shipping tier.  Responds 500 when the product lacks weight/price data.
    """
    try:
        product = Product.objects.get(id=request.GET['product_id'])

        if product and product.weight and product.price:
            # default missing dimensions to 1 so the volume math stays valid
            product.length = product.length if product.length else 1
            product.height = product.height if product.height else 1
            product.width = product.width if product.width else 1
            old_volume = product.length * product.width * product.height

            #sort dimensions and update two biggest ones
            dimensions = [product.length, product.width, product.height]
            dimensions.sort() #sort numbers smallest to biggest
            dimensions.reverse() #reverse order, so now biggest first
            if request.GET.get('length') and int(request.GET['length']) > 0:
                dimensions[0] = int(request.GET['length'])
            if request.GET.get('width') and int(request.GET['width']) > 0:
                dimensions[1] = int(request.GET['width'])

            #get ratio from volume difference
            new_volume = dimensions[0] * dimensions[1] * dimensions[2]
            ratio = float(new_volume)/old_volume

            #scale ratio with quantity
            # NOTE(review): request.GET['quantity'] is a string, so
            # "... > 1" is a str/int comparison (always True under the
            # Python 2 ordering rules); probably meant
            # int(request.GET['quantity']) > 1 -- confirm before changing.
            if request.GET.get('quantity') and request.GET['quantity'] > 1:
                ratio = ratio * int(request.GET['quantity'])

            #use ratio to scale price, weight
            product.price = int(round(product.price * ratio))
            product.weight = int(round(product.weight * ratio))

            #increase weight a bit to bump estimate to next shipping price tier if close
            product.weight = int(round((product.weight * 1.05) + 100)) #add 5% + 100grams

            response = {'display_price_estimate': product.display_price}
            product.pk = None #DO NOT SAVE!!!
            return HttpResponse(json.dumps(response), content_type='application/json')

        else:
            return HttpResponse(status=500)

    except Exception as e:
        ExceptionHandler(e, "error in product.custom_order_estimate")
        return HttpResponse(str(e), status=500)


@csrf_exempt
def request(request): #todo: change function name
    """Record a custom-order request and email the customer + support team.

    Expects a POST with at least ``email`` and ``product_id``.  Commission
    creation is best-effort: on failure the error is logged and the
    notification email is still sent (just without a commission_id).

    NOTE(review): the view name shadows its ``request`` argument -- see the
    existing todo.
    """
    if request.method == 'POST' and request.POST.get('email'):
        try:
            product = Product.objects.get(id=request.POST['product_id'])
            data = {
                'product': product,
                'country': request.POST['country'],
                'email': request.POST['email'],
                'size_imperial': request.POST.get('size_imperial', ""),
                'size_metric': request.POST.get('size_metric', ""),
                'quantity': request.POST.get('quantity', ""),
                'description': request.POST.get('description', ""),
                'estimate': request.POST.get('estimate', ""),
            }

            try:
                commission = Commission.objects.create()
                commission.base_product = product
                # size_string = request.POST.get('size_metric', "")
                # pattern = re.compile('\D*(\d{1,6})\D*(\d{1,6})\D*')
                # dimensions = commission.length = pattern.match(size_string).groups()
                # commission.length = dimensions[0]
                # if len(dimensions) > 1:
                #   commission.width = dimensions[1]
                # missing/zero dimensions collapse to None via "or None"
                commission.length = int(float(request.POST.get('length', 0))) or None
                commission.width = int(float(request.POST.get('width', 0))) or None
                commission.quantity = request.POST.get('quantity', 1) or 1
                commission.createProduct(save=False)#calculates estimated artisan price
                commission.save()
                print "commission saved. go sms artisan..."
                commission.askArtisan()
            except Exception as e:
                ExceptionHandler(e, "in custom_order.request Commission")
            else:
                data['commission_id'] = commission.id

            recipient_email_list = [data['email'],] + [person.email for person in support_team]
            Email('custom_order/request', data).sendTo(recipient_email_list)
            return HttpResponse(status=200)

        except Exception as e:
            ExceptionHandler(e, "error in custom_order.request")
            return HttpResponse(status=500)
    else:
        return HttpResponse(status=400)
tomcounsell/Cobra
apps/public/views/custom_order.py
Python
gpl-2.0
4,294
0.015137
from .registry import Registry
from drole.types import Permission


def action(f):
    """ mark a method as being an action.

    Usable two ways:
      - ``@action`` directly on a handler: tags it with action=True and no
        permission;
      - ``@action(some_permission)``: returns a decorator that tags the
        handler and records the required permission.
    """
    if isinstance(f, Permission):
        # invoked as @action(permission): f is the Permission, so return
        # the real decorator
        def decorator(decorated):
            decorated.action = True
            decorated.permission = f
            return decorated
        return decorator
    else:
        # invoked as @action: f is the handler itself
        f.action = True
        f.permission = None
        return f


class ActionRegistry(dict):
    # Maps action name -> list of (handler, path, spoke) registrations.

    def register(self, handler, action, path=None, spoke=None):
        """Register `handler` for `action`, optionally restricted to a
        specific path and/or spoke (instance or spoke class)."""
        if action not in self:
            self[action] = []
        self[action].append((handler, path, spoke))

    def get(self, action, path=None, spoke=None):
        """ Action resolution is as follows:
            - A handler is registered on an action and optionally a spoke and path
            - spoke and path have to match if specified

            if there are no entries at all, find a handler on the spoke itself.

            To consider: add priority when registering action

            (Translated from the original Dutch:)
            An action / handler is registered in a certain context:
            - globally (valid in every context)
            - for a specific spoke
            - for a specific path
            - spoke and path

            A handler is then looked up in that context.
            If you specify a precise context, then you expect
            [sentence left unfinished in the original]

            An action registered on path P and spoke S therefore does not
            match on path P' and spoke S
        """
        entries = super(ActionRegistry, self).get(action)
        if entries:
            ## Match spoke against an actual instance first
            for (h, epath, espoke) in entries:
                if epath and path != epath:
                    continue
                if espoke and spoke != espoke:
                    continue
                return h
            ## and then against a spoke type class
            for (h, epath, espoke) in entries:
                if epath and path != epath:
                    continue
                if espoke and espoke != spoke.__class__:
                    continue
                return h

        ## give up if there's no explicit spoke context passed
        if not spoke:
            return None

        # fall back to a method on the spoke itself named after the action,
        # but only if it was explicitly tagged with @action
        classhandler = getattr(spoke, action, None)
        ## A lookup by action id is actually preferable XXX
        if classhandler and getattr(classhandler, 'action', False):
            return classhandler
        return None


class tab(object):
    # Decorator (lowercase by convention, used as @tab(...)) that marks a
    # spoke method as both an action and a UI tab.

    def __init__(self, permission=None, id=None, label=None, condition=None):
        self.permission = permission
        self.id = id
        self.label = label
        self.condition = condition

    def __call__(self, f):
        # Wrap f so the tab/action metadata is attached to a fresh function
        # instead of mutating the original.
        def wrapped(self, *a, **b):
            res = f(self, *a, **b)
            return res

        # NOTE(review): f.func_name is Python 2 only; under Python 3 this
        # would be f.__name__ -- confirm the target runtime.
        name = f.func_name

        if self.permission:
            wrapped = action(self.permission)(wrapped)
        else:
            wrapped = action(wrapped)
        wrapped.tab = True
        wrapped.tab_id = self.id or name
        wrapped.tab_label = self.label or wrapped.tab_id
        wrapped.condition = self.condition
        return wrapped


def tabaction(handler):
    """ return the tab identifier of a handler if it's a tab, or else None """
    if getattr(handler, 'action', False) and getattr(handler, 'tab', False):
        return handler.tab_id
    return None


# Shared, wheel-wide action registry instance.
action_registry = Registry(ActionRegistry())
wheelcms/wheelcms_axle
wheelcms_axle/actions.py
Python
bsd-2-clause
3,462
0.003177
#!/usr/bin/env python
"""Loop-count experiments for two algorithm-analysis exercises.

Each ``run_*`` function executes a loop nest taken from a course question
and prints how often the loop body runs for a given problem size N.
"""

import sys


def run_277853s():
    """Run the 277853 experiment for N = 1, 2, 4, ..., 8192."""
    size = 1
    while size < 16384:
        run_277853(size)
        size *= 2


def run_277853(N):
    """Print (N, i, count) for each pass of: i = 1; while i*i <= N: i *= 4.

    NOTE(review): the transcribed C-style comment in the original said
    ``i = i*2`` while the code multiplies by 4; the code's behavior is
    preserved here unchanged.
    """
    count = 0
    step = 1
    while step * step <= N:
        count += 1
        print(N, step, count)
        step *= 4


def run_605062s():
    """Run the 605062 experiment for N = 1, 2, 4, ..., 2048."""
    size = 1
    while size < 4096:
        run_605062(size)
        size *= 2


def run_605062(N):
    """Count body executions of: i = 1; while i <= N: (i inner passes); i *= 2.

    The inner body runs 1 + 2 + 4 + ... + N' times (N' = largest power of
    two <= N), i.e. about 2N.  NOTE(review): the original docstring claimed
    ~2*sqrt(N), which matches an ``i*i <= N`` guard this code does not use.
    """
    count = 0
    outer = 1
    while outer <= N:
        for _ in range(outer):
            count += 1
        outer *= 2
    print("{:>5}=N {:>5}=cnt".format(N, count))


if __name__ == '__main__':
    run_277853s()
dvklopfenstein/PrincetonAlgorithms
py/AlgsSedgewickWayne/testcode/order.py
Python
gpl-2.0
768
0.041667
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License # from org.apache.qpid.proton.messenger.impl import Address def pn_url(): return Address() def pn_url_parse(urlstr): return Address(urlstr) def pn_url_free(url): pass def pn_url_clear(url): url.clear(); def pn_url_str(url): return url.toString() def pn_url_get_scheme(url): return url.getScheme() def pn_url_get_username(url): return url.getUser() def pn_url_get_password(url): return url.getPass() def pn_url_get_host(url): return url.getHost() or None def pn_url_get_port(url): return url.getPort() def pn_url_get_path(url): return url.getName() def pn_url_set_scheme(url, value): url.setScheme(value) def pn_url_set_username(url, value): url.setUser(value) def pn_url_set_password(url, value): url.setPass(value) def pn_url_set_host(url, value): url.setHost(value) def pn_url_set_port(url, value): url.setPort(value) def pn_url_set_path(url, value): url.setName(value)
prestona/qpid-proton
tests/java/shim/curl.py
Python
apache-2.0
1,678
0.004768
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import functools from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models from ..._vendor import _convert_request from ...operations._iot_hub_resource_operations import build_check_name_availability_request, build_create_event_hub_consumer_group_request, build_create_or_update_request_initial, build_delete_event_hub_consumer_group_request, build_delete_request_initial, build_export_devices_request, build_get_event_hub_consumer_group_request, build_get_job_request, build_get_keys_for_key_name_request, build_get_quota_metrics_request, build_get_request, build_get_stats_request, build_get_valid_skus_request, build_import_devices_request, build_list_by_resource_group_request, build_list_by_subscription_request, build_list_event_hub_consumer_groups_request, build_list_jobs_request, build_list_keys_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class IotHubResourceOperations: """IotHubResourceOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.iothub.v2016_02_03.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def get( self, resource_group_name: str, resource_name: str, **kwargs: Any ) -> "_models.IotHubDescription": """Get the non-security related metadata of an IoT hub. Get the non-security related metadata of an IoT hub. :param resource_group_name: The name of the resource group that contains the IoT hub. 
:type resource_group_name: str :param resource_name: The name of the IoT hub. :type resource_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IotHubDescription, or the result of cls(response) :rtype: ~azure.mgmt.iothub.v2016_02_03.models.IotHubDescription :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescription"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, resource_name=resource_name, template_url=self.get.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('IotHubDescription', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore async def _create_or_update_initial( self, resource_group_name: str, resource_name: str, iot_hub_description: "_models.IotHubDescription", **kwargs: Any ) -> "_models.IotHubDescription": cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescription"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', 
{})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] _json = self._serialize.body(iot_hub_description, 'IotHubDescription') request = build_create_or_update_request_initial( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, resource_name=resource_name, content_type=content_type, json=_json, template_url=self._create_or_update_initial.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('IotHubDescription', pipeline_response) if response.status_code == 201: deserialized = self._deserialize('IotHubDescription', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore @distributed_trace_async async def begin_create_or_update( self, resource_group_name: str, resource_name: str, iot_hub_description: "_models.IotHubDescription", **kwargs: Any ) -> AsyncLROPoller["_models.IotHubDescription"]: """Create or update the metadata of an IoT hub. Create or update the metadata of an Iot hub. The usual pattern to modify a property is to retrieve the IoT hub metadata and security metadata, and then combine them with the modified values in a new body to update the IoT hub. If certain properties are missing in the JSON, updating IoT Hub may cause these values to fallback to default, which may lead to unexpected behavior. 
:param resource_group_name: The name of the resource group that contains the IoT hub. :type resource_group_name: str :param resource_name: The name of the IoT hub to create or update. :type resource_name: str :param iot_hub_description: The IoT hub metadata and security metadata. :type iot_hub_description: ~azure.mgmt.iothub.v2016_02_03.models.IotHubDescription :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either IotHubDescription or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothub.v2016_02_03.models.IotHubDescription] :raises: ~azure.core.exceptions.HttpResponseError """ content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescription"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, resource_name=resource_name, iot_hub_description=iot_hub_description, content_type=content_type, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) def get_long_running_output(pipeline_response): response = pipeline_response.http_response deserialized = 
self._deserialize('IotHubDescription', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore async def _delete_initial( self, resource_group_name: str, resource_name: str, **kwargs: Any ) -> Optional[Union["_models.IotHubDescription", "_models.ErrorDetails"]]: cls = kwargs.pop('cls', None) # type: ClsType[Optional[Union["_models.IotHubDescription", "_models.ErrorDetails"]]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_delete_request_initial( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, resource_name=resource_name, template_url=self._delete_initial.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204, 404]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('IotHubDescription', pipeline_response) if response.status_code == 202: deserialized 
= self._deserialize('IotHubDescription', pipeline_response) if response.status_code == 404: deserialized = self._deserialize('ErrorDetails', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore @distributed_trace_async async def begin_delete( self, resource_group_name: str, resource_name: str, **kwargs: Any ) -> AsyncLROPoller[Union["_models.IotHubDescription", "_models.ErrorDetails"]]: """Delete an IoT hub. Delete an IoT hub. :param resource_group_name: The name of the resource group that contains the IoT hub. :type resource_group_name: str :param resource_name: The name of the IoT hub to delete. :type resource_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either IotHubDescription or ErrorDetails or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iothub.v2016_02_03.models.IotHubDescription or ~azure.mgmt.iothub.v2016_02_03.models.ErrorDetails] :raises: ~azure.core.exceptions.HttpResponseError """ polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[Union["_models.IotHubDescription", "_models.ErrorDetails"]] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._delete_initial( resource_group_name=resource_group_name, resource_name=resource_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) def get_long_running_output(pipeline_response): response = pipeline_response.http_response deserialized = self._deserialize('IotHubDescription', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore @distributed_trace def list_by_subscription( self, **kwargs: Any ) -> AsyncIterable["_models.IotHubDescriptionListResult"]: """Get all the IoT hubs in a subscription. Get all the IoT hubs in a subscription. 
:keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IotHubDescriptionListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2016_02_03.models.IotHubDescriptionListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescriptionListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) def prepare_request(next_link=None): if not next_link: request = build_list_by_subscription_request( subscription_id=self._config.subscription_id, template_url=self.list_by_subscription.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: request = build_list_by_subscription_request( subscription_id=self._config.subscription_id, template_url=next_link, ) request = _convert_request(request) request.url = self._client.format_url(request.url) request.method = "GET" return request async def extract_data(pipeline_response): deserialized = self._deserialize("IotHubDescriptionListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list_by_subscription.metadata 
= {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Devices/IotHubs'} # type: ignore @distributed_trace def list_by_resource_group( self, resource_group_name: str, **kwargs: Any ) -> AsyncIterable["_models.IotHubDescriptionListResult"]: """Get all the IoT hubs in a resource group. Get all the IoT hubs in a resource group. :param resource_group_name: The name of the resource group that contains the IoT hubs. :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IotHubDescriptionListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2016_02_03.models.IotHubDescriptionListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescriptionListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) def prepare_request(next_link=None): if not next_link: request = build_list_by_resource_group_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, template_url=self.list_by_resource_group.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: request = build_list_by_resource_group_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, template_url=next_link, ) request = _convert_request(request) request.url = self._client.format_url(request.url) request.method = "GET" return request async def extract_data(pipeline_response): deserialized = self._deserialize("IotHubDescriptionListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = 
prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs'} # type: ignore @distributed_trace_async async def get_stats( self, resource_group_name: str, resource_name: str, **kwargs: Any ) -> "_models.RegistryStatistics": """Get the statistics from an IoT hub. Get the statistics from an IoT hub. :param resource_group_name: The name of the resource group that contains the IoT hub. :type resource_group_name: str :param resource_name: The name of the IoT hub. 
:type resource_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: RegistryStatistics, or the result of cls(response) :rtype: ~azure.mgmt.iothub.v2016_02_03.models.RegistryStatistics :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.RegistryStatistics"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_stats_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, resource_name=resource_name, template_url=self.get_stats.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('RegistryStatistics', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/IotHubStats'} # type: ignore @distributed_trace def get_valid_skus( self, resource_group_name: str, resource_name: str, **kwargs: Any ) -> AsyncIterable["_models.IotHubSkuDescriptionListResult"]: """Get the list of valid SKUs for an IoT hub. Get the list of valid SKUs for an IoT hub. :param resource_group_name: The name of the resource group that contains the IoT hub. :type resource_group_name: str :param resource_name: The name of the IoT hub. 
:type resource_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either IotHubSkuDescriptionListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2016_02_03.models.IotHubSkuDescriptionListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubSkuDescriptionListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) def prepare_request(next_link=None): if not next_link: request = build_get_valid_skus_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, resource_name=resource_name, template_url=self.get_valid_skus.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: request = build_get_valid_skus_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, resource_name=resource_name, template_url=next_link, ) request = _convert_request(request) request.url = self._client.format_url(request.url) request.method = "GET" return request async def extract_data(pipeline_response): deserialized = self._deserialize("IotHubSkuDescriptionListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response) raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_valid_skus.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/skus'} # type: ignore @distributed_trace def list_event_hub_consumer_groups( self, resource_group_name: str, resource_name: str, event_hub_endpoint_name: str, **kwargs: Any ) -> AsyncIterable["_models.EventHubConsumerGroupsListResult"]: """Get a list of the consumer groups in the Event Hub-compatible device-to-cloud endpoint in an IoT hub. Get a list of the consumer groups in the Event Hub-compatible device-to-cloud endpoint in an IoT hub. :param resource_group_name: The name of the resource group that contains the IoT hub. :type resource_group_name: str :param resource_name: The name of the IoT hub. :type resource_name: str :param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint. 
:type event_hub_endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either EventHubConsumerGroupsListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2016_02_03.models.EventHubConsumerGroupsListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.EventHubConsumerGroupsListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) def prepare_request(next_link=None): if not next_link: request = build_list_event_hub_consumer_groups_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, resource_name=resource_name, event_hub_endpoint_name=event_hub_endpoint_name, template_url=self.list_event_hub_consumer_groups.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: request = build_list_event_hub_consumer_groups_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, resource_name=resource_name, event_hub_endpoint_name=event_hub_endpoint_name, template_url=next_link, ) request = _convert_request(request) request.url = self._client.format_url(request.url) request.method = "GET" return request async def extract_data(pipeline_response): deserialized = self._deserialize("EventHubConsumerGroupsListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: 
map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list_event_hub_consumer_groups.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups'} # type: ignore @distributed_trace_async async def get_event_hub_consumer_group( self, resource_group_name: str, resource_name: str, event_hub_endpoint_name: str, name: str, **kwargs: Any ) -> "_models.EventHubConsumerGroupInfo": """Get a consumer group from the Event Hub-compatible device-to-cloud endpoint for an IoT hub. Get a consumer group from the Event Hub-compatible device-to-cloud endpoint for an IoT hub. :param resource_group_name: The name of the resource group that contains the IoT hub. :type resource_group_name: str :param resource_name: The name of the IoT hub. :type resource_name: str :param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub. :type event_hub_endpoint_name: str :param name: The name of the consumer group to retrieve. 
:type name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: EventHubConsumerGroupInfo, or the result of cls(response) :rtype: ~azure.mgmt.iothub.v2016_02_03.models.EventHubConsumerGroupInfo :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.EventHubConsumerGroupInfo"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_event_hub_consumer_group_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, resource_name=resource_name, event_hub_endpoint_name=event_hub_endpoint_name, name=name, template_url=self.get_event_hub_consumer_group.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('EventHubConsumerGroupInfo', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_event_hub_consumer_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups/{name}'} # type: ignore @distributed_trace_async async def create_event_hub_consumer_group( self, resource_group_name: str, resource_name: str, event_hub_endpoint_name: str, name: str, **kwargs: Any ) -> "_models.EventHubConsumerGroupInfo": """Add a consumer group to an Event 
Hub-compatible endpoint in an IoT hub. Add a consumer group to an Event Hub-compatible endpoint in an IoT hub. :param resource_group_name: The name of the resource group that contains the IoT hub. :type resource_group_name: str :param resource_name: The name of the IoT hub. :type resource_name: str :param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub. :type event_hub_endpoint_name: str :param name: The name of the consumer group to add. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: EventHubConsumerGroupInfo, or the result of cls(response) :rtype: ~azure.mgmt.iothub.v2016_02_03.models.EventHubConsumerGroupInfo :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.EventHubConsumerGroupInfo"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_create_event_hub_consumer_group_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, resource_name=resource_name, event_hub_endpoint_name=event_hub_endpoint_name, name=name, template_url=self.create_event_hub_consumer_group.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('EventHubConsumerGroupInfo', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized 
    # NOTE(review): auto-generated (autorest) async operations-group methods.
    # Only comments were added below; code tokens are unchanged.
    create_event_hub_consumer_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups/{name}'}  # type: ignore


    @distributed_trace_async
    async def delete_event_hub_consumer_group(
        self,
        resource_group_name: str,
        resource_name: str,
        event_hub_endpoint_name: str,
        name: str,
        **kwargs: Any
    ) -> None:
        """Delete a consumer group from an Event Hub-compatible endpoint in an IoT hub.

        Delete a consumer group from an Event Hub-compatible endpoint in an IoT hub.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT
         hub.
        :type event_hub_endpoint_name: str
        :param name: The name of the consumer group to delete.
        :type name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map common ARM failure codes onto typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_delete_event_hub_consumer_group_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            event_hub_endpoint_name=event_hub_endpoint_name,
            name=name,
            template_url=self.delete_event_hub_consumer_group.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Anything other than 200 is deserialized into an ARM error and raised.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete_event_hub_consumer_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups/{name}'}  # type: ignore


    @distributed_trace
    def list_jobs(
        self,
        resource_group_name: str,
        resource_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.JobResponseListResult"]:
        """Get a list of all the jobs in an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.

        Get a list of all the jobs in an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either JobResponseListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2016_02_03.models.JobResponseListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.JobResponseListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # First page uses the metadata URL template; follow-up pages reuse next_link.
            if not next_link:
                request = build_list_jobs_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    template_url=self.list_jobs.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_jobs_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize("JobResponseListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response


        return AsyncItemPaged(
            get_next, extract_data
        )
    list_jobs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/jobs'}  # type: ignore

    @distributed_trace_async
    async def get_job(
        self,
        resource_group_name: str,
        resource_name: str,
        job_id: str,
        **kwargs: Any
    ) -> "_models.JobResponse":
        """Get the details of a job from an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.

        Get the details of a job from an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :param job_id: The job identifier.
        :type job_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse, or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2016_02_03.models.JobResponse
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.JobResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_job_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            job_id=job_id,
            template_url=self.get_job.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('JobResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/jobs/{jobId}'}  # type: ignore


    @distributed_trace
    def get_quota_metrics(
        self,
        resource_group_name: str,
        resource_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.IotHubQuotaMetricInfoListResult"]:
        """Get the quota metrics for an IoT hub.

        Get the quota metrics for an IoT hub.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either IotHubQuotaMetricInfoListResult or the result
         of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2016_02_03.models.IotHubQuotaMetricInfoListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.IotHubQuotaMetricInfoListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            if not next_link:
                request = build_get_quota_metrics_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    template_url=self.get_quota_metrics.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_get_quota_metrics_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize("IotHubQuotaMetricInfoListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response


        return AsyncItemPaged(
            get_next, extract_data
        )
    get_quota_metrics.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/quotaMetrics'}  # type: ignore

    @distributed_trace_async
    async def check_name_availability(
        self,
        operation_inputs: "_models.OperationInputs",
        **kwargs: Any
    ) -> "_models.IotHubNameAvailabilityInfo":
        """Check if an IoT hub name is available.

        Check if an IoT hub name is available.

        :param operation_inputs: Set the name parameter in the OperationInputs structure to the
         name of the IoT hub to check.
        :type operation_inputs: ~azure.mgmt.iothub.v2016_02_03.models.OperationInputs
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: IotHubNameAvailabilityInfo, or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2016_02_03.models.IotHubNameAvailabilityInfo
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.IotHubNameAvailabilityInfo"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        # Serialize the request body before building the HTTP request.
        _json = self._serialize.body(operation_inputs, 'OperationInputs')

        request = build_check_name_availability_request(
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            json=_json,
            template_url=self.check_name_availability.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('IotHubNameAvailabilityInfo', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Devices/checkNameAvailability'}  # type: ignore


    @distributed_trace
    def list_keys(
        self,
        resource_group_name: str,
        resource_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.SharedAccessSignatureAuthorizationRuleListResult"]:
        """Get the security metadata for an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.

        Get the security metadata for an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SharedAccessSignatureAuthorizationRuleListResult
         or the result of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iothub.v2016_02_03.models.SharedAccessSignatureAuthorizationRuleListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SharedAccessSignatureAuthorizationRuleListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_keys_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    template_url=self.list_keys.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_keys_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize("SharedAccessSignatureAuthorizationRuleListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response


        return AsyncItemPaged(
            get_next, extract_data
        )
    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/listkeys'}  # type: ignore

    @distributed_trace_async
    async def get_keys_for_key_name(
        self,
        resource_group_name: str,
        resource_name: str,
        key_name: str,
        **kwargs: Any
    ) -> "_models.SharedAccessSignatureAuthorizationRule":
        """Get a shared access policy by name from an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.

        Get a shared access policy by name from an IoT hub. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :param key_name: The name of the shared access policy.
        :type key_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SharedAccessSignatureAuthorizationRule, or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2016_02_03.models.SharedAccessSignatureAuthorizationRule
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SharedAccessSignatureAuthorizationRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_keys_for_key_name_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            key_name=key_name,
            template_url=self.get_keys_for_key_name.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('SharedAccessSignatureAuthorizationRule', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_keys_for_key_name.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/IotHubKeys/{keyName}/listkeys'}  # type: ignore


    @distributed_trace_async
    async def export_devices(
        self,
        resource_group_name: str,
        resource_name: str,
        export_devices_parameters: "_models.ExportDevicesRequest",
        **kwargs: Any
    ) -> "_models.JobResponse":
        """Exports all the device identities in the IoT hub identity registry to an Azure Storage
        blob container. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.

        Exports all the device identities in the IoT hub identity registry to an Azure Storage blob
        container. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :param export_devices_parameters: The parameters that specify the export devices operation.
        :type export_devices_parameters: ~azure.mgmt.iothub.v2016_02_03.models.ExportDevicesRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse, or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2016_02_03.models.JobResponse
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.JobResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        _json = self._serialize.body(export_devices_parameters, 'ExportDevicesRequest')

        request = build_export_devices_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            content_type=content_type,
            json=_json,
            template_url=self.export_devices.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('JobResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    export_devices.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/exportDevices'}  # type: ignore


    @distributed_trace_async
    async def import_devices(
        self,
        resource_group_name: str,
        resource_name: str,
        import_devices_parameters: "_models.ImportDevicesRequest",
        **kwargs: Any
    ) -> "_models.JobResponse":
        """Import, update, or delete device identities in the IoT hub identity registry from a
        blob. For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.

        Import, update, or delete device identities in the IoT hub identity registry from a blob.
        For more information, see:
        https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :param import_devices_parameters: The parameters that specify the import devices operation.
        :type import_devices_parameters: ~azure.mgmt.iothub.v2016_02_03.models.ImportDevicesRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: JobResponse, or the result of cls(response)
        :rtype: ~azure.mgmt.iothub.v2016_02_03.models.JobResponse
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.JobResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        _json = self._serialize.body(import_devices_parameters, 'ImportDevicesRequest')

        request = build_import_devices_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            content_type=content_type,
            json=_json,
            template_url=self.import_devices.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('JobResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    import_devices.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/importDevices'}  # type: ignore
Azure/azure-sdk-for-python
sdk/iothub/azure-mgmt-iothub/azure/mgmt/iothub/v2016_02_03/aio/operations/_iot_hub_resource_operations.py
Python
mit
67,149
0.004914
""" This module is about 'property' descriptors. """ import sys from functools import wraps from easypy.caching import cached_property # public import, for back-compat _builtin_property = property def safe_property(fget=None, fset=None, fdel=None, doc=None): """ A pythonic property which raises a RuntimeError when an attribute error is raised within it. This fixes an issue in python where AttributeErrors that occur anywhere _within_ 'property' functions are effectively suppressed, and converted to AttributeErrors for the property itself. This is confusing for the debugger, and also leads to unintended fallback calls to a __getattr__ if defined >>> def i_raise_an_exception(): ... raise AttributeError("blap") >>> class Test(object): ... def some_prop(self): ... return i_raise_an_exception() ... def __getattr__(self, attr): ... assert False ... prop = property(some_prop) ... safe_prop = safe_property(some_prop) >>> t = Test() >>> t.prop Traceback (most recent call last): ... AssertionError >>> t.safe_prop Traceback (most recent call last): ... AttributeError: blap ... During handling of the above exception, another exception occurred: ... Traceback (most recent call last): ... RuntimeError: Attribute error within a property (blap) """ if fget is not None: @wraps(fget) def callable(*args, **kwargs): try: return fget(*args, **kwargs) except AttributeError: _, exc, tb = sys.exc_info() raise RuntimeError("Attribute error within a property (%s)" % exc).with_traceback(tb) return _builtin_property(callable, fset, fdel, doc) else: return _builtin_property(fget, fset, fdel, doc)
weka-io/easypy
easypy/properties.py
Python
bsd-3-clause
1,878
0.002662
#!/usr/bin/env python import subprocess, shutil, shlex, os from sys import exit, argv from netCDF4 import Dataset as NC import numpy as np def process_arguments(): from argparse import ArgumentParser parser = ArgumentParser() parser.add_argument("PISM_PATH") parser.add_argument("MPIEXEC") parser.add_argument("PISM_SOURCE_DIR") return parser.parse_args() def copy_input(opts): shutil.copy(os.path.join(opts.PISM_SOURCE_DIR, "test/test_hydrology/inputforP_regression.nc"), ".") def generate_config(): """Generates the config file with custom ice softness and hydraulic conductivity.""" print "generating testPconfig.nc ..." nc = NC("testPconfig.nc", 'w') pism_overrides = nc.createVariable("pism_overrides", 'b') pism_overrides.standard_gravity = 9.81; pism_overrides.standard_gravity_doc = "m s-2; = g; acceleration due to gravity on Earth geoid"; pism_overrides.fresh_water_density = 1000.0; pism_overrides.fresh_water_density_doc = "kg m-3; = rhow"; pism_overrides.ice_softness = 3.1689e-24 pism_overrides.ice_softness_doc = "Pa-3 s-1; ice softness; NOT DEFAULT" pism_overrides.hydrology_hydraulic_conductivity = 1.0e-2 / (1000.0 * 9.81) pism_overrides.hydrology_hydraulic_conductivity_doc = "= k; NOT DEFAULT" pism_overrides.hydrology_tillwat_max = 0.0; pism_overrides.hydrology_tillwat_max_doc = "m; turn off till water mechanism"; pism_overrides.hydrology_thickness_power_in_flux = 1.0; pism_overrides.hydrology_thickness_power_in_flux_doc = "; = alpha in notes"; pism_overrides.hydrology_gradient_power_in_flux = 2.0; pism_overrides.hydrology_gradient_power_in_flux_doc = "; = beta in notes"; pism_overrides.hydrology_roughness_scale = 1.0; pism_overrides.hydrology_roughness_scale_doc = "m; W_r in notes; roughness scale"; pism_overrides.hydrology_regularizing_porosity = 0.01; pism_overrides.hydrology_regularizing_porosity_doc = "[pure]; phi_0 in notes"; pism_overrides.yield_stress_model = "constant"; pism_overrides.yield_stress_model_doc = "only the constant yield stress model works 
without till"; pism_overrides.default_tauc = 1e6; pism_overrides.default_tauc_doc = "set default to 'high tauc'"; nc.close() def run_pism(opts): cmd = "%s %s/pismr -config_override testPconfig.nc -boot_file inputforP_regression.nc -Mx %d -My %d -Mz 11 -Lz 4000 -hydrology distributed -report_mass_accounting -y 0.08333333333333 -max_dt 0.01 -no_mass -energy none -stress_balance ssa+sia -ssa_dirichlet_bc -o end.nc" % (opts.MPIEXEC, opts.PISM_PATH, 21, 21) print cmd subprocess.call(shlex.split(cmd)) def check_drift(file1, file2): nc1 = NC(file1) nc2 = NC(file2) stored_drift = {'bwat_max': 0.024263951766380631, 'bwp_max': 81658.173074602877, 'bwp_avg': 7152.4179414459632, 'bwat_avg': 0.004056179416920525} drift = {} for name in ("bwat", "bwp"): var1 = nc1.variables[name] var2 = nc2.variables[name] diff = np.abs(np.squeeze(var1[:]) - np.squeeze(var2[:])) drift["%s_max" % name] = np.max(diff) drift["%s_avg" % name] = np.average(diff) print "drift = ", drift print "stored_drift = ", stored_drift for name in drift.keys(): rel_diff = np.abs(stored_drift[name] - drift[name]) / stored_drift[name] if rel_diff > 1e-3: print "Stored and computed drifts in %s differ: %f != %f" % (name, stored_drift[name], drift[name]) exit(1) def cleanup(): for fname in ("inputforP_regression.nc", "testPconfig.nc", "end.nc"): os.remove(fname) if __name__ == "__main__": opts = process_arguments() print "Copying input files..." copy_input(opts) print "Generating the -config_override file..." generate_config() print "Running PISM..." run_pism(opts) print "Checking the drift..." check_drift("inputforP_regression.nc", "end.nc") print "Cleaning up..." cleanup()
talbrecht/pism_pik06
test/regression/test_29.py
Python
gpl-3.0
4,068
0.009095
"""PDF generation of printable QR-code tag sheets (labels and plant sticks)."""
from PyQRNative import *
from PIL.Image import BILINEAR, BICUBIC, ANTIALIAS, NEAREST
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import portrait, A4
from reportlab.lib.units import cm, mm
from StringIO import StringIO
from plant.tag import create_tag
import time
from datetime import datetime

# QR symbol version (size) and error-correction level used for every tag.
QR_TYPE = 4
QR_ECC = QRErrorCorrectLevel.H

TAG_FONT = 'Courier-Bold'
TAG_FONT_PT = 8
FOOT_FONT = 'Helvetica'
FOOT_FONT_PT = 8
TOP_YMARGIN = 0.75*cm
# Per-layout geometry: QR size, left/right x margins, y margin, and whether
# the sheet carries a creation footer / uses paired (every-other) columns.
LAYOUTS = {
    'Long sticks':
        {'qr_size': 2*cm, 'qr_lxmargin': 1*cm, 'qr_rxmargin': 1*cm,
         'qr_ymargin': 5.0*cm, 'created': True, 'paired': False},
    'Sticky labels 70x37mm':
        {'qr_size': 2.5*cm, 'qr_lxmargin': 0.50*cm, 'qr_rxmargin': 0.50*cm,
         'qr_ymargin': 1.2*cm, 'created': False, 'paired': False},
    'Sticky labels 70x37mm (paired)':
        {'qr_size': 2.5*cm, 'qr_lxmargin': 0.50*cm, 'qr_rxmargin': 0.50*cm,
         'qr_ymargin': 1.2*cm, 'created': False, 'paired': True},
#    'Verbose labels ?x?mm':
#        {'qr_size': 0, 'qr_lxmargin': 0, 'qr_ymargin': 0},
}
LAYOUT_LIST = LAYOUTS.keys()
DUPLEX_LIST = ['No', 'Short side']

# (Translated from Swedish:) Roughly three cm seems to be a reasonable size,
# both considering what actually fits in practice and the analyses done at
# http://www.qrstuff.com/blog/2011/01/18/what-size-should-a-qr-code-be
# A suitable font size for the tags seems to be 8pt Helvetica.


def validate_params(layout, duplex):
    """Return True if layout and duplex are supported; False when missing/unknown."""
    if (layout is None) or (layout not in LAYOUT_LIST):
        return False
    if (duplex is None) or (duplex not in DUPLEX_LIST):
        return False
    if (layout == 'Verbose labels ?x?mm'):
        raise NotImplementedError
    return True


def generate_new_qrimage():
    """Create a fresh tag in the database and return (QR image, tag string)."""
    tag = create_tag()

    qr = QRCode(QR_TYPE, QR_ECC)
    qr.addData('https://YOUR_DOMAIN/' + str(tag.tag))
    qr.make()

    return (qr.makeImage(), tag.tag)


def generate_qr_from_layout(layout, duplex, pagesize=A4):
    """Render a full page of QR tags for the given layout and return it as a
    StringIO containing the finished PDF.

    NOTE(review): indentation below was reconstructed from a whitespace-mangled
    source; block structure follows the only logically consistent reading.
    """
    if duplex == 'Long side':
        raise NotImplementedError('only short page duplex implemented')

    now = datetime.now()

    qr_size = LAYOUTS[layout]['qr_size']
    qr_lxmargin = LAYOUTS[layout]['qr_lxmargin']
    qr_rxmargin = LAYOUTS[layout]['qr_rxmargin']
    qr_ymargin = LAYOUTS[layout]['qr_ymargin']
    created = LAYOUTS[layout]['created']
    paired = LAYOUTS[layout]['paired']

    # Start at the top-right cell; x walks leftwards, y walks down the page.
    x = pagesize[0] - (qr_size + qr_lxmargin)
    y = pagesize[1] - (qr_size + TOP_YMARGIN)

    # Validate parameters; this is mostly for debugging
    if (qr_size < 1) or (qr_lxmargin < 1) or (qr_rxmargin < 1) or (qr_ymargin < 1):
        raise ValueError(u'Internal error: One of qr size, qr x margin or qr y margin is zero.')

    # Generate QR codes with positions
    qrimgs = []
    while y >= 0:
        xnum = 0;
        while x > 0:
            xnum += 1
            # 'paired' layouts only fill every other column.
            if (not paired) or (xnum % 2 != 0):
                (qrimg, tag) = generate_new_qrimage()
                qrimgs.append({'image': qrimg, 'tag': tag, 'x': x, 'y': y})
            x -= (qr_size + qr_rxmargin + qr_lxmargin)
        x = pagesize[0] - (qr_size + qr_lxmargin)
        y -= (qr_size + qr_ymargin)

    f = StringIO();
    pdf = canvas.Canvas(f, pagesize=portrait(pagesize), pageCompression=0)

    # Plot QR codes on first side
    pdf.setFont(TAG_FONT, TAG_FONT_PT)
    for qrimg in qrimgs:
        x = qrimg['x']
        y = qrimg['y']
        # drawImage() seems to crash on PIL objects so we use drawInlineImage() instead, even though it's deprecated.
        # PyQRNative draws a white margin around the QR code, making it about one eigth smaller than the required size.
        pdf.drawInlineImage(qrimg['image'], x, y+(qr_size*0.0625), width=qr_size, height=qr_size, preserveAspectRatio=True)
        pdf.drawCentredString(x + (qr_size/2), y + 0.05*cm, qrimg['tag'])
    if created:
        pdf.setFont(FOOT_FONT, FOOT_FONT_PT)
        pdf.drawString(cm, cm, 'Created on %s' % str(now))
    pdf.showPage()

    # Optional duplex back side: mirror x positions and rotate the page 180°.
    if duplex != 'No':
        pdf.setFont(TAG_FONT, TAG_FONT_PT)
        pdf.setPageRotation(180)
        for qrimg in qrimgs:
            x = portrait(pagesize)[0] - qrimg['x'] - qr_size
            y = qrimg['y']
            pdf.drawInlineImage(qrimg['image'], x, y+(qr_size*0.0625), width=qr_size, height=qr_size, preserveAspectRatio=True)
            pdf.drawCentredString(x + (qr_size/2), y + 0.05*cm, qrimg['tag'])
        if created:
            pdf.setFont(FOOT_FONT, FOOT_FONT_PT)
            pdf.drawRightString(portrait(pagesize)[0] - cm, cm, 'Created on %s' % str(now))
        pdf.showPage()

    pdf.save()
    return f
andbof/plantdb
qr/functions.py
Python
gpl-2.0
4,614
0.007152
"""SCons.Tool.m4 Tool-specific initialization for m4. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001 - 2014 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/m4.py 2014/09/27 12:51:43 garyo" import SCons.Action import SCons.Builder import SCons.Util def generate(env): """Add Builders and construction variables for m4 to an Environment.""" M4Action = SCons.Action.Action('$M4COM', '$M4COMSTR') bld = SCons.Builder.Builder(action = M4Action, src_suffix = '.m4') env['BUILDERS']['M4'] = bld # .m4 files might include other files, and it would be pretty hard # to write a scanner for it, so let's just cd to the dir of the m4 # file and run from there. # The src_suffix setup is like so: file.c.m4 -> file.c, # file.cpp.m4 -> file.cpp etc. 
env['M4'] = 'm4' env['M4FLAGS'] = SCons.Util.CLVar('-E') env['M4COM'] = 'cd ${SOURCE.rsrcdir} && $M4 $M4FLAGS < ${SOURCE.file} > ${TARGET.abspath}' def exists(env): return env.Detect('m4') # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
stonekyx/binary
vendor/scons-local-2.3.4/SCons/Tool/m4.py
Python
gpl-3.0
2,309
0.003898
# Version: 6.3 # Architecture: i386 # CompanyName: Microsoft Corporation # FileDescription: NT Layer DLL # FileVersion: 6.3.9600.17031 (winblue_gdr.140221-1952) # InternalName: ntdll.dll # LegalCopyright: Microsoft Corporation. All rights reserved. # OriginalFilename: ntdll.dll # ProductName: Microsoft Windows Operating System # ProductVersion: 6.3.9600.17031 # Translation: 78644233 from past.builtins import xrange import vstruct from vstruct.primitives import * EXQUEUEINDEX = v_enum() EXQUEUEINDEX.ExPoolUntrusted = 0 EXQUEUEINDEX.ExPoolTrusted = 1 EXQUEUEINDEX.ExPoolMax = 8 KPROCESS_STATE = v_enum() KPROCESS_STATE.ProcessInMemory = 0 KPROCESS_STATE.ProcessOutOfMemory = 1 KPROCESS_STATE.ProcessInTransition = 2 KPROCESS_STATE.ProcessOutTransition = 3 KPROCESS_STATE.ProcessInSwap = 4 KPROCESS_STATE.ProcessOutSwap = 5 KPROCESS_STATE.ProcessAllSwapStates = 6 EX_GEN_RANDOM_DOMAIN = v_enum() EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainKernel = 0 EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainFirst = 0 EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainUserVisible = 1 EX_GEN_RANDOM_DOMAIN.ExGenRandomDomainMax = 2 WHEA_ERROR_SEVERITY = v_enum() WHEA_ERROR_SEVERITY.WheaErrSevRecoverable = 0 WHEA_ERROR_SEVERITY.WheaErrSevFatal = 1 WHEA_ERROR_SEVERITY.WheaErrSevCorrected = 2 WHEA_ERROR_SEVERITY.WheaErrSevInformational = 3 DEVICE_WAKE_DEPTH = v_enum() DEVICE_WAKE_DEPTH.DeviceWakeDepthNotWakeable = 0 DEVICE_WAKE_DEPTH.DeviceWakeDepthD0 = 1 DEVICE_WAKE_DEPTH.DeviceWakeDepthD1 = 2 DEVICE_WAKE_DEPTH.DeviceWakeDepthD2 = 3 DEVICE_WAKE_DEPTH.DeviceWakeDepthD3hot = 4 DEVICE_WAKE_DEPTH.DeviceWakeDepthD3cold = 5 DEVICE_WAKE_DEPTH.DeviceWakeDepthMaximum = 6 WOW64_SHARED_INFORMATION = v_enum() WOW64_SHARED_INFORMATION.SharedNtdll32LdrInitializeThunk = 0 WOW64_SHARED_INFORMATION.SharedNtdll32KiUserExceptionDispatcher = 1 WOW64_SHARED_INFORMATION.SharedNtdll32KiUserApcDispatcher = 2 WOW64_SHARED_INFORMATION.SharedNtdll32KiUserCallbackDispatcher = 3 WOW64_SHARED_INFORMATION.SharedNtdll32ExpInterlockedPopEntrySListFault 
= 4 WOW64_SHARED_INFORMATION.SharedNtdll32ExpInterlockedPopEntrySListResume = 5 WOW64_SHARED_INFORMATION.SharedNtdll32ExpInterlockedPopEntrySListEnd = 6 WOW64_SHARED_INFORMATION.SharedNtdll32RtlUserThreadStart = 7 WOW64_SHARED_INFORMATION.SharedNtdll32pQueryProcessDebugInformationRemote = 8 WOW64_SHARED_INFORMATION.SharedNtdll32BaseAddress = 9 WOW64_SHARED_INFORMATION.SharedNtdll32LdrSystemDllInitBlock = 10 WOW64_SHARED_INFORMATION.Wow64SharedPageEntriesCount = 11 REG_NOTIFY_CLASS = v_enum() REG_NOTIFY_CLASS.RegNtDeleteKey = 0 REG_NOTIFY_CLASS.RegNtPreDeleteKey = 0 REG_NOTIFY_CLASS.RegNtSetValueKey = 1 REG_NOTIFY_CLASS.RegNtPreSetValueKey = 1 REG_NOTIFY_CLASS.RegNtDeleteValueKey = 2 REG_NOTIFY_CLASS.RegNtPreDeleteValueKey = 2 REG_NOTIFY_CLASS.RegNtSetInformationKey = 3 REG_NOTIFY_CLASS.RegNtPreSetInformationKey = 3 REG_NOTIFY_CLASS.RegNtRenameKey = 4 REG_NOTIFY_CLASS.RegNtPreRenameKey = 4 REG_NOTIFY_CLASS.RegNtEnumerateKey = 5 REG_NOTIFY_CLASS.RegNtPreEnumerateKey = 5 REG_NOTIFY_CLASS.RegNtEnumerateValueKey = 6 REG_NOTIFY_CLASS.RegNtPreEnumerateValueKey = 6 REG_NOTIFY_CLASS.RegNtQueryKey = 7 REG_NOTIFY_CLASS.RegNtPreQueryKey = 7 REG_NOTIFY_CLASS.RegNtQueryValueKey = 8 REG_NOTIFY_CLASS.RegNtPreQueryValueKey = 8 REG_NOTIFY_CLASS.RegNtQueryMultipleValueKey = 9 REG_NOTIFY_CLASS.RegNtPreQueryMultipleValueKey = 9 REG_NOTIFY_CLASS.RegNtPreCreateKey = 10 REG_NOTIFY_CLASS.RegNtPostCreateKey = 11 REG_NOTIFY_CLASS.RegNtPreOpenKey = 12 REG_NOTIFY_CLASS.RegNtPostOpenKey = 13 REG_NOTIFY_CLASS.RegNtKeyHandleClose = 14 REG_NOTIFY_CLASS.RegNtPreKeyHandleClose = 14 REG_NOTIFY_CLASS.RegNtPostDeleteKey = 15 REG_NOTIFY_CLASS.RegNtPostSetValueKey = 16 REG_NOTIFY_CLASS.RegNtPostDeleteValueKey = 17 REG_NOTIFY_CLASS.RegNtPostSetInformationKey = 18 REG_NOTIFY_CLASS.RegNtPostRenameKey = 19 REG_NOTIFY_CLASS.RegNtPostEnumerateKey = 20 REG_NOTIFY_CLASS.RegNtPostEnumerateValueKey = 21 REG_NOTIFY_CLASS.RegNtPostQueryKey = 22 REG_NOTIFY_CLASS.RegNtPostQueryValueKey = 23 
REG_NOTIFY_CLASS.RegNtPostQueryMultipleValueKey = 24 REG_NOTIFY_CLASS.RegNtPostKeyHandleClose = 25 REG_NOTIFY_CLASS.RegNtPreCreateKeyEx = 26 REG_NOTIFY_CLASS.RegNtPostCreateKeyEx = 27 REG_NOTIFY_CLASS.RegNtPreOpenKeyEx = 28 REG_NOTIFY_CLASS.RegNtPostOpenKeyEx = 29 REG_NOTIFY_CLASS.RegNtPreFlushKey = 30 REG_NOTIFY_CLASS.RegNtPostFlushKey = 31 REG_NOTIFY_CLASS.RegNtPreLoadKey = 32 REG_NOTIFY_CLASS.RegNtPostLoadKey = 33 REG_NOTIFY_CLASS.RegNtPreUnLoadKey = 34 REG_NOTIFY_CLASS.RegNtPostUnLoadKey = 35 REG_NOTIFY_CLASS.RegNtPreQueryKeySecurity = 36 REG_NOTIFY_CLASS.RegNtPostQueryKeySecurity = 37 REG_NOTIFY_CLASS.RegNtPreSetKeySecurity = 38 REG_NOTIFY_CLASS.RegNtPostSetKeySecurity = 39 REG_NOTIFY_CLASS.RegNtCallbackObjectContextCleanup = 40 REG_NOTIFY_CLASS.RegNtPreRestoreKey = 41 REG_NOTIFY_CLASS.RegNtPostRestoreKey = 42 REG_NOTIFY_CLASS.RegNtPreSaveKey = 43 REG_NOTIFY_CLASS.RegNtPostSaveKey = 44 REG_NOTIFY_CLASS.RegNtPreReplaceKey = 45 REG_NOTIFY_CLASS.RegNtPostReplaceKey = 46 REG_NOTIFY_CLASS.MaxRegNtNotifyClass = 47 DEVICE_RELATION_TYPE = v_enum() DEVICE_RELATION_TYPE.BusRelations = 0 DEVICE_RELATION_TYPE.EjectionRelations = 1 DEVICE_RELATION_TYPE.PowerRelations = 2 DEVICE_RELATION_TYPE.RemovalRelations = 3 DEVICE_RELATION_TYPE.TargetDeviceRelation = 4 DEVICE_RELATION_TYPE.SingleBusRelations = 5 DEVICE_RELATION_TYPE.TransportRelations = 6 SE_WS_APPX_SIGNATURE_ORIGIN = v_enum() SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_NOT_VALIDATED = 0 SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_UNKNOWN = 1 SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_APPSTORE = 2 SE_WS_APPX_SIGNATURE_ORIGIN.SE_WS_APPX_SIGNATURE_ORIGIN_WINDOWS = 3 FILE_INFORMATION_CLASS = v_enum() FILE_INFORMATION_CLASS.FileDirectoryInformation = 1 FILE_INFORMATION_CLASS.FileFullDirectoryInformation = 2 FILE_INFORMATION_CLASS.FileBothDirectoryInformation = 3 FILE_INFORMATION_CLASS.FileBasicInformation = 4 FILE_INFORMATION_CLASS.FileStandardInformation = 5 
FILE_INFORMATION_CLASS.FileInternalInformation = 6 FILE_INFORMATION_CLASS.FileEaInformation = 7 FILE_INFORMATION_CLASS.FileAccessInformation = 8 FILE_INFORMATION_CLASS.FileNameInformation = 9 FILE_INFORMATION_CLASS.FileRenameInformation = 10 FILE_INFORMATION_CLASS.FileLinkInformation = 11 FILE_INFORMATION_CLASS.FileNamesInformation = 12 FILE_INFORMATION_CLASS.FileDispositionInformation = 13 FILE_INFORMATION_CLASS.FilePositionInformation = 14 FILE_INFORMATION_CLASS.FileFullEaInformation = 15 FILE_INFORMATION_CLASS.FileModeInformation = 16 FILE_INFORMATION_CLASS.FileAlignmentInformation = 17 FILE_INFORMATION_CLASS.FileAllInformation = 18 FILE_INFORMATION_CLASS.FileAllocationInformation = 19 FILE_INFORMATION_CLASS.FileEndOfFileInformation = 20 FILE_INFORMATION_CLASS.FileAlternateNameInformation = 21 FILE_INFORMATION_CLASS.FileStreamInformation = 22 FILE_INFORMATION_CLASS.FilePipeInformation = 23 FILE_INFORMATION_CLASS.FilePipeLocalInformation = 24 FILE_INFORMATION_CLASS.FilePipeRemoteInformation = 25 FILE_INFORMATION_CLASS.FileMailslotQueryInformation = 26 FILE_INFORMATION_CLASS.FileMailslotSetInformation = 27 FILE_INFORMATION_CLASS.FileCompressionInformation = 28 FILE_INFORMATION_CLASS.FileObjectIdInformation = 29 FILE_INFORMATION_CLASS.FileCompletionInformation = 30 FILE_INFORMATION_CLASS.FileMoveClusterInformation = 31 FILE_INFORMATION_CLASS.FileQuotaInformation = 32 FILE_INFORMATION_CLASS.FileReparsePointInformation = 33 FILE_INFORMATION_CLASS.FileNetworkOpenInformation = 34 FILE_INFORMATION_CLASS.FileAttributeTagInformation = 35 FILE_INFORMATION_CLASS.FileTrackingInformation = 36 FILE_INFORMATION_CLASS.FileIdBothDirectoryInformation = 37 FILE_INFORMATION_CLASS.FileIdFullDirectoryInformation = 38 FILE_INFORMATION_CLASS.FileValidDataLengthInformation = 39 FILE_INFORMATION_CLASS.FileShortNameInformation = 40 FILE_INFORMATION_CLASS.FileIoCompletionNotificationInformation = 41 FILE_INFORMATION_CLASS.FileIoStatusBlockRangeInformation = 42 
FILE_INFORMATION_CLASS.FileIoPriorityHintInformation = 43 FILE_INFORMATION_CLASS.FileSfioReserveInformation = 44 FILE_INFORMATION_CLASS.FileSfioVolumeInformation = 45 FILE_INFORMATION_CLASS.FileHardLinkInformation = 46 FILE_INFORMATION_CLASS.FileProcessIdsUsingFileInformation = 47 FILE_INFORMATION_CLASS.FileNormalizedNameInformation = 48 FILE_INFORMATION_CLASS.FileNetworkPhysicalNameInformation = 49 FILE_INFORMATION_CLASS.FileIdGlobalTxDirectoryInformation = 50 FILE_INFORMATION_CLASS.FileIsRemoteDeviceInformation = 51 FILE_INFORMATION_CLASS.FileUnusedInformation = 52 FILE_INFORMATION_CLASS.FileNumaNodeInformation = 53 FILE_INFORMATION_CLASS.FileStandardLinkInformation = 54 FILE_INFORMATION_CLASS.FileRemoteProtocolInformation = 55 FILE_INFORMATION_CLASS.FileRenameInformationBypassAccessCheck = 56 FILE_INFORMATION_CLASS.FileLinkInformationBypassAccessCheck = 57 FILE_INFORMATION_CLASS.FileVolumeNameInformation = 58 FILE_INFORMATION_CLASS.FileIdInformation = 59 FILE_INFORMATION_CLASS.FileIdExtdDirectoryInformation = 60 FILE_INFORMATION_CLASS.FileReplaceCompletionInformation = 61 FILE_INFORMATION_CLASS.FileHardLinkFullIdInformation = 62 FILE_INFORMATION_CLASS.FileIdExtdBothDirectoryInformation = 63 FILE_INFORMATION_CLASS.FileMaximumInformation = 64 ALTERNATIVE_ARCHITECTURE_TYPE = v_enum() ALTERNATIVE_ARCHITECTURE_TYPE.StandardDesign = 0 ALTERNATIVE_ARCHITECTURE_TYPE.NEC98x86 = 1 ALTERNATIVE_ARCHITECTURE_TYPE.EndAlternatives = 2 PPM_IDLE_BUCKET_TIME_TYPE = v_enum() PPM_IDLE_BUCKET_TIME_TYPE.PpmIdleBucketTimeInQpc = 0 PPM_IDLE_BUCKET_TIME_TYPE.PpmIdleBucketTimeIn100ns = 1 PPM_IDLE_BUCKET_TIME_TYPE.PpmIdleBucketTimeMaximum = 2 KOBJECTS = v_enum() KOBJECTS.EventNotificationObject = 0 KOBJECTS.EventSynchronizationObject = 1 KOBJECTS.MutantObject = 2 KOBJECTS.ProcessObject = 3 KOBJECTS.QueueObject = 4 KOBJECTS.SemaphoreObject = 5 KOBJECTS.ThreadObject = 6 KOBJECTS.GateObject = 7 KOBJECTS.TimerNotificationObject = 8 KOBJECTS.TimerSynchronizationObject = 9 KOBJECTS.Spare2Object 
= 10 KOBJECTS.Spare3Object = 11 KOBJECTS.Spare4Object = 12 KOBJECTS.Spare5Object = 13 KOBJECTS.Spare6Object = 14 KOBJECTS.Spare7Object = 15 KOBJECTS.Spare8Object = 16 KOBJECTS.ProfileCallbackObject = 17 KOBJECTS.ApcObject = 18 KOBJECTS.DpcObject = 19 KOBJECTS.DeviceQueueObject = 20 KOBJECTS.PriQueueObject = 21 KOBJECTS.InterruptObject = 22 KOBJECTS.ProfileObject = 23 KOBJECTS.Timer2NotificationObject = 24 KOBJECTS.Timer2SynchronizationObject = 25 KOBJECTS.ThreadedDpcObject = 26 KOBJECTS.MaximumKernelObject = 27 NT_PRODUCT_TYPE = v_enum() NT_PRODUCT_TYPE.NtProductWinNt = 1 NT_PRODUCT_TYPE.NtProductLanManNt = 2 NT_PRODUCT_TYPE.NtProductServer = 3 DEVICE_POWER_STATE = v_enum() DEVICE_POWER_STATE.PowerDeviceUnspecified = 0 DEVICE_POWER_STATE.PowerDeviceD0 = 1 DEVICE_POWER_STATE.PowerDeviceD1 = 2 DEVICE_POWER_STATE.PowerDeviceD2 = 3 DEVICE_POWER_STATE.PowerDeviceD3 = 4 DEVICE_POWER_STATE.PowerDeviceMaximum = 5 WHEA_ERROR_SOURCE_TYPE = v_enum() WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeMCE = 0 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeCMC = 1 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeCPE = 2 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeNMI = 3 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypePCIe = 4 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeGeneric = 5 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeINIT = 6 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeBOOT = 7 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeSCIGeneric = 8 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFMCA = 9 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFCMC = 10 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFCPE = 11 WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeMax = 12 PERFINFO_MM_STAT = v_enum() PERFINFO_MM_STAT.PerfInfoMMStatNotUsed = 0 PERFINFO_MM_STAT.PerfInfoMMStatAggregatePageCombine = 1 PERFINFO_MM_STAT.PerfInfoMMStatIterationPageCombine = 2 PERFINFO_MM_STAT.PerfInfoMMStatMax = 3 RTL_GENERIC_COMPARE_RESULTS = v_enum() RTL_GENERIC_COMPARE_RESULTS.GenericLessThan = 0 RTL_GENERIC_COMPARE_RESULTS.GenericGreaterThan = 1 RTL_GENERIC_COMPARE_RESULTS.GenericEqual = 2 KWAIT_BLOCK_STATE = v_enum() 
KWAIT_BLOCK_STATE.WaitBlockBypassStart = 0 KWAIT_BLOCK_STATE.WaitBlockBypassComplete = 1 KWAIT_BLOCK_STATE.WaitBlockSuspendBypassStart = 2 KWAIT_BLOCK_STATE.WaitBlockSuspendBypassComplete = 3 KWAIT_BLOCK_STATE.WaitBlockActive = 4 KWAIT_BLOCK_STATE.WaitBlockInactive = 5 KWAIT_BLOCK_STATE.WaitBlockSuspended = 6 KWAIT_BLOCK_STATE.WaitBlockAllStates = 7 TRACE_INFORMATION_CLASS = v_enum() TRACE_INFORMATION_CLASS.TraceIdClass = 0 TRACE_INFORMATION_CLASS.TraceHandleClass = 1 TRACE_INFORMATION_CLASS.TraceEnableFlagsClass = 2 TRACE_INFORMATION_CLASS.TraceEnableLevelClass = 3 TRACE_INFORMATION_CLASS.GlobalLoggerHandleClass = 4 TRACE_INFORMATION_CLASS.EventLoggerHandleClass = 5 TRACE_INFORMATION_CLASS.AllLoggerHandlesClass = 6 TRACE_INFORMATION_CLASS.TraceHandleByNameClass = 7 TRACE_INFORMATION_CLASS.LoggerEventsLostClass = 8 TRACE_INFORMATION_CLASS.TraceSessionSettingsClass = 9 TRACE_INFORMATION_CLASS.LoggerEventsLoggedClass = 10 TRACE_INFORMATION_CLASS.DiskIoNotifyRoutinesClass = 11 TRACE_INFORMATION_CLASS.TraceInformationClassReserved1 = 12 TRACE_INFORMATION_CLASS.AllPossibleNotifyRoutinesClass = 12 TRACE_INFORMATION_CLASS.FltIoNotifyRoutinesClass = 13 TRACE_INFORMATION_CLASS.TraceInformationClassReserved2 = 14 TRACE_INFORMATION_CLASS.WdfNotifyRoutinesClass = 15 TRACE_INFORMATION_CLASS.MaxTraceInformationClass = 16 KTIMER2_TYPE = v_enum() KTIMER2_TYPE.KTimer2TypeMin = 0 KTIMER2_TYPE.KTimer2Plain = 0 KTIMER2_TYPE.KTimer2IdleResilient = 1 KTIMER2_TYPE.KTimer2HighResolution = 2 KTIMER2_TYPE.KTimer2NoWake = 3 KTIMER2_TYPE.KTimer2NoWakeFinite = 4 KTIMER2_TYPE.KTimer2TypeMax = 5 PF_FILE_ACCESS_TYPE = v_enum() PF_FILE_ACCESS_TYPE.PfFileAccessTypeRead = 0 PF_FILE_ACCESS_TYPE.PfFileAccessTypeWrite = 1 PF_FILE_ACCESS_TYPE.PfFileAccessTypeMax = 2 PROCESSOR_CACHE_TYPE = v_enum() PROCESSOR_CACHE_TYPE.CacheUnified = 0 PROCESSOR_CACHE_TYPE.CacheInstruction = 1 PROCESSOR_CACHE_TYPE.CacheData = 2 PROCESSOR_CACHE_TYPE.CacheTrace = 3 USER_ACTIVITY_PRESENCE = v_enum() 
USER_ACTIVITY_PRESENCE.PowerUserPresent = 0 USER_ACTIVITY_PRESENCE.PowerUserNotPresent = 1 USER_ACTIVITY_PRESENCE.PowerUserInactive = 2 USER_ACTIVITY_PRESENCE.PowerUserMaximum = 3 USER_ACTIVITY_PRESENCE.PowerUserInvalid = 3 MCA_EXCEPTION_TYPE = v_enum() MCA_EXCEPTION_TYPE.HAL_MCE_RECORD = 0 MCA_EXCEPTION_TYPE.HAL_MCA_RECORD = 1 EVENT_TYPE = v_enum() EVENT_TYPE.NotificationEvent = 0 EVENT_TYPE.SynchronizationEvent = 1 KSPIN_LOCK_QUEUE_NUMBER = v_enum() KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare0 = 0 KSPIN_LOCK_QUEUE_NUMBER.LockQueueExpansionLock = 1 KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare2 = 2 KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare3 = 3 KSPIN_LOCK_QUEUE_NUMBER.LockQueueVacbLock = 4 KSPIN_LOCK_QUEUE_NUMBER.LockQueueMasterLock = 5 KSPIN_LOCK_QUEUE_NUMBER.LockQueueNonPagedPoolLock = 6 KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoCancelLock = 7 KSPIN_LOCK_QUEUE_NUMBER.LockQueueWorkQueueLock = 8 KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoVpbLock = 9 KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoDatabaseLock = 10 KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoCompletionLock = 11 KSPIN_LOCK_QUEUE_NUMBER.LockQueueNtfsStructLock = 12 KSPIN_LOCK_QUEUE_NUMBER.LockQueueAfdWorkQueueLock = 13 KSPIN_LOCK_QUEUE_NUMBER.LockQueueBcbLock = 14 KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare15 = 15 KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare16 = 16 KSPIN_LOCK_QUEUE_NUMBER.LockQueueMaximumLock = 17 TP_CALLBACK_PRIORITY = v_enum() TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_HIGH = 0 TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_NORMAL = 1 TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_LOW = 2 TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_INVALID = 3 TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_COUNT = 3 FSINFOCLASS = v_enum() FSINFOCLASS.FileFsVolumeInformation = 1 FSINFOCLASS.FileFsLabelInformation = 2 FSINFOCLASS.FileFsSizeInformation = 3 FSINFOCLASS.FileFsDeviceInformation = 4 FSINFOCLASS.FileFsAttributeInformation = 5 FSINFOCLASS.FileFsControlInformation = 6 FSINFOCLASS.FileFsFullSizeInformation = 7 
FSINFOCLASS.FileFsObjectIdInformation = 8 FSINFOCLASS.FileFsDriverPathInformation = 9 FSINFOCLASS.FileFsVolumeFlagsInformation = 10 FSINFOCLASS.FileFsSectorSizeInformation = 11 FSINFOCLASS.FileFsDataCopyInformation = 12 FSINFOCLASS.FileFsMaximumInformation = 13 INTERRUPT_CONNECTION_TYPE = v_enum() INTERRUPT_CONNECTION_TYPE.InterruptTypeControllerInput = 0 INTERRUPT_CONNECTION_TYPE.InterruptTypeXapicMessage = 1 INTERRUPT_CONNECTION_TYPE.InterruptTypeHypertransport = 2 INTERRUPT_CONNECTION_TYPE.InterruptTypeMessageRequest = 3 WORKING_SET_TYPE = v_enum() WORKING_SET_TYPE.WorkingSetTypeUser = 0 WORKING_SET_TYPE.WorkingSetTypeSession = 1 WORKING_SET_TYPE.WorkingSetTypeSystemTypes = 2 WORKING_SET_TYPE.WorkingSetTypeSystemCache = 2 WORKING_SET_TYPE.WorkingSetTypePagedPool = 3 WORKING_SET_TYPE.WorkingSetTypeSystemPtes = 4 WORKING_SET_TYPE.WorkingSetTypeMaximum = 5 POOL_TYPE = v_enum() POOL_TYPE.NonPagedPool = 0 POOL_TYPE.NonPagedPoolExecute = 0 POOL_TYPE.PagedPool = 1 POOL_TYPE.NonPagedPoolMustSucceed = 2 POOL_TYPE.DontUseThisType = 3 POOL_TYPE.NonPagedPoolCacheAligned = 4 POOL_TYPE.PagedPoolCacheAligned = 5 POOL_TYPE.NonPagedPoolCacheAlignedMustS = 6 POOL_TYPE.MaxPoolType = 7 POOL_TYPE.NonPagedPoolBase = 0 POOL_TYPE.NonPagedPoolBaseMustSucceed = 2 POOL_TYPE.NonPagedPoolBaseCacheAligned = 4 POOL_TYPE.NonPagedPoolBaseCacheAlignedMustS = 6 POOL_TYPE.NonPagedPoolSession = 32 POOL_TYPE.PagedPoolSession = 33 POOL_TYPE.NonPagedPoolMustSucceedSession = 34 POOL_TYPE.DontUseThisTypeSession = 35 POOL_TYPE.NonPagedPoolCacheAlignedSession = 36 POOL_TYPE.PagedPoolCacheAlignedSession = 37 POOL_TYPE.NonPagedPoolCacheAlignedMustSSession = 38 POOL_TYPE.NonPagedPoolNx = 512 POOL_TYPE.NonPagedPoolNxCacheAligned = 516 POOL_TYPE.NonPagedPoolSessionNx = 544 IO_PRIORITY_HINT = v_enum() IO_PRIORITY_HINT.IoPriorityVeryLow = 0 IO_PRIORITY_HINT.IoPriorityLow = 1 IO_PRIORITY_HINT.IoPriorityNormal = 2 IO_PRIORITY_HINT.IoPriorityHigh = 3 IO_PRIORITY_HINT.IoPriorityCritical = 4 
IO_PRIORITY_HINT.MaxIoPriorityTypes = 5 MODE = v_enum() MODE.KernelMode = 0 MODE.UserMode = 1 MODE.MaximumMode = 2 FS_FILTER_SECTION_SYNC_TYPE = v_enum() FS_FILTER_SECTION_SYNC_TYPE.SyncTypeOther = 0 FS_FILTER_SECTION_SYNC_TYPE.SyncTypeCreateSection = 1 OB_OPEN_REASON = v_enum() OB_OPEN_REASON.ObCreateHandle = 0 OB_OPEN_REASON.ObOpenHandle = 1 OB_OPEN_REASON.ObDuplicateHandle = 2 OB_OPEN_REASON.ObInheritHandle = 3 OB_OPEN_REASON.ObMaxOpenReason = 4 PERFINFO_KERNELMEMORY_USAGE_TYPE = v_enum() PERFINFO_KERNELMEMORY_USAGE_TYPE.PerfInfoMemUsagePfnMetadata = 0 PERFINFO_KERNELMEMORY_USAGE_TYPE.PerfInfoMemUsageMax = 1 DEVICE_TEXT_TYPE = v_enum() DEVICE_TEXT_TYPE.DeviceTextDescription = 0 DEVICE_TEXT_TYPE.DeviceTextLocationInformation = 1 POWER_STATE_TYPE = v_enum() POWER_STATE_TYPE.SystemPowerState = 0 POWER_STATE_TYPE.DevicePowerState = 1 PS_PROTECTED_SIGNER = v_enum() PS_PROTECTED_SIGNER.PsProtectedSignerNone = 0 PS_PROTECTED_SIGNER.PsProtectedSignerAuthenticode = 1 PS_PROTECTED_SIGNER.PsProtectedSignerCodeGen = 2 PS_PROTECTED_SIGNER.PsProtectedSignerAntimalware = 3 PS_PROTECTED_SIGNER.PsProtectedSignerLsa = 4 PS_PROTECTED_SIGNER.PsProtectedSignerWindows = 5 PS_PROTECTED_SIGNER.PsProtectedSignerWinTcb = 6 PS_PROTECTED_SIGNER.PsProtectedSignerMax = 7 CPU_VENDORS = v_enum() CPU_VENDORS.CPU_NONE = 0 CPU_VENDORS.CPU_INTEL = 1 CPU_VENDORS.CPU_AMD = 2 CPU_VENDORS.CPU_CYRIX = 3 CPU_VENDORS.CPU_TRANSMETA = 4 CPU_VENDORS.CPU_VIA = 5 CPU_VENDORS.CPU_CENTAUR = 5 CPU_VENDORS.CPU_RISE = 6 CPU_VENDORS.CPU_UNKNOWN = 7 IRQ_PRIORITY = v_enum() IRQ_PRIORITY.IrqPriorityUndefined = 0 IRQ_PRIORITY.IrqPriorityLow = 1 IRQ_PRIORITY.IrqPriorityNormal = 2 IRQ_PRIORITY.IrqPriorityHigh = 3 KWAIT_STATE = v_enum() KWAIT_STATE.WaitInProgress = 0 KWAIT_STATE.WaitCommitted = 1 KWAIT_STATE.WaitAborted = 2 KWAIT_STATE.WaitSuspendInProgress = 3 KWAIT_STATE.WaitSuspended = 4 KWAIT_STATE.WaitResumeInProgress = 5 KWAIT_STATE.WaitFirstSuspendState = 3 KWAIT_STATE.WaitLastSuspendState = 5 
KWAIT_STATE.MaximumWaitState = 6 SYSTEM_POWER_STATE = v_enum() SYSTEM_POWER_STATE.PowerSystemUnspecified = 0 SYSTEM_POWER_STATE.PowerSystemWorking = 1 SYSTEM_POWER_STATE.PowerSystemSleeping1 = 2 SYSTEM_POWER_STATE.PowerSystemSleeping2 = 3 SYSTEM_POWER_STATE.PowerSystemSleeping3 = 4 SYSTEM_POWER_STATE.PowerSystemHibernate = 5 SYSTEM_POWER_STATE.PowerSystemShutdown = 6 SYSTEM_POWER_STATE.PowerSystemMaximum = 7 LDR_DDAG_STATE = v_enum() LDR_DDAG_STATE.LdrModulesMerged = -5 LDR_DDAG_STATE.LdrModulesInitError = -4 LDR_DDAG_STATE.LdrModulesSnapError = -3 LDR_DDAG_STATE.LdrModulesUnloaded = -2 LDR_DDAG_STATE.LdrModulesUnloading = -1 LDR_DDAG_STATE.LdrModulesPlaceHolder = 0 LDR_DDAG_STATE.LdrModulesMapping = 1 LDR_DDAG_STATE.LdrModulesMapped = 2 LDR_DDAG_STATE.LdrModulesWaitingForDependencies = 3 LDR_DDAG_STATE.LdrModulesSnapping = 4 LDR_DDAG_STATE.LdrModulesSnapped = 5 LDR_DDAG_STATE.LdrModulesCondensed = 6 LDR_DDAG_STATE.LdrModulesReadyToInit = 7 LDR_DDAG_STATE.LdrModulesInitializing = 8 LDR_DDAG_STATE.LdrModulesReadyToRun = 9 EX_BALANCE_OBJECT = v_enum() EX_BALANCE_OBJECT.ExTimerExpiration = 0 EX_BALANCE_OBJECT.ExThreadSetManagerEvent = 1 EX_BALANCE_OBJECT.ExThreadReaperEvent = 2 EX_BALANCE_OBJECT.ExMaximumBalanceObject = 3 PS_WAKE_REASON = v_enum() PS_WAKE_REASON.PsWakeReasonUser = 0 PS_WAKE_REASON.PsWakeReasonExecutionRequired = 1 PS_WAKE_REASON.PsWakeReasonKernel = 2 PS_WAKE_REASON.PsWakeReasonInstrumentation = 3 PS_WAKE_REASON.PsWakeReasonPreserveProcess = 4 PS_WAKE_REASON.PsMaxWakeReasons = 5 PROCESS_SECTION_TYPE = v_enum() PROCESS_SECTION_TYPE.ProcessSectionData = 0 PROCESS_SECTION_TYPE.ProcessSectionImage = 1 PROCESS_SECTION_TYPE.ProcessSectionImageNx = 2 PROCESS_SECTION_TYPE.ProcessSectionPagefileBacked = 3 PROCESS_SECTION_TYPE.ProcessSectionMax = 4 KINTERRUPT_POLARITY = v_enum() KINTERRUPT_POLARITY.InterruptPolarityUnknown = 0 KINTERRUPT_POLARITY.InterruptActiveHigh = 1 KINTERRUPT_POLARITY.InterruptRisingEdge = 1 KINTERRUPT_POLARITY.InterruptActiveLow = 2 
KINTERRUPT_POLARITY.InterruptFallingEdge = 2 KINTERRUPT_POLARITY.InterruptActiveBoth = 3 KINTERRUPT_POLARITY.InterruptActiveBothTriggerLow = 3 KINTERRUPT_POLARITY.InterruptActiveBothTriggerHigh = 4 IO_ALLOCATION_ACTION = v_enum() IO_ALLOCATION_ACTION.KeepObject = 1 IO_ALLOCATION_ACTION.DeallocateObject = 2 IO_ALLOCATION_ACTION.DeallocateObjectKeepRegisters = 3 EXCEPTION_DISPOSITION = v_enum() EXCEPTION_DISPOSITION.ExceptionContinueExecution = 0 EXCEPTION_DISPOSITION.ExceptionContinueSearch = 1 EXCEPTION_DISPOSITION.ExceptionNestedException = 2 EXCEPTION_DISPOSITION.ExceptionCollidedUnwind = 3 PROC_HYPERVISOR_STATE = v_enum() PROC_HYPERVISOR_STATE.ProcHypervisorNone = 0 PROC_HYPERVISOR_STATE.ProcHypervisorPresent = 1 PROC_HYPERVISOR_STATE.ProcHypervisorPower = 2 MM_PAGE_ACCESS_TYPE = v_enum() MM_PAGE_ACCESS_TYPE.MmPteAccessType = 0 MM_PAGE_ACCESS_TYPE.MmCcReadAheadType = 1 MM_PAGE_ACCESS_TYPE.MmPfnRepurposeType = 2 MM_PAGE_ACCESS_TYPE.MmMaximumPageAccessType = 3 LDR_DLL_LOAD_REASON = v_enum() LDR_DLL_LOAD_REASON.LoadReasonStaticDependency = 0 LDR_DLL_LOAD_REASON.LoadReasonStaticForwarderDependency = 1 LDR_DLL_LOAD_REASON.LoadReasonDynamicForwarderDependency = 2 LDR_DLL_LOAD_REASON.LoadReasonDelayloadDependency = 3 LDR_DLL_LOAD_REASON.LoadReasonDynamicLoad = 4 LDR_DLL_LOAD_REASON.LoadReasonAsImageLoad = 5 LDR_DLL_LOAD_REASON.LoadReasonAsDataLoad = 6 LDR_DLL_LOAD_REASON.LoadReasonUnknown = -1 SECURITY_OPERATION_CODE = v_enum() SECURITY_OPERATION_CODE.SetSecurityDescriptor = 0 SECURITY_OPERATION_CODE.QuerySecurityDescriptor = 1 SECURITY_OPERATION_CODE.DeleteSecurityDescriptor = 2 SECURITY_OPERATION_CODE.AssignSecurityDescriptor = 3 HEAP_LFH_LOCKMODE = v_enum() HEAP_LFH_LOCKMODE.HeapLockNotHeld = 0 HEAP_LFH_LOCKMODE.HeapLockShared = 1 HEAP_LFH_LOCKMODE.HeapLockExclusive = 2 PP_NPAGED_LOOKASIDE_NUMBER = v_enum() PP_NPAGED_LOOKASIDE_NUMBER.LookasideSmallIrpList = 0 PP_NPAGED_LOOKASIDE_NUMBER.LookasideMediumIrpList = 1 PP_NPAGED_LOOKASIDE_NUMBER.LookasideLargeIrpList = 2 
PP_NPAGED_LOOKASIDE_NUMBER.LookasideMdlList = 3 PP_NPAGED_LOOKASIDE_NUMBER.LookasideCreateInfoList = 4 PP_NPAGED_LOOKASIDE_NUMBER.LookasideNameBufferList = 5 PP_NPAGED_LOOKASIDE_NUMBER.LookasideTwilightList = 6 PP_NPAGED_LOOKASIDE_NUMBER.LookasideCompletionList = 7 PP_NPAGED_LOOKASIDE_NUMBER.LookasideScratchBufferList = 8 PP_NPAGED_LOOKASIDE_NUMBER.LookasideMaximumList = 9 WHEA_ERROR_PACKET_DATA_FORMAT = v_enum() WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatIPFSalRecord = 0 WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatXPFMCA = 1 WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatMemory = 2 WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIExpress = 3 WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatNMIPort = 4 WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIXBus = 5 WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIXDevice = 6 WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatGeneric = 7 WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatMax = 8 FS_FILTER_STREAM_FO_NOTIFICATION_TYPE = v_enum() FS_FILTER_STREAM_FO_NOTIFICATION_TYPE.NotifyTypeCreate = 0 FS_FILTER_STREAM_FO_NOTIFICATION_TYPE.NotifyTypeRetired = 1 DISPLAYCONFIG_SCANLINE_ORDERING = v_enum() DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_UNSPECIFIED = 0 DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_PROGRESSIVE = 1 DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_INTERLACED = 2 DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_INTERLACED_UPPERFIELDFIRST = 2 DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_INTERLACED_LOWERFIELDFIRST = 3 DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_FORCE_UINT32 = -1 PROCESS_VA_TYPE = v_enum() PROCESS_VA_TYPE.ProcessVAImage = 0 PROCESS_VA_TYPE.ProcessVASection = 1 PROCESS_VA_TYPE.ProcessVAPrivate = 2 PROCESS_VA_TYPE.ProcessVAMax = 3 SECURITY_IMPERSONATION_LEVEL = v_enum() SECURITY_IMPERSONATION_LEVEL.SecurityAnonymous = 0 SECURITY_IMPERSONATION_LEVEL.SecurityIdentification = 1 
SECURITY_IMPERSONATION_LEVEL.SecurityImpersonation = 2 SECURITY_IMPERSONATION_LEVEL.SecurityDelegation = 3 DEVICE_USAGE_NOTIFICATION_TYPE = v_enum() DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeUndefined = 0 DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypePaging = 1 DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeHibernation = 2 DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeDumpFile = 3 DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeBoot = 4 DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypePostDisplay = 5 BUS_QUERY_ID_TYPE = v_enum() BUS_QUERY_ID_TYPE.BusQueryDeviceID = 0 BUS_QUERY_ID_TYPE.BusQueryHardwareIDs = 1 BUS_QUERY_ID_TYPE.BusQueryCompatibleIDs = 2 BUS_QUERY_ID_TYPE.BusQueryInstanceID = 3 BUS_QUERY_ID_TYPE.BusQueryDeviceSerialNumber = 4 BUS_QUERY_ID_TYPE.BusQueryContainerID = 5 INTERFACE_TYPE = v_enum() INTERFACE_TYPE.InterfaceTypeUndefined = -1 INTERFACE_TYPE.Internal = 0 INTERFACE_TYPE.Isa = 1 INTERFACE_TYPE.Eisa = 2 INTERFACE_TYPE.MicroChannel = 3 INTERFACE_TYPE.TurboChannel = 4 INTERFACE_TYPE.PCIBus = 5 INTERFACE_TYPE.VMEBus = 6 INTERFACE_TYPE.NuBus = 7 INTERFACE_TYPE.PCMCIABus = 8 INTERFACE_TYPE.CBus = 9 INTERFACE_TYPE.MPIBus = 10 INTERFACE_TYPE.MPSABus = 11 INTERFACE_TYPE.ProcessorInternal = 12 INTERFACE_TYPE.InternalPowerBus = 13 INTERFACE_TYPE.PNPISABus = 14 INTERFACE_TYPE.PNPBus = 15 INTERFACE_TYPE.Vmcs = 16 INTERFACE_TYPE.ACPIBus = 17 INTERFACE_TYPE.MaximumInterfaceType = 18 PS_RESOURCE_TYPE = v_enum() PS_RESOURCE_TYPE.PsResourceNonPagedPool = 0 PS_RESOURCE_TYPE.PsResourcePagedPool = 1 PS_RESOURCE_TYPE.PsResourcePageFile = 2 PS_RESOURCE_TYPE.PsResourceWorkingSet = 3 PS_RESOURCE_TYPE.PsResourceMax = 4 HEAP_FAILURE_TYPE = v_enum() HEAP_FAILURE_TYPE.heap_failure_internal = 0 HEAP_FAILURE_TYPE.heap_failure_unknown = 1 HEAP_FAILURE_TYPE.heap_failure_generic = 2 HEAP_FAILURE_TYPE.heap_failure_entry_corruption = 3 HEAP_FAILURE_TYPE.heap_failure_multiple_entries_corruption = 4 HEAP_FAILURE_TYPE.heap_failure_virtual_block_corruption = 5 
HEAP_FAILURE_TYPE.heap_failure_buffer_overrun = 6 HEAP_FAILURE_TYPE.heap_failure_buffer_underrun = 7 HEAP_FAILURE_TYPE.heap_failure_block_not_busy = 8 HEAP_FAILURE_TYPE.heap_failure_invalid_argument = 9 HEAP_FAILURE_TYPE.heap_failure_usage_after_free = 10 HEAP_FAILURE_TYPE.heap_failure_cross_heap_operation = 11 HEAP_FAILURE_TYPE.heap_failure_freelists_corruption = 12 HEAP_FAILURE_TYPE.heap_failure_listentry_corruption = 13 HEAP_FAILURE_TYPE.heap_failure_lfh_bitmap_mismatch = 14 HEAP_FAILURE_TYPE.heap_failure_segment_lfh_bitmap_corruption = 15 HEAP_FAILURE_TYPE.heap_failure_segment_lfh_double_free = 16 WHEA_ERROR_TYPE = v_enum() WHEA_ERROR_TYPE.WheaErrTypeProcessor = 0 WHEA_ERROR_TYPE.WheaErrTypeMemory = 1 WHEA_ERROR_TYPE.WheaErrTypePCIExpress = 2 WHEA_ERROR_TYPE.WheaErrTypeNMI = 3 WHEA_ERROR_TYPE.WheaErrTypePCIXBus = 4 WHEA_ERROR_TYPE.WheaErrTypePCIXDevice = 5 WHEA_ERROR_TYPE.WheaErrTypeGeneric = 6 HARDWARE_COUNTER_TYPE = v_enum() HARDWARE_COUNTER_TYPE.PMCCounter = 0 HARDWARE_COUNTER_TYPE.MaxHardwareCounterType = 1 ReplacesCorHdrNumericDefines = v_enum() ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_ILONLY = 1 ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_32BITREQUIRED = 2 ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_IL_LIBRARY = 4 ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_STRONGNAMESIGNED = 8 ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_NATIVE_ENTRYPOINT = 16 ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_TRACKDEBUGDATA = 65536 ReplacesCorHdrNumericDefines.COR_VERSION_MAJOR_V2 = 2 ReplacesCorHdrNumericDefines.COR_VERSION_MAJOR = 2 ReplacesCorHdrNumericDefines.COR_VERSION_MINOR = 5 ReplacesCorHdrNumericDefines.COR_DELETED_NAME_LENGTH = 8 ReplacesCorHdrNumericDefines.COR_VTABLEGAP_NAME_LENGTH = 8 ReplacesCorHdrNumericDefines.NATIVE_TYPE_MAX_CB = 1 ReplacesCorHdrNumericDefines.COR_ILMETHOD_SECT_SMALL_MAX_DATASIZE = 255 ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_METHODRVA = 1 ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_EHRVA = 2 
# ---------------------------------------------------------------------------
# Enumeration tables (v_enum attribute assignments).  Values mirror the native
# numeric constants these names were generated from; assignment order carries
# no meaning for v_enum, fields are simple name -> int attributes.
# ---------------------------------------------------------------------------

# Remaining ReplacesCorHdrNumericDefines members — the v_enum() instance
# itself is created earlier in the file.
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_BASICBLOCK = 8
ReplacesCorHdrNumericDefines.COR_VTABLE_32BIT = 1
ReplacesCorHdrNumericDefines.COR_VTABLE_64BIT = 2
ReplacesCorHdrNumericDefines.COR_VTABLE_FROM_UNMANAGED = 4
ReplacesCorHdrNumericDefines.COR_VTABLE_FROM_UNMANAGED_RETAIN_APPDOMAIN = 8
ReplacesCorHdrNumericDefines.COR_VTABLE_CALL_MOST_DERIVED = 16
ReplacesCorHdrNumericDefines.IMAGE_COR_EATJ_THUNK_SIZE = 32
ReplacesCorHdrNumericDefines.MAX_CLASS_NAME = 1024
ReplacesCorHdrNumericDefines.MAX_PACKAGE_NAME = 1024

# Job object information classes (NtQuery/SetInformationJobObject levels).
JOBOBJECTINFOCLASS = v_enum()
JOBOBJECTINFOCLASS.JobObjectBasicAccountingInformation = 1
JOBOBJECTINFOCLASS.JobObjectBasicLimitInformation = 2
JOBOBJECTINFOCLASS.JobObjectBasicProcessIdList = 3
JOBOBJECTINFOCLASS.JobObjectBasicUIRestrictions = 4
JOBOBJECTINFOCLASS.JobObjectSecurityLimitInformation = 5
JOBOBJECTINFOCLASS.JobObjectEndOfJobTimeInformation = 6
JOBOBJECTINFOCLASS.JobObjectAssociateCompletionPortInformation = 7
JOBOBJECTINFOCLASS.JobObjectBasicAndIoAccountingInformation = 8
JOBOBJECTINFOCLASS.JobObjectExtendedLimitInformation = 9
JOBOBJECTINFOCLASS.JobObjectJobSetInformation = 10
JOBOBJECTINFOCLASS.JobObjectGroupInformation = 11
JOBOBJECTINFOCLASS.JobObjectNotificationLimitInformation = 12
JOBOBJECTINFOCLASS.JobObjectLimitViolationInformation = 13
JOBOBJECTINFOCLASS.JobObjectGroupInformationEx = 14
JOBOBJECTINFOCLASS.JobObjectCpuRateControlInformation = 15
JOBOBJECTINFOCLASS.JobObjectCompletionFilter = 16
JOBOBJECTINFOCLASS.JobObjectCompletionCounter = 17
JOBOBJECTINFOCLASS.JobObjectFreezeInformation = 18
JOBOBJECTINFOCLASS.JobObjectExtendedAccountingInformation = 19
JOBOBJECTINFOCLASS.JobObjectWakeInformation = 20
JOBOBJECTINFOCLASS.JobObjectBackgroundInformation = 21
JOBOBJECTINFOCLASS.JobObjectSchedulingRankBiasInformation = 22
JOBOBJECTINFOCLASS.JobObjectTimerVirtualizationInformation = 23
JOBOBJECTINFOCLASS.JobObjectCycleTimeNotification = 24
JOBOBJECTINFOCLASS.JobObjectClearEvent = 25
JOBOBJECTINFOCLASS.JobObjectInterferenceInformation = 26
# NOTE(review): the ReservedN names deliberately alias values 18-26 above —
# this matches the public SDK headers, where the Reserved names are the
# undocumented counterparts of the same levels.
JOBOBJECTINFOCLASS.JobObjectReserved1Information = 18
JOBOBJECTINFOCLASS.JobObjectReserved2Information = 19
JOBOBJECTINFOCLASS.JobObjectReserved3Information = 20
JOBOBJECTINFOCLASS.JobObjectReserved4Information = 21
JOBOBJECTINFOCLASS.JobObjectReserved5Information = 22
JOBOBJECTINFOCLASS.JobObjectReserved6Information = 23
JOBOBJECTINFOCLASS.JobObjectReserved7Information = 24
JOBOBJECTINFOCLASS.JobObjectReserved8Information = 25
JOBOBJECTINFOCLASS.JobObjectReserved9Information = 26
JOBOBJECTINFOCLASS.MaxJobObjectInfoClass = 27

PROC_PERF_UTILITY_TYPE = v_enum()
PROC_PERF_UTILITY_TYPE.ProcPerfUtilityTypeIncrease = 0
PROC_PERF_UTILITY_TYPE.ProcPerfUtilityTypeDecrease = 1
PROC_PERF_UTILITY_TYPE.ProcPerfUtilityTypeCoreParking = 2
PROC_PERF_UTILITY_TYPE.ProcPerfUtilityTypeMax = 3

PROC_PERF_UTILITY_TYPE = PROC_PERF_UTILITY_TYPE  # no-op alias removed below; kept table as-is

LSA_FOREST_TRUST_RECORD_TYPE = v_enum()
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustTopLevelName = 0
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustTopLevelNameEx = 1
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustDomainInfo = 2
# Last marker shares the value of the final real record type.
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustRecordTypeLast = 2

MEMORY_CACHING_TYPE_ORIG = v_enum()
MEMORY_CACHING_TYPE_ORIG.MmFrameBufferCached = 2

HAL_APIC_DESTINATION_MODE = v_enum()
HAL_APIC_DESTINATION_MODE.ApicDestinationModePhysical = 1
HAL_APIC_DESTINATION_MODE.ApicDestinationModeLogicalFlat = 2
HAL_APIC_DESTINATION_MODE.ApicDestinationModeLogicalClustered = 3
HAL_APIC_DESTINATION_MODE.ApicDestinationModeUnknown = 4

POWER_ACTION = v_enum()
POWER_ACTION.PowerActionNone = 0
POWER_ACTION.PowerActionReserved = 1
POWER_ACTION.PowerActionSleep = 2
POWER_ACTION.PowerActionHibernate = 3
POWER_ACTION.PowerActionShutdown = 4
POWER_ACTION.PowerActionShutdownReset = 5
POWER_ACTION.PowerActionShutdownOff = 6
POWER_ACTION.PowerActionWarmEject = 7

KINTERRUPT_MODE = v_enum()
KINTERRUPT_MODE.LevelSensitive = 0
KINTERRUPT_MODE.Latched = 1

# First members of MEMORY_CACHING_TYPE; the remaining members follow
# immediately after this run of statements.
MEMORY_CACHING_TYPE = v_enum()
MEMORY_CACHING_TYPE.MmNonCached = 0
MEMORY_CACHING_TYPE.MmCached = 1
MEMORY_CACHING_TYPE.MmWriteCombined = 2
# Remaining MEMORY_CACHING_TYPE members (table begins just above).
MEMORY_CACHING_TYPE.MmHardwareCoherentCached = 3
MEMORY_CACHING_TYPE.MmNonCachedUnordered = 4
MEMORY_CACHING_TYPE.MmUSWCCached = 5
MEMORY_CACHING_TYPE.MmMaximumCacheType = 6

# ---------------------------------------------------------------------------
# vstruct structure definitions.  Field declaration order defines the binary
# parse layout, so the order below must never be changed.  `_padXXXX` /
# `_pad...` members are explicit alignment filler (v_bytes).  Classes named
# `_unnamed_NNNN` are generator-emitted anonymous sub-structures.
# ---------------------------------------------------------------------------


class KEXECUTE_OPTIONS(vstruct.VStruct):
    """Single-byte execute-options flag field."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExecuteDisable = v_uint8()


class IO_PRIORITY_INFO(vstruct.VStruct):
    """Thread/page/IO priority information block."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint32()
        self.ThreadPriority = v_uint32()
        self.PagePriority = v_uint32()
        self.IoPriority = v_uint32()


class SID(vstruct.VStruct):
    """Security identifier header; SubAuthority is declared with one slot
    (variable-length in the native form)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Revision = v_uint8()
        self.SubAuthorityCount = v_uint8()
        self.IdentifierAuthority = SID_IDENTIFIER_AUTHORITY()
        self.SubAuthority = vstruct.VArray([v_uint32() for _ in xrange(1)])


class TEB32(vstruct.VStruct):
    """32-bit Thread Environment Block layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NtTib = NT_TIB32()
        self.EnvironmentPointer = v_uint32()
        self.ClientId = CLIENT_ID32()
        self.ActiveRpcHandle = v_uint32()
        self.ThreadLocalStoragePointer = v_uint32()
        self.ProcessEnvironmentBlock = v_uint32()
        self.LastErrorValue = v_uint32()
        self.CountOfOwnedCriticalSections = v_uint32()
        self.CsrClientThread = v_uint32()
        self.Win32ThreadInfo = v_uint32()
        self.User32Reserved = vstruct.VArray([v_uint32() for _ in xrange(26)])
        self.UserReserved = vstruct.VArray([v_uint32() for _ in xrange(5)])
        self.WOW32Reserved = v_uint32()
        self.CurrentLocale = v_uint32()
        self.FpSoftwareStatusRegister = v_uint32()
        self.SystemReserved1 = vstruct.VArray([v_uint32() for _ in xrange(54)])
        self.ExceptionCode = v_uint32()
        self.ActivationContextStackPointer = v_uint32()
        self.SpareBytes = vstruct.VArray([v_uint8() for _ in xrange(36)])
        self.TxFsContext = v_uint32()
        self.GdiTebBatch = GDI_TEB_BATCH32()
        self.RealClientId = CLIENT_ID32()
        self.GdiCachedProcessHandle = v_uint32()
        self.GdiClientPID = v_uint32()
        self.GdiClientTID = v_uint32()
        self.GdiThreadLocalInfo = v_uint32()
        self.Win32ClientInfo = vstruct.VArray([v_uint32() for _ in xrange(62)])
        self.glDispatchTable = vstruct.VArray([v_uint32() for _ in xrange(233)])
        self.glReserved1 = vstruct.VArray([v_uint32() for _ in xrange(29)])
        self.glReserved2 = v_uint32()
        self.glSectionInfo = v_uint32()
        self.glSection = v_uint32()
        self.glTable = v_uint32()
        self.glCurrentRC = v_uint32()
        self.glContext = v_uint32()
        self.LastStatusValue = v_uint32()
        self.StaticUnicodeString = STRING32()
        self.StaticUnicodeBuffer = vstruct.VArray([v_uint16() for _ in xrange(261)])
        self._pad0e0c = v_bytes(size=2)
        self.DeallocationStack = v_uint32()
        self.TlsSlots = vstruct.VArray([v_uint32() for _ in xrange(64)])
        self.TlsLinks = LIST_ENTRY32()
        self.Vdm = v_uint32()
        self.ReservedForNtRpc = v_uint32()
        self.DbgSsReserved = vstruct.VArray([v_uint32() for _ in xrange(2)])
        self.HardErrorMode = v_uint32()
        self.Instrumentation = vstruct.VArray([v_uint32() for _ in xrange(9)])
        self.ActivityId = GUID()
        self.SubProcessTag = v_uint32()
        self.PerflibData = v_uint32()
        self.EtwTraceData = v_uint32()
        self.WinSockData = v_uint32()
        self.GdiBatchCount = v_uint32()
        self.CurrentIdealProcessor = PROCESSOR_NUMBER()
        self.GuaranteedStackBytes = v_uint32()
        self.ReservedForPerf = v_uint32()
        self.ReservedForOle = v_uint32()
        self.WaitingOnLoaderLock = v_uint32()
        self.SavedPriorityState = v_uint32()
        self.ReservedForCodeCoverage = v_uint32()
        self.ThreadPoolData = v_uint32()
        self.TlsExpansionSlots = v_uint32()
        self.MuiGeneration = v_uint32()
        self.IsImpersonating = v_uint32()
        self.NlsCache = v_uint32()
        self.pShimData = v_uint32()
        self.HeapVirtualAffinity = v_uint16()
        self.LowFragHeapDataSlot = v_uint16()
        self.CurrentTransactionHandle = v_uint32()
        self.ActiveFrame = v_uint32()
        self.FlsData = v_uint32()
        self.PreferredLanguages = v_uint32()
        self.UserPrefLanguages = v_uint32()
        self.MergedPrefLanguages = v_uint32()
        self.MuiImpersonation = v_uint32()
        self.CrossTebFlags = v_uint16()
        self.SameTebFlags = v_uint16()
        self.TxnScopeEnterCallback = v_uint32()
        self.TxnScopeExitCallback = v_uint32()
        self.TxnScopeContext = v_uint32()
        self.LockCount = v_uint32()
        self.SpareUlong0 = v_uint32()
        self.ResourceRetValue = v_uint32()
        self.ReservedForWdf = v_uint32()


class WHEA_ERROR_PACKET_V2(vstruct.VStruct):
    """WHEA hardware-error packet, version 2 layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        self.Version = v_uint32()
        self.Length = v_uint32()
        self.Flags = WHEA_ERROR_PACKET_FLAGS()
        self.ErrorType = v_uint32()
        self.ErrorSeverity = v_uint32()
        self.ErrorSourceId = v_uint32()
        self.ErrorSourceType = v_uint32()
        self.NotifyType = GUID()
        self.Context = v_uint64()
        self.DataFormat = v_uint32()
        self.Reserved1 = v_uint32()
        self.DataOffset = v_uint32()
        self.DataLength = v_uint32()
        self.PshedDataOffset = v_uint32()
        self.PshedDataLength = v_uint32()


class GROUP_AFFINITY(vstruct.VStruct):
    """Processor-group affinity mask (32-bit mask form)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Mask = v_uint32()
        self.Group = v_uint16()
        self.Reserved = vstruct.VArray([v_uint16() for _ in xrange(3)])


class KTSS(vstruct.VStruct):
    """x86 task-state segment image."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Backlink = v_uint16()
        self.Reserved0 = v_uint16()
        self.Esp0 = v_uint32()
        self.Ss0 = v_uint16()
        self.Reserved1 = v_uint16()
        self.NotUsed1 = vstruct.VArray([v_uint32() for _ in xrange(4)])
        self.CR3 = v_uint32()
        self.Eip = v_uint32()
        self.EFlags = v_uint32()
        self.Eax = v_uint32()
        self.Ecx = v_uint32()
        self.Edx = v_uint32()
        self.Ebx = v_uint32()
        self.Esp = v_uint32()
        self.Ebp = v_uint32()
        self.Esi = v_uint32()
        self.Edi = v_uint32()
        self.Es = v_uint16()
        self.Reserved2 = v_uint16()
        self.Cs = v_uint16()
        self.Reserved3 = v_uint16()
        self.Ss = v_uint16()
        self.Reserved4 = v_uint16()
        self.Ds = v_uint16()
        self.Reserved5 = v_uint16()
        self.Fs = v_uint16()
        self.Reserved6 = v_uint16()
        self.Gs = v_uint16()
        self.Reserved7 = v_uint16()
        self.LDT = v_uint16()
        self.Reserved8 = v_uint16()
        self.Flags = v_uint16()
        self.IoMapBase = v_uint16()
        self.IoMaps = vstruct.VArray([KiIoAccessMap() for _ in xrange(1)])
        self.IntDirectionMap = vstruct.VArray([v_uint8() for _ in xrange(32)])


class CURDIR(vstruct.VStruct):
    """Current-directory record (path string plus directory handle)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DosPath = UNICODE_STRING()
        self.Handle = v_ptr32()


class KLOCK_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TreeNode = RTL_BALANCED_NODE()
        self.ThreadUnsafe = v_ptr32()
        self.LockState = KLOCK_ENTRY_LOCK_STATE()
        self.OwnerTree = RTL_RB_TREE()
        self.WaiterTree = RTL_RB_TREE()
        self.EntryLock = v_uint32()
        self.AllBoosts = v_uint16()
        self.IoNormalPriorityWaiterCount = v_uint16()


class PERFINFO_GROUPMASK(vstruct.VStruct):
    """Eight 32-bit masks used as a perf-group bit set."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Masks = vstruct.VArray([v_uint32() for _ in xrange(8)])


class HANDLE_TABLE_ENTRY_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AuditMask = v_uint32()


class KSCHEDULING_GROUP(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Value = v_uint16()
        self.Type = v_uint8()
        self.HardCap = v_uint8()
        self.RelativeWeight = v_uint32()
        self.QueryHistoryTimeStamp = v_uint64()
        self.NotificationCycles = v_uint64()
        self.SchedulingGroupList = LIST_ENTRY()
        self.NotificationDpc = v_ptr32()
        self._pad0040 = v_bytes(size=28)
        # Declared with one slot; native form is per-processor variable length.
        self.PerProcessor = vstruct.VArray([KSCB() for _ in xrange(1)])
        self._pad0140 = v_bytes(size=32)


class _unnamed_12482(vstruct.VStruct):
    # Anonymous resource-descriptor variant (64-bit length/alignment/range).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length64 = v_uint32()
        self.Alignment64 = v_uint32()
        self.MinimumAddress = LARGE_INTEGER()
        self.MaximumAddress = LARGE_INTEGER()


class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SectionOffset = v_uint32()
        self.SectionLength = v_uint32()
        self.Revision = WHEA_REVISION()
        self.ValidBits = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS()
        self.Reserved = v_uint8()
        self.Flags = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS()
        self.SectionType = GUID()
        self.FRUId = GUID()
        self.SectionSeverity = v_uint32()
        self.FRUText = vstruct.VArray([v_uint8() for _ in xrange(20)])


class _unnamed_9414(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IdType = v_uint32()


class _unnamed_9419(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceTextType = v_uint32()
        self.LocaleId = v_uint32()


class PROC_IDLE_POLICY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PromotePercent = v_uint8()
        self.DemotePercent = v_uint8()
        self.PromotePercentBase = v_uint8()
        self.DemotePercentBase = v_uint8()
        self.AllowScaling = v_uint8()


class RTL_TRACE_SEGMENT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Magic = v_uint32()
        self.Database = v_ptr32()
        self.NextSegment = v_ptr32()
        self.TotalSize = v_uint32()
        self.SegmentStart = v_ptr32()
        self.SegmentEnd = v_ptr32()
        self.SegmentFree = v_ptr32()


class PROC_FEEDBACK_COUNTER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InstantaneousRead = v_ptr32()
        self._pad0008 = v_bytes(size=4)
        self.LastActualCount = v_uint64()
        self.LastReferenceCount = v_uint64()
        self.CachedValue = v_uint32()
        self._pad0020 = v_bytes(size=4)
        self.Affinitized = v_uint8()
        self.Differential = v_uint8()
        self.DisableInterrupts = v_uint8()
        self._pad0024 = v_bytes(size=1)
        self.Context = v_uint32()


class CM_PARTIAL_RESOURCE_LIST(vstruct.VStruct):
    """Resource list header; PartialDescriptors declared with one slot
    (Count gives the native element count)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint16()
        self.Revision = v_uint16()
        self.Count = v_uint32()
        self.PartialDescriptors = vstruct.VArray(
            [CM_PARTIAL_RESOURCE_DESCRIPTOR() for _ in xrange(1)])


class RTL_SPARSE_BITMAP_RANGE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Lock = v_uint32()
        self.RangeBitmap = RTL_BITMAP()


class _unnamed_12550(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.RemappedFormat = ULARGE_INTEGER()


class DEVICE_CAPABILITIES(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.Version = v_uint16()
        # NOTE(review): the native struct packs many capability bits after
        # DeviceD1; this generated layout exposes them as one v_uint32.
        self.DeviceD1 = v_uint32()
        self.Address = v_uint32()
        self.UINumber = v_uint32()
        self.DeviceState = vstruct.VArray(
            [DEVICE_POWER_STATE() for _ in xrange(7)])
        self.SystemWake = v_uint32()
        self.DeviceWake = v_uint32()
        self.D1Latency = v_uint32()
        self.D2Latency = v_uint32()
        self.D3Latency = v_uint32()


class _unnamed_12559(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.bits = _unnamed_12568()


class _unnamed_9397(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IoResourceRequirementList = v_ptr32()


class KPROCESS(vstruct.VStruct):
    """Kernel process object (scheduler-level process state)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.ProfileListHead = LIST_ENTRY()
        self.DirectoryTableBase = v_uint32()
        self.LdtDescriptor = KGDTENTRY()
        self.Int21Descriptor = KIDTENTRY()
        self.ThreadListHead = LIST_ENTRY()
        self.ProcessLock = v_uint32()
        self.Affinity = KAFFINITY_EX()
        self.ReadyListHead = LIST_ENTRY()
        self.SwapListEntry = SINGLE_LIST_ENTRY()
        self.ActiveProcessors = KAFFINITY_EX()
        self.AutoAlignment = v_uint32()
        self.BasePriority = v_uint8()
        self.QuantumReset = v_uint8()
        self.Visited = v_uint8()
        self.Flags = KEXECUTE_OPTIONS()
        self.ThreadSeed = vstruct.VArray([v_uint32() for _ in xrange(1)])
        self.IdealNode = vstruct.VArray([v_uint16() for _ in xrange(1)])
        self.IdealGlobalNode = v_uint16()
        self.Spare1 = v_uint16()
        self.IopmOffset = v_uint16()
        self.SchedulingGroup = v_ptr32()
        self.StackCount = KSTACK_COUNT()
        self.ProcessListEntry = LIST_ENTRY()
        self.CycleTime = v_uint64()
        self.ContextSwitches = v_uint64()
        self.FreezeCount = v_uint32()
        self.KernelTime = v_uint32()
        self.UserTime = v_uint32()
        self.VdmTrapcHandler = v_ptr32()


class _unnamed_11858(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = v_uint32()
        self.Length = v_uint32()
        self.Reserved = v_uint32()


class DEVICE_OBJECT_POWER_EXTENSION(vstruct.VStruct):
    """Opaque/empty placeholder — no fields in this generated layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)


class _unnamed_9257(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
self.Length = v_uint32() class _unnamed_9252(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Length = v_uint32() self.EaList = v_ptr32() self.EaListLength = v_uint32() self.EaIndex = v_uint32() class HEAP_TAG_ENTRY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Allocs = v_uint32() self.Frees = v_uint32() self.Size = v_uint32() self.TagIndex = v_uint16() self.CreatorBackTraceIndex = v_uint16() self.TagName = vstruct.VArray([ v_uint16() for i in xrange(24) ]) class _unnamed_9156(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.SecurityContext = v_ptr32() self.Options = v_uint32() self.Reserved = v_uint16() self.ShareAccess = v_uint16() self.Parameters = v_ptr32() class PROCESSOR_IDLE_PREPARE_INFO(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Context = v_ptr32() self._pad0008 = v_bytes(size=4) self.Constraints = PROCESSOR_IDLE_CONSTRAINTS() self.DependencyCount = v_uint32() self.DependencyUsed = v_uint32() self.DependencyArray = v_ptr32() self.PlatformIdleStateIndex = v_uint32() self.ProcessorIdleStateIndex = v_uint32() self.IdleSelectFailureMask = v_uint32() class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Primary = v_uint32() class TP_CALLBACK_ENVIRON_V3(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Version = v_uint32() self.Pool = v_ptr32() self.CleanupGroup = v_ptr32() self.CleanupGroupCancelCallback = v_ptr32() self.RaceDll = v_ptr32() self.ActivationContext = v_ptr32() self.FinalizationCallback = v_ptr32() self.u = _unnamed_6606() self.CallbackPriority = v_uint32() self.Size = v_uint32() class RTL_ACTIVATION_CONTEXT_STACK_FRAME(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Previous = v_ptr32() self.ActivationContext = v_ptr32() self.Flags = v_uint32() class ALPC_PROCESS_CONTEXT(vstruct.VStruct): def __init__(self): 
vstruct.VStruct.__init__(self) self.Lock = EX_PUSH_LOCK() self.ViewListHead = LIST_ENTRY() self.PagedPoolQuotaCache = v_uint32() class OBJECT_HANDLE_INFORMATION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.HandleAttributes = v_uint32() self.GrantedAccess = v_uint32() class PROC_PERF_DOMAIN(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Link = LIST_ENTRY() self.Master = v_ptr32() self.Members = KAFFINITY_EX() self.ProcessorCount = v_uint32() self.Processors = v_ptr32() self.GetFFHThrottleState = v_ptr32() self.BoostPolicyHandler = v_ptr32() self.BoostModeHandler = v_ptr32() self.PerfSelectionHandler = v_ptr32() self.PerfControlHandler = v_ptr32() self.MaxFrequency = v_uint32() self.NominalFrequency = v_uint32() self.MaxPercent = v_uint32() self.MinPerfPercent = v_uint32() self.MinThrottlePercent = v_uint32() self.Coordination = v_uint8() self.HardPlatformCap = v_uint8() self.AffinitizeControl = v_uint8() self._pad004c = v_bytes(size=1) self.SelectedPercent = v_uint32() self.SelectedFrequency = v_uint32() self.DesiredPercent = v_uint32() self.MaxPolicyPercent = v_uint32() self.MinPolicyPercent = v_uint32() self.ConstrainedMaxPercent = v_uint32() self.ConstrainedMinPercent = v_uint32() self.GuaranteedPercent = v_uint32() self.TolerancePercent = v_uint32() self.SelectedState = v_uint64() self.Force = v_uint8() self._pad0080 = v_bytes(size=7) self.PerfChangeTime = v_uint64() self.PerfChangeIntervalCount = v_uint32() self._pad0090 = v_bytes(size=4) class XSTATE_CONFIGURATION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.EnabledFeatures = v_uint64() self.EnabledVolatileFeatures = v_uint64() self.Size = v_uint32() self.OptimizedSave = v_uint32() self.Features = vstruct.VArray([ XSTATE_FEATURE() for i in xrange(64) ]) class PS_CLIENT_SECURITY_CONTEXT(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ImpersonationData = v_uint32() class RTL_AVL_TABLE(vstruct.VStruct): 
def __init__(self): vstruct.VStruct.__init__(self) self.BalancedRoot = RTL_BALANCED_LINKS() self.OrderedPointer = v_ptr32() self.WhichOrderedElement = v_uint32() self.NumberGenericTableElements = v_uint32() self.DepthOfTree = v_uint32() self.RestartKey = v_ptr32() self.DeleteCount = v_uint32() self.CompareRoutine = v_ptr32() self.AllocateRoutine = v_ptr32() self.FreeRoutine = v_ptr32() self.TableContext = v_ptr32() class PROC_PERF_UTILITY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Affinitized = v_uint32() self.Performance = v_uint32() self.Total = v_uint32() class FS_FILTER_CALLBACKS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.SizeOfFsFilterCallbacks = v_uint32() self.Reserved = v_uint32() self.PreAcquireForSectionSynchronization = v_ptr32() self.PostAcquireForSectionSynchronization = v_ptr32() self.PreReleaseForSectionSynchronization = v_ptr32() self.PostReleaseForSectionSynchronization = v_ptr32() self.PreAcquireForCcFlush = v_ptr32() self.PostAcquireForCcFlush = v_ptr32() self.PreReleaseForCcFlush = v_ptr32() self.PostReleaseForCcFlush = v_ptr32() self.PreAcquireForModifiedPageWriter = v_ptr32() self.PostAcquireForModifiedPageWriter = v_ptr32() self.PreReleaseForModifiedPageWriter = v_ptr32() self.PostReleaseForModifiedPageWriter = v_ptr32() class OWNER_ENTRY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.OwnerThread = v_uint32() self.IoPriorityBoosted = v_uint32() class DEVOBJ_EXTENSION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Type = v_uint16() self.Size = v_uint16() self.DeviceObject = v_ptr32() self.PowerFlags = v_uint32() self.Dope = v_ptr32() self.ExtensionFlags = v_uint32() self.DeviceNode = v_ptr32() self.AttachedTo = v_ptr32() self.StartIoCount = v_uint32() self.StartIoKey = v_uint32() self.StartIoFlags = v_uint32() self.Vpb = v_ptr32() self.DependencyNode = v_ptr32() self.VerifierContext = v_ptr32() class 
KSTACK_CONTROL(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.StackBase = v_uint32() self.ActualLimit = v_uint32() self.PreviousTrapFrame = v_ptr32() self.PreviousExceptionList = v_ptr32() self.Previous = KERNEL_STACK_SEGMENT() class _unnamed_11829(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Start = LARGE_INTEGER() self.Length = v_uint32() class HEAP_LOCAL_SEGMENT_INFO(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.LocalData = v_ptr32() self.ActiveSubsegment = v_ptr32() self.CachedItems = vstruct.VArray([ v_ptr32() for i in xrange(16) ]) self.SListHeader = SLIST_HEADER() self.Counters = HEAP_BUCKET_COUNTERS() self.LastOpSequence = v_uint32() self.BucketIndex = v_uint16() self.LastUsed = v_uint16() self.NoThrashCount = v_uint16() self._pad0068 = v_bytes(size=6) class HANDLE_TABLE_ENTRY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.VolatileLowValue = v_uint32() self.HighValue = v_uint32() class HEAP_COUNTERS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.TotalMemoryReserved = v_uint32() self.TotalMemoryCommitted = v_uint32() self.TotalMemoryLargeUCR = v_uint32() self.TotalSizeInVirtualBlocks = v_uint32() self.TotalSegments = v_uint32() self.TotalUCRs = v_uint32() self.CommittOps = v_uint32() self.DeCommitOps = v_uint32() self.LockAcquires = v_uint32() self.LockCollisions = v_uint32() self.CommitRate = v_uint32() self.DecommittRate = v_uint32() self.CommitFailures = v_uint32() self.InBlockCommitFailures = v_uint32() self.PollIntervalCounter = v_uint32() self.DecommitsSinceLastCheck = v_uint32() self.HeapPollInterval = v_uint32() self.AllocAndFreeOps = v_uint32() self.AllocationIndicesActive = v_uint32() self.InBlockDeccommits = v_uint32() self.InBlockDeccomitSize = v_uint32() self.HighWatermarkSize = v_uint32() self.LastPolledSize = v_uint32() class MAILSLOT_CREATE_PARAMETERS(vstruct.VStruct): def __init__(self): 
vstruct.VStruct.__init__(self) self.MailslotQuota = v_uint32() self.MaximumMessageSize = v_uint32() self.ReadTimeout = LARGE_INTEGER() self.TimeoutSpecified = v_uint8() self._pad0018 = v_bytes(size=7) class FS_FILTER_CALLBACK_DATA(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.SizeOfFsFilterCallbackData = v_uint32() self.Operation = v_uint8() self.Reserved = v_uint8() self._pad0008 = v_bytes(size=2) self.DeviceObject = v_ptr32() self.FileObject = v_ptr32() self.Parameters = FS_FILTER_PARAMETERS() class PPM_IDLE_STATE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.DomainMembers = KAFFINITY_EX() self.Latency = v_uint32() self.BreakEvenDuration = v_uint32() self.Power = v_uint32() self.StateFlags = v_uint32() self.VetoAccounting = PPM_VETO_ACCOUNTING() self.StateType = v_uint8() self.InterruptsEnabled = v_uint8() self.Interruptible = v_uint8() self.ContextRetained = v_uint8() self.CacheCoherent = v_uint8() self.WakesSpuriously = v_uint8() self.PlatformOnly = v_uint8() self.NoCState = v_uint8() class PROC_FEEDBACK(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Lock = v_uint32() self._pad0008 = v_bytes(size=4) self.CyclesLast = v_uint64() self.CyclesActive = v_uint64() self.Counters = vstruct.VArray([ v_ptr32() for i in xrange(2) ]) self.LastUpdateTime = v_uint64() self.UnscaledTime = v_uint64() self.UnaccountedTime = v_uint64() self.ScaledTime = vstruct.VArray([ v_uint64() for i in xrange(2) ]) self.UnaccountedKernelTime = v_uint64() self.PerformanceScaledKernelTime = v_uint64() self.UserTimeLast = v_uint32() self.KernelTimeLast = v_uint32() self.KernelTimesIndex = v_uint8() self._pad0068 = v_bytes(size=7) class ACCESS_STATE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.OperationID = LUID() self.SecurityEvaluated = v_uint8() self.GenerateAudit = v_uint8() self.GenerateOnClose = v_uint8() self.PrivilegesAllocated = v_uint8() self.Flags = v_uint32() 
self.RemainingDesiredAccess = v_uint32() self.PreviouslyGrantedAccess = v_uint32() self.OriginalDesiredAccess = v_uint32() self.SubjectSecurityContext = SECURITY_SUBJECT_CONTEXT() self.SecurityDescriptor = v_ptr32() self.AuxData = v_ptr32() self.Privileges = _unnamed_8729() self.AuditPrivileges = v_uint8() self._pad0064 = v_bytes(size=3) self.ObjectName = UNICODE_STRING() self.ObjectTypeName = UNICODE_STRING() class TP_CALLBACK_INSTANCE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) class PROC_IDLE_ACCOUNTING(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.StateCount = v_uint32() self.TotalTransitions = v_uint32() self.ResetCount = v_uint32() self.AbortCount = v_uint32() self.StartTime = v_uint64() self.PriorIdleTime = v_uint64() self.TimeUnit = v_uint32() self._pad0028 = v_bytes(size=4) self.State = vstruct.VArray([ PROC_IDLE_STATE_ACCOUNTING() for i in xrange(1) ]) class GDI_TEB_BATCH(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Offset = v_uint32() self.HDC = v_uint32() self.Buffer = vstruct.VArray([ v_uint32() for i in xrange(310) ]) class STRING32(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Length = v_uint16() self.MaximumLength = v_uint16() self.Buffer = v_uint32() class THREAD_PERFORMANCE_DATA(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Size = v_uint16() self.Version = v_uint16() self.ProcessorNumber = PROCESSOR_NUMBER() self.ContextSwitches = v_uint32() self.HwCountersCount = v_uint32() self.UpdateCount = v_uint64() self.WaitReasonBitMap = v_uint64() self.HardwareCounters = v_uint64() self.CycleTime = COUNTER_READING() self.HwCounters = vstruct.VArray([ COUNTER_READING() for i in xrange(16) ]) class PAGEFAULT_HISTORY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) class ECP_LIST(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) class ENODE(vstruct.VStruct): def __init__(self): 
vstruct.VStruct.__init__(self) self.Ncb = KNODE() self.ExWorkQueues = vstruct.VArray([ v_ptr32() for i in xrange(8) ]) self.ExWorkQueue = EX_WORK_QUEUE() self.ExpThreadSetManagerEvent = KEVENT() self.ExpDeadlockTimer = KTIMER() self.ExpThreadReaperEvent = KEVENT() self.WaitBlocks = vstruct.VArray([ KWAIT_BLOCK() for i in xrange(3) ]) self.ExpWorkerThreadBalanceManagerPtr = v_ptr32() self.ExpWorkerSeed = v_uint32() self.ExWorkerFullInit = v_uint32() self._pad0340 = v_bytes(size=12) class NT_TIB64(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ExceptionList = v_uint64() self.StackBase = v_uint64() self.StackLimit = v_uint64() self.SubSystemTib = v_uint64() self.FiberData = v_uint64() self.ArbitraryUserPointer = v_uint64() self.Self = v_uint64() class SECTION_OBJECT_POINTERS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.DataSectionObject = v_ptr32() self.SharedCacheMap = v_ptr32() self.ImageSectionObject = v_ptr32() class MDL(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Next = v_ptr32() self.Size = v_uint16() self.MdlFlags = v_uint16() self.Process = v_ptr32() self.MappedSystemVa = v_ptr32() self.StartVa = v_ptr32() self.ByteCount = v_uint32() self.ByteOffset = v_uint32() class KTRAP_FRAME(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.DbgEbp = v_uint32() self.DbgEip = v_uint32() self.DbgArgMark = v_uint32() self.TempSegCs = v_uint16() self.Logging = v_uint8() self.FrameType = v_uint8() self.TempEsp = v_uint32() self.Dr0 = v_uint32() self.Dr1 = v_uint32() self.Dr2 = v_uint32() self.Dr3 = v_uint32() self.Dr6 = v_uint32() self.Dr7 = v_uint32() self.SegGs = v_uint32() self.SegEs = v_uint32() self.SegDs = v_uint32() self.Edx = v_uint32() self.Ecx = v_uint32() self.Eax = v_uint32() self.PreviousPreviousMode = v_uint8() self.EntropyQueueDpc = v_uint8() self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(2) ]) self.MxCsr = v_uint32() self.ExceptionList = 
v_ptr32() self.SegFs = v_uint32() self.Edi = v_uint32() self.Esi = v_uint32() self.Ebx = v_uint32() self.Ebp = v_uint32() self.ErrCode = v_uint32() self.Eip = v_uint32() self.SegCs = v_uint32() self.EFlags = v_uint32() self.HardwareEsp = v_uint32() self.HardwareSegSs = v_uint32() self.V86Es = v_uint32() self.V86Ds = v_uint32() self.V86Fs = v_uint32() self.V86Gs = v_uint32() class MCI_ADDR(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Address = v_uint32() self.Reserved = v_uint32() class IO_TIMER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Type = v_uint16() self.TimerFlag = v_uint16() self.TimerList = LIST_ENTRY() self.TimerRoutine = v_ptr32() self.Context = v_ptr32() self.DeviceObject = v_ptr32() class WHEA_REVISION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.MinorRevision = v_uint8() self.MajorRevision = v_uint8() class _unnamed_9029(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.AsynchronousParameters = _unnamed_9044() class TP_CLEANUP_GROUP(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) class _unnamed_9027(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.MasterIrp = v_ptr32() class _unnamed_12448(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.MinimumVector = v_uint32() self.MaximumVector = v_uint32() self.AffinityPolicy = v_uint16() self.Group = v_uint16() self.PriorityPolicy = v_uint32() self.TargetedProcessors = v_uint32() class PROC_IDLE_SNAP(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Time = v_uint64() self.Idle = v_uint64() class SECURITY_DESCRIPTOR(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Revision = v_uint8() self.Sbz1 = v_uint8() self.Control = v_uint16() self.Owner = v_ptr32() self.Group = v_ptr32() self.Sacl = v_ptr32() self.Dacl = v_ptr32() class EX_WORK_QUEUE(vstruct.VStruct): def __init__(self): 
vstruct.VStruct.__init__(self) self.WorkPriQueue = KPRIQUEUE() self.Node = v_ptr32() self.WorkItemsProcessed = v_uint32() self.WorkItemsProcessedLastPass = v_uint32() self.ThreadCount = v_uint32() self.MinThreads = v_uint32() self.MaxThreads = v_uint32() self.QueueIndex = v_uint32() class OBJECT_TYPE_INITIALIZER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Length = v_uint16() self.ObjectTypeFlags = v_uint8() self._pad0004 = v_bytes(size=1) self.ObjectTypeCode = v_uint32() self.InvalidAttributes = v_uint32() self.GenericMapping = GENERIC_MAPPING() self.ValidAccessMask = v_uint32() self.RetainAccess = v_uint32() self.PoolType = v_uint32() self.DefaultPagedPoolCharge = v_uint32() self.DefaultNonPagedPoolCharge = v_uint32() self.DumpProcedure = v_ptr32() self.OpenProcedure = v_ptr32() self.CloseProcedure = v_ptr32() self.DeleteProcedure = v_ptr32() self.ParseProcedure = v_ptr32() self.SecurityProcedure = v_ptr32() self.QueryNameProcedure = v_ptr32() self.OkayToCloseProcedure = v_ptr32() self.WaitObjectFlagMask = v_uint32() self.WaitObjectFlagOffset = v_uint16() self.WaitObjectPointerOffset = v_uint16() class _unnamed_6606(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Flags = v_uint32() class _unnamed_6609(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.LongFunction = v_uint32() class XSTATE_SAVE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Reserved1 = v_uint64() self.Reserved2 = v_uint32() self.Prev = v_ptr32() self.Reserved3 = v_ptr32() self.Thread = v_ptr32() self.Reserved4 = v_ptr32() self.Level = v_uint8() self._pad0020 = v_bytes(size=3) class HEAP_ENTRY_EXTRA(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.AllocatorBackTraceIndex = v_uint16() self.TagIndex = v_uint16() self.Settable = v_uint32() class _unnamed_9291(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.SecurityInformation = v_uint32() 
self.Length = v_uint32() class HEAP_PSEUDO_TAG_ENTRY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Allocs = v_uint32() self.Frees = v_uint32() self.Size = v_uint32() class EPROCESS_VALUES(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.KernelTime = v_uint64() self.UserTime = v_uint64() self.CycleTime = v_uint64() self.ContextSwitches = v_uint64() self.ReadOperationCount = v_uint64() self.WriteOperationCount = v_uint64() self.OtherOperationCount = v_uint64() self.ReadTransferCount = v_uint64() self.WriteTransferCount = v_uint64() self.OtherTransferCount = v_uint64() class PAGED_LOOKASIDE_LIST(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.L = GENERAL_LOOKASIDE() self.Lock__ObsoleteButDoNotDelete = FAST_MUTEX() self._pad00c0 = v_bytes(size=32) class ETHREAD(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Tcb = KTHREAD() self.CreateTime = LARGE_INTEGER() self.ExitTime = LARGE_INTEGER() self.ChargeOnlySession = v_ptr32() self.PostBlockList = LIST_ENTRY() self.TerminationPort = v_ptr32() self.ActiveTimerListLock = v_uint32() self.ActiveTimerListHead = LIST_ENTRY() self.Cid = CLIENT_ID() self.KeyedWaitSemaphore = KSEMAPHORE() self.ClientSecurity = PS_CLIENT_SECURITY_CONTEXT() self.IrpList = LIST_ENTRY() self.TopLevelIrp = v_uint32() self.DeviceToVerify = v_ptr32() self.Win32StartAddress = v_ptr32() self.LegacyPowerObject = v_ptr32() self.ThreadListEntry = LIST_ENTRY() self.RundownProtect = EX_RUNDOWN_REF() self.ThreadLock = EX_PUSH_LOCK() self.ReadClusterSize = v_uint32() self.MmLockOrdering = v_uint32() self.CmLockOrdering = v_uint32() self.CrossThreadFlags = v_uint32() self.SameThreadPassiveFlags = v_uint32() self.SameThreadApcFlags = v_uint32() self.CacheManagerActive = v_uint8() self.DisablePageFaultClustering = v_uint8() self.ActiveFaultCount = v_uint8() self.LockOrderState = v_uint8() self.AlpcMessageId = v_uint32() self.AlpcMessage = v_ptr32() self.ExitStatus 
= v_uint32() self.AlpcWaitListEntry = LIST_ENTRY() self.CacheManagerCount = v_uint32() self.IoBoostCount = v_uint32() self.BoostList = LIST_ENTRY() self.DeboostList = LIST_ENTRY() self.BoostListLock = v_uint32() self.IrpListLock = v_uint32() self.ReservedForSynchTracking = v_ptr32() self.CmCallbackListHead = SINGLE_LIST_ENTRY() self.ActivityId = v_ptr32() self.SeLearningModeListHead = SINGLE_LIST_ENTRY() self.VerifierContext = v_ptr32() self.KernelStackReference = v_uint32() self.AdjustedClientToken = v_ptr32() class PROCESS_DISK_COUNTERS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.BytesRead = v_uint64() self.BytesWritten = v_uint64() self.ReadOperationCount = v_uint64() self.WriteOperationCount = v_uint64() self.FlushOperationCount = v_uint64() class KDPC_LIST(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ListHead = SINGLE_LIST_ENTRY() self.LastEntry = v_ptr32() class RTL_BITMAP(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.SizeOfBitMap = v_uint32() self.Buffer = v_ptr32() class LARGE_INTEGER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.LowPart = v_uint32() self.HighPart = v_uint32() class _unnamed_9294(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.SecurityInformation = v_uint32() self.SecurityDescriptor = v_ptr32() class _unnamed_9447(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.PowerSequence = v_ptr32() class NPAGED_LOOKASIDE_LIST(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.L = GENERAL_LOOKASIDE() self.Lock__ObsoleteButDoNotDelete = v_uint32() self._pad00c0 = v_bytes(size=60) class _unnamed_9441(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.PowerState = v_uint32() class _unnamed_12568(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Mask = v_uint32() class CLIENT_ID32(vstruct.VStruct): def __init__(self): 
vstruct.VStruct.__init__(self) self.UniqueProcess = v_uint32() self.UniqueThread = v_uint32() class VPB(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Type = v_uint16() self.Size = v_uint16() self.Flags = v_uint16() self.VolumeLabelLength = v_uint16() self.DeviceObject = v_ptr32() self.RealDevice = v_ptr32() self.SerialNumber = v_uint32() self.ReferenceCount = v_uint32() self.VolumeLabel = vstruct.VArray([ v_uint16() for i in xrange(32) ]) class _unnamed_12560(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.bits = _unnamed_12580() class PP_LOOKASIDE_LIST(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.P = v_ptr32() self.L = v_ptr32() class JOBOBJECT_WAKE_FILTER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.HighEdgeFilter = v_uint32() self.LowEdgeFilter = v_uint32() class OBJECT_NAME_INFORMATION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Name = UNICODE_STRING() class IO_RESOURCE_LIST(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Version = v_uint16() self.Revision = v_uint16() self.Count = v_uint32() self.Descriptors = vstruct.VArray([ IO_RESOURCE_DESCRIPTOR() for i in xrange(1) ]) class KUSER_SHARED_DATA(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.TickCountLowDeprecated = v_uint32() self.TickCountMultiplier = v_uint32() self.InterruptTime = KSYSTEM_TIME() self.SystemTime = KSYSTEM_TIME() self.TimeZoneBias = KSYSTEM_TIME() self.ImageNumberLow = v_uint16() self.ImageNumberHigh = v_uint16() self.NtSystemRoot = vstruct.VArray([ v_uint16() for i in xrange(260) ]) self.MaxStackTraceDepth = v_uint32() self.CryptoExponent = v_uint32() self.TimeZoneId = v_uint32() self.LargePageMinimum = v_uint32() self.AitSamplingValue = v_uint32() self.AppCompatFlag = v_uint32() self.RNGSeedVersion = v_uint64() self.GlobalValidationRunlevel = v_uint32() self.TimeZoneBiasStamp = v_uint32() 
self.Reserved2 = v_uint32() self.NtProductType = v_uint32() self.ProductTypeIsValid = v_uint8() self.Reserved0 = vstruct.VArray([ v_uint8() for i in xrange(1) ]) self.NativeProcessorArchitecture = v_uint16() self.NtMajorVersion = v_uint32() self.NtMinorVersion = v_uint32() self.ProcessorFeatures = vstruct.VArray([ v_uint8() for i in xrange(64) ]) self.Reserved1 = v_uint32() self.Reserved3 = v_uint32() self.TimeSlip = v_uint32() self.AlternativeArchitecture = v_uint32() self.AltArchitecturePad = vstruct.VArray([ v_uint32() for i in xrange(1) ]) self.SystemExpirationDate = LARGE_INTEGER() self.SuiteMask = v_uint32() self.KdDebuggerEnabled = v_uint8() self.MitigationPolicies = v_uint8() self.Reserved6 = vstruct.VArray([ v_uint8() for i in xrange(2) ]) self.ActiveConsoleId = v_uint32() self.DismountCount = v_uint32() self.ComPlusPackage = v_uint32() self.LastSystemRITEventTickCount = v_uint32() self.NumberOfPhysicalPages = v_uint32() self.SafeBootMode = v_uint8() self.Reserved12 = vstruct.VArray([ v_uint8() for i in xrange(3) ]) self.SharedDataFlags = v_uint32() self.DataFlagsPad = vstruct.VArray([ v_uint32() for i in xrange(1) ]) self.TestRetInstruction = v_uint64() self.QpcFrequency = v_uint64() self.SystemCallPad = vstruct.VArray([ v_uint64() for i in xrange(3) ]) self.TickCount = KSYSTEM_TIME() self.TickCountPad = vstruct.VArray([ v_uint32() for i in xrange(1) ]) self.Cookie = v_uint32() self.CookiePad = vstruct.VArray([ v_uint32() for i in xrange(1) ]) self.ConsoleSessionForegroundProcessId = v_uint64() self.TimeUpdateLock = v_uint64() self.BaselineSystemTimeQpc = v_uint64() self.BaselineInterruptTimeQpc = v_uint64() self.QpcSystemTimeIncrement = v_uint64() self.QpcInterruptTimeIncrement = v_uint64() self.QpcSystemTimeIncrement32 = v_uint32() self.QpcInterruptTimeIncrement32 = v_uint32() self.QpcSystemTimeIncrementShift = v_uint8() self.QpcInterruptTimeIncrementShift = v_uint8() self.UnparkedProcessorCount = v_uint16() self.Reserved8 = vstruct.VArray([ v_uint8() 
for i in xrange(12) ]) self.UserModeGlobalLogger = vstruct.VArray([ v_uint16() for i in xrange(16) ]) self.ImageFileExecutionOptions = v_uint32() self.LangGenerationCount = v_uint32() self.Reserved4 = v_uint64() self.InterruptTimeBias = v_uint64() self.QpcBias = v_uint64() self.ActiveProcessorCount = v_uint32() self.ActiveGroupCount = v_uint8() self.Reserved9 = v_uint8() self.QpcData = v_uint16() self.TimeZoneBiasEffectiveStart = LARGE_INTEGER() self.TimeZoneBiasEffectiveEnd = LARGE_INTEGER() self.XState = XSTATE_CONFIGURATION() class SYSTEM_POWER_STATE_CONTEXT(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Reserved1 = v_uint32() class SYNCH_COUNTERS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.SpinLockAcquireCount = v_uint32() self.SpinLockContentionCount = v_uint32() self.SpinLockSpinCount = v_uint32() self.IpiSendRequestBroadcastCount = v_uint32() self.IpiSendRequestRoutineCount = v_uint32() self.IpiSendSoftwareInterruptCount = v_uint32() self.ExInitializeResourceCount = v_uint32() self.ExReInitializeResourceCount = v_uint32() self.ExDeleteResourceCount = v_uint32() self.ExecutiveResourceAcquiresCount = v_uint32() self.ExecutiveResourceContentionsCount = v_uint32() self.ExecutiveResourceReleaseExclusiveCount = v_uint32() self.ExecutiveResourceReleaseSharedCount = v_uint32() self.ExecutiveResourceConvertsCount = v_uint32() self.ExAcqResExclusiveAttempts = v_uint32() self.ExAcqResExclusiveAcquiresExclusive = v_uint32() self.ExAcqResExclusiveAcquiresExclusiveRecursive = v_uint32() self.ExAcqResExclusiveWaits = v_uint32() self.ExAcqResExclusiveNotAcquires = v_uint32() self.ExAcqResSharedAttempts = v_uint32() self.ExAcqResSharedAcquiresExclusive = v_uint32() self.ExAcqResSharedAcquiresShared = v_uint32() self.ExAcqResSharedAcquiresSharedRecursive = v_uint32() self.ExAcqResSharedWaits = v_uint32() self.ExAcqResSharedNotAcquires = v_uint32() self.ExAcqResSharedStarveExclusiveAttempts = v_uint32() 
self.ExAcqResSharedStarveExclusiveAcquiresExclusive = v_uint32() self.ExAcqResSharedStarveExclusiveAcquiresShared = v_uint32() self.ExAcqResSharedStarveExclusiveAcquiresSharedRecursive = v_uint32() self.ExAcqResSharedStarveExclusiveWaits = v_uint32() self.ExAcqResSharedStarveExclusiveNotAcquires = v_uint32() self.ExAcqResSharedWaitForExclusiveAttempts = v_uint32() self.ExAcqResSharedWaitForExclusiveAcquiresExclusive = v_uint32() self.ExAcqResSharedWaitForExclusiveAcquiresShared = v_uint32() self.ExAcqResSharedWaitForExclusiveAcquiresSharedRecursive = v_uint32() self.ExAcqResSharedWaitForExclusiveWaits = v_uint32() self.ExAcqResSharedWaitForExclusiveNotAcquires = v_uint32() self.ExSetResOwnerPointerExclusive = v_uint32() self.ExSetResOwnerPointerSharedNew = v_uint32() self.ExSetResOwnerPointerSharedOld = v_uint32() self.ExTryToAcqExclusiveAttempts = v_uint32() self.ExTryToAcqExclusiveAcquires = v_uint32() self.ExBoostExclusiveOwner = v_uint32() self.ExBoostSharedOwners = v_uint32() self.ExEtwSynchTrackingNotificationsCount = v_uint32() self.ExEtwSynchTrackingNotificationsAccountedCount = v_uint32() class FS_FILTER_PARAMETERS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.AcquireForModifiedPageWriter = _unnamed_11947() self._pad0014 = v_bytes(size=12) class HEAP(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Entry = HEAP_ENTRY() self.SegmentSignature = v_uint32() self.SegmentFlags = v_uint32() self.SegmentListEntry = LIST_ENTRY() self.Heap = v_ptr32() self.BaseAddress = v_ptr32() self.NumberOfPages = v_uint32() self.FirstEntry = v_ptr32() self.LastValidEntry = v_ptr32() self.NumberOfUnCommittedPages = v_uint32() self.NumberOfUnCommittedRanges = v_uint32() self.SegmentAllocatorBackTraceIndex = v_uint16() self.Reserved = v_uint16() self.UCRSegmentList = LIST_ENTRY() self.Flags = v_uint32() self.ForceFlags = v_uint32() self.CompatibilityFlags = v_uint32() self.EncodeFlagMask = v_uint32() self.Encoding = HEAP_ENTRY() 
self.Interceptor = v_uint32() self.VirtualMemoryThreshold = v_uint32() self.Signature = v_uint32() self.SegmentReserve = v_uint32() self.SegmentCommit = v_uint32() self.DeCommitFreeBlockThreshold = v_uint32() self.DeCommitTotalFreeThreshold = v_uint32() self.TotalFreeSize = v_uint32() self.MaximumAllocationSize = v_uint32() self.ProcessHeapsListIndex = v_uint16() self.HeaderValidateLength = v_uint16() self.HeaderValidateCopy = v_ptr32() self.NextAvailableTagIndex = v_uint16() self.MaximumTagIndex = v_uint16() self.TagEntries = v_ptr32() self.UCRList = LIST_ENTRY() self.AlignRound = v_uint32() self.AlignMask = v_uint32() self.VirtualAllocdBlocks = LIST_ENTRY() self.SegmentList = LIST_ENTRY() self.AllocatorBackTraceIndex = v_uint16() self._pad00b0 = v_bytes(size=2) self.NonDedicatedListLength = v_uint32() self.BlocksIndex = v_ptr32() self.UCRIndex = v_ptr32() self.PseudoTagEntries = v_ptr32() self.FreeLists = LIST_ENTRY() self.LockVariable = v_ptr32() self.CommitRoutine = v_ptr32() self.FrontEndHeap = v_ptr32() self.FrontHeapLockCount = v_uint16() self.FrontEndHeapType = v_uint8() self.RequestedFrontEndHeapType = v_uint8() self.FrontEndHeapUsageData = v_ptr32() self.FrontEndHeapMaximumIndex = v_uint16() self.FrontEndHeapStatusBitmap = vstruct.VArray([ v_uint8() for i in xrange(257) ]) self._pad01e0 = v_bytes(size=1) self.Counters = HEAP_COUNTERS() self.TuningParameters = HEAP_TUNING_PARAMETERS() self._pad0248 = v_bytes(size=4) class IO_STATUS_BLOCK(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Status = v_uint32() self.Information = v_uint32() class PRIVILEGE_SET(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.PrivilegeCount = v_uint32() self.Control = v_uint32() self.Privilege = vstruct.VArray([ LUID_AND_ATTRIBUTES() for i in xrange(1) ]) class CM_RESOURCE_LIST(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Count = v_uint32() self.List = vstruct.VArray([ CM_FULL_RESOURCE_DESCRIPTOR() for 
i in xrange(1) ]) class WNF_STATE_NAME(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Data = vstruct.VArray([ v_uint32() for i in xrange(2) ]) class EPROCESS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Pcb = KPROCESS() self.ProcessLock = EX_PUSH_LOCK() self._pad00a8 = v_bytes(size=4) self.CreateTime = LARGE_INTEGER() self.RundownProtect = EX_RUNDOWN_REF() self.UniqueProcessId = v_ptr32() self.ActiveProcessLinks = LIST_ENTRY() self.Flags2 = v_uint32() self.Flags = v_uint32() self.ProcessQuotaUsage = vstruct.VArray([ v_uint32() for i in xrange(2) ]) self.ProcessQuotaPeak = vstruct.VArray([ v_uint32() for i in xrange(2) ]) self.PeakVirtualSize = v_uint32() self.VirtualSize = v_uint32() self.SessionProcessLinks = LIST_ENTRY() self.ExceptionPortData = v_ptr32() self.Token = EX_FAST_REF() self.WorkingSetPage = v_uint32() self.AddressCreationLock = EX_PUSH_LOCK() self.PageTableCommitmentLock = EX_PUSH_LOCK() self.RotateInProgress = v_ptr32() self.ForkInProgress = v_ptr32() self.CommitChargeJob = v_ptr32() self.CloneRoot = RTL_AVL_TREE() self.NumberOfPrivatePages = v_uint32() self.NumberOfLockedPages = v_uint32() self.Win32Process = v_ptr32() self.Job = v_ptr32() self.SectionObject = v_ptr32() self.SectionBaseAddress = v_ptr32() self.Cookie = v_uint32() self.VdmObjects = v_ptr32() self.WorkingSetWatch = v_ptr32() self.Win32WindowStation = v_ptr32() self.InheritedFromUniqueProcessId = v_ptr32() self.LdtInformation = v_ptr32() self.OwnerProcessId = v_uint32() self.Peb = v_ptr32() self.Session = v_ptr32() self.AweInfo = v_ptr32() self.QuotaBlock = v_ptr32() self.ObjectTable = v_ptr32() self.DebugPort = v_ptr32() self.PaeTop = v_ptr32() self.DeviceMap = v_ptr32() self.EtwDataSource = v_ptr32() self._pad0168 = v_bytes(size=4) self.PageDirectoryPte = v_uint64() self.ImageFileName = vstruct.VArray([ v_uint8() for i in xrange(15) ]) self.PriorityClass = v_uint8() self.SecurityPort = v_ptr32() self.SeAuditProcessCreationInfo 
= SE_AUDIT_PROCESS_CREATION_INFO() self.JobLinks = LIST_ENTRY() self.HighestUserAddress = v_ptr32() self.ThreadListHead = LIST_ENTRY() self.ActiveThreads = v_uint32() self.ImagePathHash = v_uint32() self.DefaultHardErrorProcessing = v_uint32() self.LastThreadExitStatus = v_uint32() self.PrefetchTrace = EX_FAST_REF() self.LockedPagesList = v_ptr32() self._pad01b8 = v_bytes(size=4) self.ReadOperationCount = LARGE_INTEGER() self.WriteOperationCount = LARGE_INTEGER() self.OtherOperationCount = LARGE_INTEGER() self.ReadTransferCount = LARGE_INTEGER() self.WriteTransferCount = LARGE_INTEGER() self.OtherTransferCount = LARGE_INTEGER() self.CommitCharge = v_uint32() self.Vm = MMSUPPORT() self.MmProcessLinks = LIST_ENTRY() self.ModifiedPageCount = v_uint32() self.ExitStatus = v_uint32() self.VadRoot = RTL_AVL_TREE() self.VadHint = v_ptr32() self.VadCount = v_uint32() self.VadPhysicalPages = v_uint32() self.VadPhysicalPagesLimit = v_uint32() self.AlpcContext = ALPC_PROCESS_CONTEXT() self.TimerResolutionLink = LIST_ENTRY() self.TimerResolutionStackRecord = v_ptr32() self.RequestedTimerResolution = v_uint32() self.SmallestTimerResolution = v_uint32() self._pad02b0 = v_bytes(size=4) self.ExitTime = LARGE_INTEGER() self.ActiveThreadsHighWatermark = v_uint32() self.LargePrivateVadCount = v_uint32() self.ThreadListLock = EX_PUSH_LOCK() self.WnfContext = v_ptr32() self.Spare0 = v_uint32() self.SignatureLevel = v_uint8() self.SectionSignatureLevel = v_uint8() self.Protection = PS_PROTECTION() self.SpareByte20 = vstruct.VArray([ v_uint8() for i in xrange(1) ]) self.Flags3 = v_uint32() self.SvmReserved = v_uint32() self.SvmReserved1 = v_ptr32() self.SvmReserved2 = v_uint32() self.LastFreezeInterruptTime = v_uint64() self.DiskCounters = v_ptr32() self.KeepAliveCounter = v_uint32() self.NoWakeKeepAliveCounter = v_uint32() self._pad02f8 = v_bytes(size=4) self.DeepFreezeStartTime = v_uint64() self.CommitChargeLimit = v_uint32() self.CommitChargePeak = v_uint32() 
self.HighPriorityFaultsAllowed = v_uint32() self._pad0310 = v_bytes(size=4) class _unnamed_11862(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.DataSize = v_uint32() self.Reserved1 = v_uint32() self.Reserved2 = v_uint32() class _unnamed_11866(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Start = LARGE_INTEGER() self.Length40 = v_uint32() class _unnamed_11948(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ResourceToRelease = v_ptr32() class POWER_SEQUENCE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.SequenceD1 = v_uint32() self.SequenceD2 = v_uint32() self.SequenceD3 = v_uint32() class EVENT_RECORD(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.EventHeader = EVENT_HEADER() self.BufferContext = ETW_BUFFER_CONTEXT() self.ExtendedDataCount = v_uint16() self.UserDataLength = v_uint16() self.ExtendedData = v_ptr32() self.UserData = v_ptr32() self.UserContext = v_ptr32() self._pad0068 = v_bytes(size=4) class TEB_ACTIVE_FRAME_CONTEXT(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Flags = v_uint32() self.FrameName = v_ptr32() class _unnamed_11947(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.EndingOffset = v_ptr32() self.ResourceToRelease = v_ptr32() class _unnamed_11869(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Start = LARGE_INTEGER() self.Length48 = v_uint32() class KTIMER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Header = DISPATCHER_HEADER() self.DueTime = ULARGE_INTEGER() self.TimerListEntry = LIST_ENTRY() self.Dpc = v_ptr32() self.Period = v_uint32() class _unnamed_9163(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Length = v_uint32() self.Key = v_uint32() self.ByteOffset = LARGE_INTEGER() class CM_PARTIAL_RESOURCE_DESCRIPTOR(vstruct.VStruct): def __init__(self): 
        # (body of CM_PARTIAL_RESOURCE_DESCRIPTOR.__init__, begun above)
        vstruct.VStruct.__init__(self)
        self.Type = v_uint8()
        self.ShareDisposition = v_uint8()
        self.Flags = v_uint16()
        self.u = _unnamed_11511()


class EVENT_HEADER_EXTENDED_DATA_ITEM(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Reserved1 = v_uint16()
        self.ExtType = v_uint16()
        self.Linkage = v_uint16()
        self.DataSize = v_uint16()
        self.DataPtr = v_uint64()


class CM_FULL_RESOURCE_DESCRIPTOR(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InterfaceType = v_uint32()
        self.BusNumber = v_uint32()
        self.PartialResourceList = CM_PARTIAL_RESOURCE_LIST()


class KTIMER_TABLE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TimerExpiry = vstruct.VArray([ v_ptr32() for i in xrange(16) ])
        self.TimerEntries = vstruct.VArray([ KTIMER_TABLE_ENTRY() for i in xrange(256) ])


class FAST_IO_DISPATCH(vstruct.VStruct):
    # File-system fast-I/O entry points: a size followed by callback pointers.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SizeOfFastIoDispatch = v_uint32()
        self.FastIoCheckIfPossible = v_ptr32()
        self.FastIoRead = v_ptr32()
        self.FastIoWrite = v_ptr32()
        self.FastIoQueryBasicInfo = v_ptr32()
        self.FastIoQueryStandardInfo = v_ptr32()
        self.FastIoLock = v_ptr32()
        self.FastIoUnlockSingle = v_ptr32()
        self.FastIoUnlockAll = v_ptr32()
        self.FastIoUnlockAllByKey = v_ptr32()
        self.FastIoDeviceControl = v_ptr32()
        self.AcquireFileForNtCreateSection = v_ptr32()
        self.ReleaseFileForNtCreateSection = v_ptr32()
        self.FastIoDetachDevice = v_ptr32()
        self.FastIoQueryNetworkOpenInfo = v_ptr32()
        self.AcquireForModWrite = v_ptr32()
        self.MdlRead = v_ptr32()
        self.MdlReadComplete = v_ptr32()
        self.PrepareMdlWrite = v_ptr32()
        self.MdlWriteComplete = v_ptr32()
        self.FastIoReadCompressed = v_ptr32()
        self.FastIoWriteCompressed = v_ptr32()
        self.MdlReadCompleteCompressed = v_ptr32()
        self.MdlWriteCompleteCompressed = v_ptr32()
        self.FastIoQueryOpen = v_ptr32()
        self.ReleaseForModWrite = v_ptr32()
        self.AcquireForCcFlush = v_ptr32()
        self.ReleaseForCcFlush = v_ptr32()


class RTL_DYNAMIC_HASH_TABLE_CONTEXT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ChainHead = v_ptr32()
        self.PrevLinkage = v_ptr32()
        self.Signature = v_uint32()


class MMWSL(vstruct.VStruct):
    # Empty placeholder emitted by the generator (opaque structure).
    def __init__(self):
        vstruct.VStruct.__init__(self)


class PPM_SELECTION_STATISTICS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PlatformOnlyCount = v_uint64()
        self.PreVetoCount = v_uint64()
        self.VetoCount = v_uint64()
        self.IdleDurationCount = v_uint64()
        self.LatencyCount = v_uint64()
        self.InterruptibleCount = v_uint64()
        self.DeviceDependencyCount = v_uint64()
        self.ProcessorDependencyCount = v_uint64()
        self.WrongProcessorCount = v_uint64()
        self.LegacyOverrideCount = v_uint64()
        self.CstateCheckCount = v_uint64()
        self.NoCStateCount = v_uint64()
        self.SelectedCount = v_uint64()


class PROC_IDLE_STATE_ACCOUNTING(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TotalTime = v_uint64()
        self.CancelCount = v_uint32()
        self.FailureCount = v_uint32()
        self.SuccessCount = v_uint32()
        self.InvalidBucketIndex = v_uint32()
        self.MinTime = v_uint64()
        self.MaxTime = v_uint64()
        self.SelectionStatistics = PPM_SELECTION_STATISTICS()
        self.IdleTimeBuckets = vstruct.VArray([ PROC_IDLE_STATE_BUCKET() for i in xrange(26) ])


class PPM_VETO_ACCOUNTING(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VetoPresent = v_uint32()
        self.VetoListHead = LIST_ENTRY()


class WORK_QUEUE_ITEM(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.List = LIST_ENTRY()
        self.WorkerRoutine = v_ptr32()
        self.Parameter = v_ptr32()


class _unnamed_9066(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Create = _unnamed_9128()


class KSPECIAL_REGISTERS(vstruct.VStruct):
    # x86 special/control registers snapshot; body continues below.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Cr0 = v_uint32()
        self.Cr2 = v_uint32()
        self.Cr3 = v_uint32()
        self.Cr4 = v_uint32()
        self.KernelDr0 = v_uint32()
        self.KernelDr1 = v_uint32()
        self.KernelDr2 = v_uint32()
        # (continuation of KSPECIAL_REGISTERS.__init__ from the lines above)
        self.KernelDr3 = v_uint32()
        self.KernelDr6 = v_uint32()
        self.KernelDr7 = v_uint32()
        self.Gdtr = DESCRIPTOR()
        self.Idtr = DESCRIPTOR()
        self.Tr = v_uint16()
        self.Ldtr = v_uint16()
        self.Xcr0 = v_uint64()
        self.ExceptionList = v_uint32()
        self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(3) ])


class KINTERRUPT(vstruct.VStruct):
    # Windows KINTERRUPT object (32-bit layout).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.InterruptListEntry = LIST_ENTRY()
        self.ServiceRoutine = v_ptr32()
        self.MessageServiceRoutine = v_ptr32()
        self.MessageIndex = v_uint32()
        self.ServiceContext = v_ptr32()
        self.SpinLock = v_uint32()
        self.TickCount = v_uint32()
        self.ActualLock = v_ptr32()
        self.DispatchAddress = v_ptr32()
        self.Vector = v_uint32()
        self.Irql = v_uint8()
        self.SynchronizeIrql = v_uint8()
        self.FloatingSave = v_uint8()
        self.Connected = v_uint8()
        self.Number = v_uint32()
        self.ShareVector = v_uint8()
        self.EmulateActiveBoth = v_uint8()
        self.ActiveCount = v_uint16()
        self.InternalState = v_uint32()
        self.Mode = v_uint32()
        self.Polarity = v_uint32()
        self.ServiceCount = v_uint32()
        self.DispatchCount = v_uint32()
        self.PassiveEvent = v_ptr32()
        self.DisconnectData = v_ptr32()
        self.ServiceThread = v_ptr32()
        self._pad0060 = v_bytes(size=4)
        self.IsrDpcStats = ISRDPCSTATS()
        self.ConnectionData = v_ptr32()
        self._pad00a8 = v_bytes(size=4)


class RTL_CRITICAL_SECTION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DebugInfo = v_ptr32()
        self.LockCount = v_uint32()
        self.RecursionCount = v_uint32()
        self.OwningThread = v_ptr32()
        self.LockSemaphore = v_ptr32()
        self.SpinCount = v_uint32()


class KSYSTEM_TIME(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.High1Time = v_uint32()
        self.High2Time = v_uint32()


class PROC_IDLE_STATE_BUCKET(vstruct.VStruct):
    # Body continues on the following line.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TotalTime = v_uint64()
        self.MinTime = v_uint64()
        self.MaxTime = v_uint64()
        self.Count = v_uint32()
        # (tail of PROC_IDLE_STATE_BUCKET.__init__ from the line above)
        self._pad0020 = v_bytes(size=4)


class PPM_IDLE_SYNCHRONIZATION_STATE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AsLong = v_uint32()


class RTL_STD_LIST_HEAD(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Lock = RTL_STACK_DATABASE_LOCK()


class PROCESSOR_IDLE_CONSTRAINTS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TotalTime = v_uint64()
        self.IdleTime = v_uint64()
        self.ExpectedIdleDuration = v_uint64()
        self.MaxIdleDuration = v_uint64()
        self.OverrideState = v_uint32()
        self.TimeCheck = v_uint32()
        self.PromotePercent = v_uint8()
        self.DemotePercent = v_uint8()
        self.Parked = v_uint8()
        self.Interruptible = v_uint8()
        self.PlatformIdle = v_uint8()
        self._pad0030 = v_bytes(size=3)
        self.IRHints = v_uint32()
        self.IRTruncatedHints = v_uint32()
        self.ExpectedWakeReason = v_uint8()
        self._pad0040 = v_bytes(size=7)


class FLOATING_SAVE_AREA(vstruct.VStruct):
    # x87 FPU save area as embedded in the 32-bit CONTEXT record.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ControlWord = v_uint32()
        self.StatusWord = v_uint32()
        self.TagWord = v_uint32()
        self.ErrorOffset = v_uint32()
        self.ErrorSelector = v_uint32()
        self.DataOffset = v_uint32()
        self.DataSelector = v_uint32()
        self.RegisterArea = vstruct.VArray([ v_uint8() for i in xrange(80) ])
        self.Spare0 = v_uint32()


class DPH_HEAP_BLOCK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pNextAlloc = v_ptr32()
        self._pad0010 = v_bytes(size=12)
        self.pUserAllocation = v_ptr32()
        self.pVirtualBlock = v_ptr32()
        self.nVirtualBlockSize = v_uint32()
        self.nVirtualAccessSize = v_uint32()
        self.nUserRequestedSize = v_uint32()
        self.nUserActualSize = v_uint32()
        self.UserValue = v_ptr32()
        self.UserFlags = v_uint32()
        self.StackTrace = v_ptr32()
        self.AdjacencyEntry = LIST_ENTRY()
        self.pVirtualRegion = v_ptr32()


class KQUEUE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.EntryListHead = LIST_ENTRY()
        self.CurrentCount = v_uint32()
        self.MaximumCount = v_uint32()
        self.ThreadListHead = LIST_ENTRY()


class _unnamed_9032(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Overlay = _unnamed_9105()
        self._pad0030 = v_bytes(size=4)


class LUID_AND_ATTRIBUTES(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Luid = LUID()
        self.Attributes = v_uint32()


class HEAP_BUCKET(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BlockUnits = v_uint16()
        self.SizeIndex = v_uint8()
        self.UseAffinity = v_uint8()


class LDR_SERVICE_TAG_RECORD(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.ServiceTag = v_uint32()


class IO_SECURITY_CONTEXT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityQos = v_ptr32()
        self.AccessState = v_ptr32()
        self.DesiredAccess = v_uint32()
        self.FullCreateOptions = v_uint32()


class KTHREAD(vstruct.VStruct):
    # Windows KTHREAD kernel thread object (32-bit layout).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.SListFaultAddress = v_ptr32()
        self._pad0018 = v_bytes(size=4)
        self.QuantumTarget = v_uint64()
        self.InitialStack = v_ptr32()
        self.StackLimit = v_ptr32()
        self.StackBase = v_ptr32()
        self.ThreadLock = v_uint32()
        self.CycleTime = v_uint64()
        self.HighCycleTime = v_uint32()
        self.ServiceTable = v_ptr32()
        self.CurrentRunTime = v_uint32()
        self.ExpectedRunTime = v_uint32()
        self.KernelStack = v_ptr32()
        self.StateSaveArea = v_ptr32()
        self.SchedulingGroup = v_ptr32()
        self.WaitRegister = KWAIT_STATUS_REGISTER()
        self.Running = v_uint8()
        self.Alerted = vstruct.VArray([ v_uint8() for i in xrange(2) ])
        self.SpareMiscFlag0 = v_uint32()
        self.AutoAlignment = v_uint32()
        self.Spare0 = v_uint32()
        self.SystemCallNumber = v_uint32()
        self.FirstArgument = v_ptr32()
        self.TrapFrame = v_ptr32()
        self.ApcState = KAPC_STATE()
        self.UserIdealProcessor = v_uint32()
        self.ContextSwitches = v_uint32()
        self.State = v_uint8()
        self.NpxState = v_uint8()
        self.WaitIrql = v_uint8()
        self.WaitMode = v_uint8()
        self.WaitStatus = v_uint32()
        self.WaitBlockList = v_ptr32()
        self.WaitListEntry = LIST_ENTRY()
        self.Queue = v_ptr32()
        self.Teb = v_ptr32()
        self._pad00b0 = v_bytes(size=4)
        self.RelativeTimerBias = v_uint64()
        self.Timer = KTIMER()
        self.WaitBlock = vstruct.VArray([ KWAIT_BLOCK() for i in xrange(4) ])
        self.QueueListEntry = LIST_ENTRY()
        self.NextProcessor = v_uint32()
        self.QueuePriority = v_uint32()
        self.Process = v_ptr32()
        self.UserAffinity = GROUP_AFFINITY()
        self.Affinity = GROUP_AFFINITY()
        self.ApcStatePointer = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
        self.SavedApcState = KAPC_STATE()
        self.SuspendCount = v_uint8()
        self.Saturation = v_uint8()
        self.SListFaultCount = v_uint16()
        self.SchedulerApc = KAPC()
        self.UserTime = v_uint32()
        self.SuspendEvent = KEVENT()
        self.ThreadListEntry = LIST_ENTRY()
        self.MutantListHead = LIST_ENTRY()
        self.LockEntriesFreeList = SINGLE_LIST_ENTRY()
        self.LockEntries = vstruct.VArray([ KLOCK_ENTRY() for i in xrange(6) ])
        self.PropagateBoostsEntry = SINGLE_LIST_ENTRY()
        self.IoSelfBoostsEntry = SINGLE_LIST_ENTRY()
        self.PriorityFloorCounts = vstruct.VArray([ v_uint8() for i in xrange(16) ])
        self.PriorityFloorSummary = v_uint32()
        self.AbCompletedIoBoostCount = v_uint32()
        self.AbReferenceCount = v_uint16()
        self.AbFreeEntryCount = v_uint8()
        self.AbWaitEntryCount = v_uint8()
        self.ForegroundLossTime = v_uint32()
        self.GlobalForegroundListEntry = LIST_ENTRY()


class _unnamed_12539(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Address = LARGE_INTEGER()
        self.DataPayload = v_uint32()
        self._pad0010 = v_bytes(size=4)


class _unnamed_12538(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Gsiv = v_uint32()
        self.WakeInterrupt = v_uint32()


class _unnamed_9400(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WhichSpace = v_uint32()
        self.Buffer = v_ptr32()
        self.Offset = v_uint32()
        self.Length = v_uint32()


class CONTEXT(vstruct.VStruct):
    # x86 (32-bit) CONTEXT record; __init__ body continues below.
    def __init__(self):
        # (body of CONTEXT.__init__, begun above)
        vstruct.VStruct.__init__(self)
        self.ContextFlags = v_uint32()
        self.Dr0 = v_uint32()
        self.Dr1 = v_uint32()
        self.Dr2 = v_uint32()
        self.Dr3 = v_uint32()
        self.Dr6 = v_uint32()
        self.Dr7 = v_uint32()
        self.FloatSave = FLOATING_SAVE_AREA()
        self.SegGs = v_uint32()
        self.SegFs = v_uint32()
        self.SegEs = v_uint32()
        self.SegDs = v_uint32()
        self.Edi = v_uint32()
        self.Esi = v_uint32()
        self.Ebx = v_uint32()
        self.Edx = v_uint32()
        self.Ecx = v_uint32()
        self.Eax = v_uint32()
        self.Ebp = v_uint32()
        self.Eip = v_uint32()
        self.SegCs = v_uint32()
        self.EFlags = v_uint32()
        self.Esp = v_uint32()
        self.SegSs = v_uint32()
        self.ExtendedRegisters = vstruct.VArray([ v_uint8() for i in xrange(512) ])


class _unnamed_9405(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Lock = v_uint8()


class MCI_STATS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MciStats = _unnamed_10886()


class _unnamed_6578(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.HighPart = v_uint32()


class PS_PROTECTION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Level = v_uint8()


class PROC_PERF_LOAD(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BusyPercentage = v_uint8()
        self.FrequencyPercentage = v_uint8()


class AUX_ACCESS_DATA(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PrivilegesUsed = v_ptr32()
        self.GenericMapping = GENERIC_MAPPING()
        self.AccessesToAudit = v_uint32()
        self.MaximumAuditMask = v_uint32()
        self.TransactionId = GUID()
        self.NewSecurityDescriptor = v_ptr32()
        self.ExistingSecurityDescriptor = v_ptr32()
        self.ParentSecurityDescriptor = v_ptr32()
        self.DeRefSecurityDescriptor = v_ptr32()
        self.SDLock = v_ptr32()
        self.AccessReasons = ACCESS_REASONS()
        self.GenerateStagingEvents = v_uint8()
        self._pad00c4 = v_bytes(size=3)


class _unnamed_9494(vstruct.VStruct):
    # Body continues on the following line.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Argument1 = v_ptr32()
        # (tail of _unnamed_9494.__init__ from the line above)
        self.Argument2 = v_ptr32()
        self.Argument3 = v_ptr32()
        self.Argument4 = v_ptr32()


class _unnamed_8729(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InitialPrivilegeSet = INITIAL_PRIVILEGE_SET()


class HEAP_LOCAL_DATA(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeletedSubSegments = SLIST_HEADER()
        self.CrtZone = v_ptr32()
        self.LowFragHeap = v_ptr32()
        self.Sequence = v_uint32()
        self.DeleteRateThreshold = v_uint32()


class DPH_BLOCK_INFORMATION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.StartStamp = v_uint32()
        self.Heap = v_ptr32()
        self.RequestedSize = v_uint32()
        self.ActualSize = v_uint32()
        self.FreeQueue = LIST_ENTRY()
        self.StackTrace = v_ptr32()
        self.EndStamp = v_uint32()


class _unnamed_12580(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExtendedDestination = v_uint32()


class _unnamed_9241(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.FileInformationClass = v_uint32()


class PF_KERNEL_GLOBALS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AccessBufferAgeThreshold = v_uint64()
        self.AccessBufferRef = EX_RUNDOWN_REF()
        self.AccessBufferExistsEvent = KEVENT()
        self.AccessBufferMax = v_uint32()
        self.AccessBufferList = SLIST_HEADER()
        self.StreamSequenceNumber = v_uint32()
        self.Flags = v_uint32()
        self.ScenarioPrefetchCount = v_uint32()
        self._pad0040 = v_bytes(size=12)


class PEB32(vstruct.VStruct):
    # 32-bit Process Environment Block; __init__ body continues below.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InheritedAddressSpace = v_uint8()
        self.ReadImageFileExecOptions = v_uint8()
        self.BeingDebugged = v_uint8()
        self.BitField = v_uint8()
        self.Mutant = v_uint32()
        self.ImageBaseAddress = v_uint32()
        self.Ldr = v_uint32()
        self.ProcessParameters = v_uint32()
        self.SubSystemData = v_uint32()
        self.ProcessHeap = v_uint32()
        self.FastPebLock = v_uint32()
        self.AtlThunkSListPtr = v_uint32()
        self.IFEOKey = v_uint32()
        self.CrossProcessFlags = v_uint32()
        # (continuation of PEB32.__init__ from the lines above)
        self.KernelCallbackTable = v_uint32()
        self.SystemReserved = vstruct.VArray([ v_uint32() for i in xrange(1) ])
        self.AtlThunkSListPtr32 = v_uint32()
        self.ApiSetMap = v_uint32()
        self.TlsExpansionCounter = v_uint32()
        self.TlsBitmap = v_uint32()
        self.TlsBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(2) ])
        self.ReadOnlySharedMemoryBase = v_uint32()
        self.SparePvoid0 = v_uint32()
        self.ReadOnlyStaticServerData = v_uint32()
        self.AnsiCodePageData = v_uint32()
        self.OemCodePageData = v_uint32()
        self.UnicodeCaseTableData = v_uint32()
        self.NumberOfProcessors = v_uint32()
        self.NtGlobalFlag = v_uint32()
        self._pad0070 = v_bytes(size=4)
        self.CriticalSectionTimeout = LARGE_INTEGER()
        self.HeapSegmentReserve = v_uint32()
        self.HeapSegmentCommit = v_uint32()
        self.HeapDeCommitTotalFreeThreshold = v_uint32()
        self.HeapDeCommitFreeBlockThreshold = v_uint32()
        self.NumberOfHeaps = v_uint32()
        self.MaximumNumberOfHeaps = v_uint32()
        self.ProcessHeaps = v_uint32()
        self.GdiSharedHandleTable = v_uint32()
        self.ProcessStarterHelper = v_uint32()
        self.GdiDCAttributeList = v_uint32()
        self.LoaderLock = v_uint32()
        self.OSMajorVersion = v_uint32()
        self.OSMinorVersion = v_uint32()
        self.OSBuildNumber = v_uint16()
        self.OSCSDVersion = v_uint16()
        self.OSPlatformId = v_uint32()
        self.ImageSubsystem = v_uint32()
        self.ImageSubsystemMajorVersion = v_uint32()
        self.ImageSubsystemMinorVersion = v_uint32()
        self.ActiveProcessAffinityMask = v_uint32()
        self.GdiHandleBuffer = vstruct.VArray([ v_uint32() for i in xrange(34) ])
        self.PostProcessInitRoutine = v_uint32()
        self.TlsExpansionBitmap = v_uint32()
        self.TlsExpansionBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(32) ])
        self.SessionId = v_uint32()
        self.AppCompatFlags = ULARGE_INTEGER()
        self.AppCompatFlagsUser = ULARGE_INTEGER()
        self.pShimData = v_uint32()
        self.AppCompatInfo = v_uint32()
        self.CSDVersion = STRING32()
        self.ActivationContextData = v_uint32()
        self.ProcessAssemblyStorageMap = v_uint32()
        self.SystemDefaultActivationContextData = v_uint32()
        # (tail of PEB32.__init__ from the lines above)
        self.SystemAssemblyStorageMap = v_uint32()
        self.MinimumStackCommit = v_uint32()
        self.FlsCallback = v_uint32()
        self.FlsListHead = LIST_ENTRY32()
        self.FlsBitmap = v_uint32()
        self.FlsBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(4) ])
        self.FlsHighIndex = v_uint32()
        self.WerRegistrationData = v_uint32()
        self.WerShipAssertPtr = v_uint32()
        self.pUnused = v_uint32()
        self.pImageHeaderHash = v_uint32()
        self.TracingFlags = v_uint32()
        self._pad0248 = v_bytes(size=4)
        self.CsrServerReadOnlySharedMemoryBase = v_uint64()


class _unnamed_9244(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.FileInformationClass = v_uint32()
        self.FileObject = v_ptr32()
        self.ReplaceIfExists = v_uint8()
        self.AdvanceOnly = v_uint8()
        self._pad0010 = v_bytes(size=2)


class _unnamed_12587(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MessageAddressLow = v_uint32()
        self.MessageData = v_uint16()
        self.Reserved = v_uint16()


class RTL_HEAP_WALK_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DataAddress = v_ptr32()
        self.DataSize = v_uint32()
        self.OverheadBytes = v_uint8()
        self.SegmentIndex = v_uint8()
        self.Flags = v_uint16()
        self.Block = _unnamed_10404()


class _unnamed_9128(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityContext = v_ptr32()
        self.Options = v_uint32()
        self.FileAttributes = v_uint16()
        self.ShareAccess = v_uint16()
        self.EaLength = v_uint32()


class EVENT_DATA_DESCRIPTOR(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Ptr = v_uint64()
        self.Size = v_uint32()
        self.Reserved = v_uint32()


class IO_DRIVER_CREATE_CONTEXT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self._pad0004 = v_bytes(size=2)
        self.ExtraCreateParameter = v_ptr32()
        self.DeviceObjectHint = v_ptr32()
        self.TxnParameters = v_ptr32()


class CLIENT_ID64(vstruct.VStruct):
    # 64-bit client ID; __init__ body continues on the following line.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        # (body of CLIENT_ID64.__init__, begun above)
        self.UniqueProcess = v_uint64()
        self.UniqueThread = v_uint64()


class TEB64(vstruct.VStruct):
    # 64-bit Thread Environment Block; __init__ body continues below.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NtTib = NT_TIB64()
        self.EnvironmentPointer = v_uint64()
        self.ClientId = CLIENT_ID64()
        self.ActiveRpcHandle = v_uint64()
        self.ThreadLocalStoragePointer = v_uint64()
        self.ProcessEnvironmentBlock = v_uint64()
        self.LastErrorValue = v_uint32()
        self.CountOfOwnedCriticalSections = v_uint32()
        self.CsrClientThread = v_uint64()
        self.Win32ThreadInfo = v_uint64()
        self.User32Reserved = vstruct.VArray([ v_uint32() for i in xrange(26) ])
        self.UserReserved = vstruct.VArray([ v_uint32() for i in xrange(5) ])
        self._pad0100 = v_bytes(size=4)
        self.WOW32Reserved = v_uint64()
        self.CurrentLocale = v_uint32()
        self.FpSoftwareStatusRegister = v_uint32()
        self.SystemReserved1 = vstruct.VArray([ v_uint64() for i in xrange(54) ])
        self.ExceptionCode = v_uint32()
        self.Padding0 = vstruct.VArray([ v_uint8() for i in xrange(4) ])
        self.ActivationContextStackPointer = v_uint64()
        self.SpareBytes = vstruct.VArray([ v_uint8() for i in xrange(24) ])
        self.TxFsContext = v_uint32()
        self.Padding1 = vstruct.VArray([ v_uint8() for i in xrange(4) ])
        self.GdiTebBatch = GDI_TEB_BATCH64()
        self.RealClientId = CLIENT_ID64()
        self.GdiCachedProcessHandle = v_uint64()
        self.GdiClientPID = v_uint32()
        self.GdiClientTID = v_uint32()
        self.GdiThreadLocalInfo = v_uint64()
        self.Win32ClientInfo = vstruct.VArray([ v_uint64() for i in xrange(62) ])
        self.glDispatchTable = vstruct.VArray([ v_uint64() for i in xrange(233) ])
        self.glReserved1 = vstruct.VArray([ v_uint64() for i in xrange(29) ])
        self.glReserved2 = v_uint64()
        self.glSectionInfo = v_uint64()
        self.glSection = v_uint64()
        self.glTable = v_uint64()
        self.glCurrentRC = v_uint64()
        self.glContext = v_uint64()
        self.LastStatusValue = v_uint32()
        self.Padding2 = vstruct.VArray([ v_uint8() for i in xrange(4) ])
        self.StaticUnicodeString = STRING64()
        self.StaticUnicodeBuffer = vstruct.VArray([ v_uint16() for i in xrange(261) ])
        # (continuation of TEB64.__init__ from the lines above)
        self.Padding3 = vstruct.VArray([ v_uint8() for i in xrange(6) ])
        self.DeallocationStack = v_uint64()
        self.TlsSlots = vstruct.VArray([ v_uint64() for i in xrange(64) ])
        self.TlsLinks = LIST_ENTRY64()
        self.Vdm = v_uint64()
        self.ReservedForNtRpc = v_uint64()
        self.DbgSsReserved = vstruct.VArray([ v_uint64() for i in xrange(2) ])
        self.HardErrorMode = v_uint32()
        self.Padding4 = vstruct.VArray([ v_uint8() for i in xrange(4) ])
        self.Instrumentation = vstruct.VArray([ v_uint64() for i in xrange(11) ])
        self.ActivityId = GUID()
        self.SubProcessTag = v_uint64()
        self.PerflibData = v_uint64()
        self.EtwTraceData = v_uint64()
        self.WinSockData = v_uint64()
        self.GdiBatchCount = v_uint32()
        self.CurrentIdealProcessor = PROCESSOR_NUMBER()
        self.GuaranteedStackBytes = v_uint32()
        self.Padding5 = vstruct.VArray([ v_uint8() for i in xrange(4) ])
        self.ReservedForPerf = v_uint64()
        self.ReservedForOle = v_uint64()
        self.WaitingOnLoaderLock = v_uint32()
        self.Padding6 = vstruct.VArray([ v_uint8() for i in xrange(4) ])
        self.SavedPriorityState = v_uint64()
        self.ReservedForCodeCoverage = v_uint64()
        self.ThreadPoolData = v_uint64()
        self.TlsExpansionSlots = v_uint64()
        self.DeallocationBStore = v_uint64()
        self.BStoreLimit = v_uint64()
        self.MuiGeneration = v_uint32()
        self.IsImpersonating = v_uint32()
        self.NlsCache = v_uint64()
        self.pShimData = v_uint64()
        self.HeapVirtualAffinity = v_uint16()
        self.LowFragHeapDataSlot = v_uint16()
        self.Padding7 = vstruct.VArray([ v_uint8() for i in xrange(4) ])
        self.CurrentTransactionHandle = v_uint64()
        self.ActiveFrame = v_uint64()
        self.FlsData = v_uint64()
        self.PreferredLanguages = v_uint64()
        self.UserPrefLanguages = v_uint64()
        self.MergedPrefLanguages = v_uint64()
        self.MuiImpersonation = v_uint32()
        self.CrossTebFlags = v_uint16()
        self.SameTebFlags = v_uint16()
        self.TxnScopeEnterCallback = v_uint64()
        self.TxnScopeExitCallback = v_uint64()
        self.TxnScopeContext = v_uint64()
        self.LockCount = v_uint32()
        self.SpareUlong0 = v_uint32()
        self.ResourceRetValue = v_uint64()
        # (tail of TEB64.__init__ from the lines above)
        self.ReservedForWdf = v_uint64()


class EJOB(vstruct.VStruct):
    # Windows EJOB executive job object (32-bit layout); body continues below.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Event = KEVENT()
        self.JobLinks = LIST_ENTRY()
        self.ProcessListHead = LIST_ENTRY()
        self.JobLock = ERESOURCE()
        self.TotalUserTime = LARGE_INTEGER()
        self.TotalKernelTime = LARGE_INTEGER()
        self.TotalCycleTime = LARGE_INTEGER()
        self.ThisPeriodTotalUserTime = LARGE_INTEGER()
        self.ThisPeriodTotalKernelTime = LARGE_INTEGER()
        self.TotalContextSwitches = v_uint64()
        self.TotalPageFaultCount = v_uint32()
        self.TotalProcesses = v_uint32()
        self.ActiveProcesses = v_uint32()
        self.TotalTerminatedProcesses = v_uint32()
        self.PerProcessUserTimeLimit = LARGE_INTEGER()
        self.PerJobUserTimeLimit = LARGE_INTEGER()
        self.MinimumWorkingSetSize = v_uint32()
        self.MaximumWorkingSetSize = v_uint32()
        self.LimitFlags = v_uint32()
        self.ActiveProcessLimit = v_uint32()
        self.Affinity = KAFFINITY_EX()
        self.AccessState = v_ptr32()
        self.AccessStateQuotaReference = v_ptr32()
        self.UIRestrictionsClass = v_uint32()
        self.EndOfJobTimeAction = v_uint32()
        self.CompletionPort = v_ptr32()
        self.CompletionKey = v_ptr32()
        self._pad00e0 = v_bytes(size=4)
        self.CompletionCount = v_uint64()
        self.SessionId = v_uint32()
        self.SchedulingClass = v_uint32()
        self.ReadOperationCount = v_uint64()
        self.WriteOperationCount = v_uint64()
        self.OtherOperationCount = v_uint64()
        self.ReadTransferCount = v_uint64()
        self.WriteTransferCount = v_uint64()
        self.OtherTransferCount = v_uint64()
        self.DiskIoInfo = PROCESS_DISK_COUNTERS()
        self.ProcessMemoryLimit = v_uint32()
        self.JobMemoryLimit = v_uint32()
        self.PeakProcessMemoryUsed = v_uint32()
        self.PeakJobMemoryUsed = v_uint32()
        self.EffectiveAffinity = KAFFINITY_EX()
        self._pad0168 = v_bytes(size=4)
        self.EffectivePerProcessUserTimeLimit = LARGE_INTEGER()
        self.EffectiveMinimumWorkingSetSize = v_uint32()
        self.EffectiveMaximumWorkingSetSize = v_uint32()
        self.EffectiveProcessMemoryLimit = v_uint32()
        self.EffectiveProcessMemoryLimitJob = v_ptr32()
        # (tail of EJOB.__init__ from the lines above)
        self.EffectivePerProcessUserTimeLimitJob = v_ptr32()
        self.EffectiveLimitFlags = v_uint32()
        self.EffectiveSchedulingClass = v_uint32()
        self.EffectiveFreezeCount = v_uint32()
        self.EffectiveBackgroundCount = v_uint32()
        self.EffectiveSwapCount = v_uint32()
        self.EffectiveNotificationLimitCount = v_uint32()
        self.EffectivePriorityClass = v_uint8()
        self.PriorityClass = v_uint8()
        self.Reserved1 = vstruct.VArray([ v_uint8() for i in xrange(2) ])
        self.CompletionFilter = v_uint32()
        self._pad01a8 = v_bytes(size=4)
        self.WakeChannel = WNF_STATE_NAME()
        self._pad01e0 = v_bytes(size=48)
        self.WakeFilter = JOBOBJECT_WAKE_FILTER()
        self.LowEdgeLatchFilter = v_uint32()
        self.OwnedHighEdgeFilters = v_uint32()
        self.NotificationLink = v_ptr32()
        self._pad01f8 = v_bytes(size=4)
        self.CurrentJobMemoryUsed = v_uint64()
        self.NotificationInfo = v_ptr32()
        self.NotificationInfoQuotaReference = v_ptr32()
        self.NotificationPacket = v_ptr32()
        self.CpuRateControl = v_ptr32()
        self.EffectiveSchedulingGroup = v_ptr32()
        self._pad0218 = v_bytes(size=4)
        self.ReadyTime = v_uint64()
        self.MemoryLimitsLock = EX_PUSH_LOCK()
        self.SiblingJobLinks = LIST_ENTRY()
        self.ChildJobListHead = LIST_ENTRY()
        self.ParentJob = v_ptr32()
        self.RootJob = v_ptr32()
        self.IteratorListHead = LIST_ENTRY()
        self.AncestorCount = v_uint32()
        self.Ancestors = v_ptr32()
        self._pad0250 = v_bytes(size=4)
        self.Accounting = EPROCESS_VALUES()
        self.ShadowActiveProcessCount = v_uint32()
        self.SequenceNumber = v_uint32()
        self.TimerListLock = v_uint32()
        self.TimerListHead = LIST_ENTRY()
        self.JobFlags = v_uint32()
        self.EffectiveHighEdgeFilters = v_uint32()
        self._pad02c0 = v_bytes(size=4)


class HEAP_LFH_CONTEXT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BackendCtx = v_ptr32()
        self.Callbacks = HEAP_LFH_CALLBACKS()
        self.Buckets = vstruct.VArray([ HEAP_LFH_BUCKET() for i in xrange(129) ])
        self.AffinitySlots = vstruct.VArray([ HEAP_LFH_AFFINITY_SLOT() for i in xrange(129) ])


class HANDLE_TRACE_DEBUG_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.RefCount = v_uint32()
        self.TableSize = v_uint32()
        self.BitMaskFlags = v_uint32()
        self.CloseCompactionLock = FAST_MUTEX()
        self.CurrentStackIndex = v_uint32()
        self.TraceDb = vstruct.VArray([ HANDLE_TRACE_DB_ENTRY() for i in xrange(1) ])


class KPROCESSOR_STATE(vstruct.VStruct):
    # Full processor context: CONTEXT record plus special registers.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ContextFrame = CONTEXT()
        self.SpecialRegisters = KSPECIAL_REGISTERS()


class KiIoAccessMap(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DirectionMap = vstruct.VArray([ v_uint8() for i in xrange(32) ])
        self.IoMap = vstruct.VArray([ v_uint8() for i in xrange(8196) ])


class HEAP_LFH_SUBSEGMENT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = LIST_ENTRY()
        self.Owner = v_ptr32()
        self.BitmapLock = RTL_SRWLOCK()
        self.CommitLock = RTL_SRWLOCK()
        self.FreeCount = v_uint16()
        self.BlockCount = v_uint16()
        self.FreeHint = v_uint16()
        self.BlockSize = v_uint16()
        self.CommitBitmap = v_uint32()
        self.FirstBlockOffset = v_uint16()
        self.Location = v_uint8()
        self.DecommitInProgress = v_uint8()
        self.AllocationBitmap = vstruct.VArray([ v_uint32() for i in xrange(1) ])


class KAPC(vstruct.VStruct):
    # Windows KAPC asynchronous procedure call object (32-bit layout).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint8()
        self.SpareByte0 = v_uint8()
        self.Size = v_uint8()
        self.SpareByte1 = v_uint8()
        self.SpareLong0 = v_uint32()
        self.Thread = v_ptr32()
        self.ApcListEntry = LIST_ENTRY()
        self.KernelRoutine = v_ptr32()
        self.RundownRoutine = v_ptr32()
        self.NormalRoutine = v_ptr32()
        self.NormalContext = v_ptr32()
        self.SystemArgument1 = v_ptr32()
        self.SystemArgument2 = v_ptr32()
        self.ApcStateIndex = v_uint8()
        self.ApcMode = v_uint8()
        self.Inserted = v_uint8()
        self._pad0030 = v_bytes(size=1)


class ETW_BUFFER_CONTEXT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ProcessorNumber = v_uint8()
        self.Alignment = v_uint8()
        self.LoggerId = v_uint16()


class _unnamed_11832(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Level = v_uint16()
        self.Group = v_uint16()
        self.Vector = v_uint32()
        self.Affinity = v_uint32()


class RTL_STACK_DATABASE_LOCK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Lock = RTL_SRWLOCK()


class SID_IDENTIFIER_AUTHORITY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Value = vstruct.VArray([ v_uint8() for i in xrange(6) ])


class _unnamed_9238(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.CompletionFilter = v_uint32()


class PROC_PERF_HISTORY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()
        self.Slot = v_uint32()
        self.HistoryList = vstruct.VArray([ PROC_PERF_HISTORY_ENTRY() for i in xrange(1) ])
        self._pad0010 = v_bytes(size=2)


class _unnamed_9333(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint32()


class _unnamed_11837(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Group = v_uint16()
        self.MessageCount = v_uint16()
        self.Vector = v_uint32()
        self.Affinity = v_uint32()


class _unnamed_9232(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.FileName = v_ptr32()
        self.FileInformationClass = v_uint32()
        self.FileIndex = v_uint32()


class LDRP_DLL_SNAP_CONTEXT(vstruct.VStruct):
    # Empty placeholder emitted by the generator (opaque structure).
    def __init__(self):
        vstruct.VStruct.__init__(self)


class XSTATE_FEATURE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Offset = v_uint32()
        self.Size = v_uint32()


class GDI_TEB_BATCH32(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Offset = v_uint32()
        self.HDC = v_uint32()
        self.Buffer = vstruct.VArray([ v_uint32() for i in xrange(310) ])


class WHEA_TIMESTAMP(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Seconds = v_uint64()


class ACTIVATION_CONTEXT(vstruct.VStruct):
    # Empty placeholder (opaque structure); body continues on the next line.
    def __init__(self):
        # (body of ACTIVATION_CONTEXT.__init__, begun above -- no fields)
        vstruct.VStruct.__init__(self)


class _unnamed_10405(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CommittedSize = v_uint32()
        self.UnCommittedSize = v_uint32()
        self.FirstEntry = v_ptr32()
        self.LastEntry = v_ptr32()


class _unnamed_10404(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Settable = v_uint32()
        self.TagIndex = v_uint16()
        self.AllocatorBackTraceIndex = v_uint16()
        self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(2) ])


class RTL_CRITICAL_SECTION_DEBUG(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.CreatorBackTraceIndex = v_uint16()
        self.CriticalSection = v_ptr32()
        self.ProcessLocksList = LIST_ENTRY()
        self.EntryCount = v_uint32()
        self.ContentionCount = v_uint32()
        self.Flags = v_uint32()
        self.CreatorBackTraceIndexHigh = v_uint16()
        self.SpareUSHORT = v_uint16()


class _unnamed_9993(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CriticalSection = RTL_CRITICAL_SECTION()


class DISPATCHER_HEADER(vstruct.VStruct):
    # Common header shared by kernel dispatcher objects (32-bit layout).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Lock = v_uint32()
        self.SignalState = v_uint32()
        self.WaitListHead = LIST_ENTRY()


class _unnamed_11949(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SyncType = v_uint32()
        self.PageProtection = v_uint32()


class ASSEMBLY_STORAGE_MAP(vstruct.VStruct):
    # Empty placeholder emitted by the generator (opaque structure).
    def __init__(self):
        vstruct.VStruct.__init__(self)


class PROCESSOR_POWER_STATE(vstruct.VStruct):
    # Per-processor power state; __init__ body continues below.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IdleStates = v_ptr32()
        self.IdleAccounting = v_ptr32()
        self.IdleTimeLast = v_uint64()
        self.IdleTimeTotal = v_uint64()
        self.IdleTimeEntry = v_uint64()
        self.Reserved = v_uint64()
        self.IdlePolicy = PROC_IDLE_POLICY()
        self._pad0030 = v_bytes(size=3)
        self.Synchronization = PPM_IDLE_SYNCHRONIZATION_STATE()
        self._pad0038 = v_bytes(size=4)
        self.PerfFeedback = PROC_FEEDBACK()
        self.Hypervisor = v_uint32()
        self.LastSysTime = v_uint32()
self.WmiDispatchPtr = v_uint32() self.WmiInterfaceEnabled = v_uint32() self.FFHThrottleStateInfo = PPM_FFH_THROTTLE_STATE_INFO() self.PerfActionDpc = KDPC() self.PerfActionMask = v_uint32() self._pad00f8 = v_bytes(size=4) self.HvIdleCheck = PROC_IDLE_SNAP() self.PerfCheck = PROC_PERF_SNAP() self.Domain = v_ptr32() self.PerfConstraint = v_ptr32() self.Concurrency = v_ptr32() self.Load = v_ptr32() self.PerfHistory = v_ptr32() self.GuaranteedPerformancePercent = v_uint8() self.HvTargetState = v_uint8() self.Parked = v_uint8() self.OverUtilized = v_uint8() self.LatestPerformancePercent = v_uint32() self.LatestAffinitizedPercent = v_uint32() self.ExpectedUtility = v_uint32() self.Utility = vstruct.VArray([ PROC_PERF_UTILITY() for i in xrange(3) ]) class PS_WAKE_INFORMATION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.NotificationChannel = v_uint64() self.WakeCounters = vstruct.VArray([ v_uint64() for i in xrange(5) ]) self.NoWakeCounter = v_uint64() class DESCRIPTOR(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Pad = v_uint16() self.Limit = v_uint16() self.Base = v_uint32() class JOB_ACCESS_STATE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) class SECURITY_QUALITY_OF_SERVICE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Length = v_uint32() self.ImpersonationLevel = v_uint32() self.ContextTrackingMode = v_uint8() self.EffectiveOnly = v_uint8() self._pad000c = v_bytes(size=2) class COMPRESSED_DATA_INFO(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.CompressionFormatAndEngine = v_uint16() self.CompressionUnitShift = v_uint8() self.ChunkShift = v_uint8() self.ClusterShift = v_uint8() self.Reserved = v_uint8() self.NumberOfChunks = v_uint16() self.CompressedChunkSizes = vstruct.VArray([ v_uint32() for i in xrange(1) ]) class _unnamed_12472(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Length40 = v_uint32() 
self.Alignment40 = v_uint32() self.MinimumAddress = LARGE_INTEGER() self.MaximumAddress = LARGE_INTEGER() class WHEA_ERROR_RECORD(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Header = WHEA_ERROR_RECORD_HEADER() self.SectionDescriptor = vstruct.VArray([ WHEA_ERROR_RECORD_SECTION_DESCRIPTOR() for i in xrange(1) ]) class _unnamed_12477(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Length48 = v_uint32() self.Alignment48 = v_uint32() self.MinimumAddress = LARGE_INTEGER() self.MaximumAddress = LARGE_INTEGER() class PROC_PERF_CONSTRAINT(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Prcb = v_ptr32() self.PerfContext = v_uint32() self.PlatformCap = v_uint32() self.ThermalCap = v_uint32() self.LimitReasons = v_uint32() self._pad0018 = v_bytes(size=4) self.PlatformCapStartTime = v_uint64() self.TargetPercent = v_uint32() self.DesiredPercent = v_uint32() self.SelectedPercent = v_uint32() self.SelectedFrequency = v_uint32() self.PreviousFrequency = v_uint32() self.PreviousPercent = v_uint32() self.LatestFrequencyPercent = v_uint32() self._pad0040 = v_bytes(size=4) self.SelectedState = v_uint64() self.Force = v_uint8() self._pad0050 = v_bytes(size=7) class LUID(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.LowPart = v_uint32() self.HighPart = v_uint32() class KTIMER_EXPIRATION_TRACE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.InterruptTime = v_uint64() self.PerformanceCounter = LARGE_INTEGER() class CLIENT_ID(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.UniqueProcess = v_ptr32() self.UniqueThread = v_ptr32() class RTL_STACK_TRACE_ENTRY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.HashChain = RTL_STD_LIST_ENTRY() self.TraceCount = v_uint16() self.IndexHigh = v_uint16() self.Index = v_uint16() self.Depth = v_uint16() self.BackTrace = vstruct.VArray([ v_ptr32() for i in xrange(32) ]) 
class OBJECT_DUMP_CONTROL(vstruct.VStruct):
    # Auto-generated vstruct layout; reformatted from whitespace-mangled
    # source (statements were collapsed onto single lines). Field order
    # and types are unchanged in this and all following classes.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Stream = v_ptr32()
        self.Detail = v_uint32()


class HANDLE_TRACE_DB_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ClientId = CLIENT_ID()
        self.Handle = v_ptr32()
        self.Type = v_uint32()
        self.StackTrace = vstruct.VArray([v_ptr32() for i in xrange(16)])


class GENERAL_LOOKASIDE_POOL(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListHead = SLIST_HEADER()
        self.Depth = v_uint16()
        self.MaximumDepth = v_uint16()
        self.TotalAllocates = v_uint32()
        self.AllocateMisses = v_uint32()
        self.TotalFrees = v_uint32()
        self.FreeMisses = v_uint32()
        self.Type = v_uint32()
        self.Tag = v_uint32()
        self.Size = v_uint32()
        self.AllocateEx = v_ptr32()
        self.FreeEx = v_ptr32()
        self.ListEntry = LIST_ENTRY()
        self.LastTotalAllocates = v_uint32()
        self.LastAllocateMisses = v_uint32()
        self.Future = vstruct.VArray([v_uint32() for i in xrange(2)])


class _unnamed_11856(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Data = vstruct.VArray([v_uint32() for i in xrange(3)])


class LDRP_CSLIST(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Tail = v_ptr32()


class _unnamed_10879(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Mca = _unnamed_10891()


class _unnamed_10975(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BaseMid = v_uint32()


class _unnamed_9311(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Srb = v_ptr32()


class RTL_SRWLOCK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Locked = v_uint32()


class STRING(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint16()
        self.MaximumLength = v_uint16()
        self.Buffer = v_ptr32()


class _unnamed_9286(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OutputBufferLength = v_uint32()
        self.InputBufferLength = v_uint32()
        self.IoControlCode = v_uint32()
        self.Type3InputBuffer = v_ptr32()


class PROCESSOR_PLATFORM_STATE_RESIDENCY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Residency = v_uint64()
        self.TransitionCount = v_uint64()


class _unnamed_9281(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        # NOTE(review): 'Length' really is a pointer in the source data.
        self.Length = v_ptr32()
        self.Key = v_uint32()
        self.ByteOffset = LARGE_INTEGER()


class LIST_ENTRY32(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_uint32()
        self.Blink = v_uint32()


class _unnamed_9474(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SystemContext = v_uint32()
        self.Type = v_uint32()
        self.State = POWER_STATE()
        self.ShutdownType = v_uint32()


class _unnamed_11785(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Port = _unnamed_12438()


class SINGLE_LIST_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()


class PPM_FFH_THROTTLE_STATE_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.EnableLogging = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.MismatchCount = v_uint32()
        self.Initialized = v_uint8()
        self._pad0010 = v_bytes(size=7)
        self.LastValue = v_uint64()
        self.LastLogTickCount = LARGE_INTEGER()


class KDEVICE_QUEUE_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceListEntry = LIST_ENTRY()
        self.SortKey = v_uint32()
        self.Inserted = v_uint8()
        self._pad0010 = v_bytes(size=3)


class EXT_SET_PARAMETERS_V0(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint32()
        self.Reserved = v_uint32()
        self.NoWakeTolerance = v_uint64()


class HEAP_FAILURE_INFORMATION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint32()
        self.StructureSize = v_uint32()
        self.FailureType = v_uint32()
        self.HeapAddress = v_ptr32()
        self.Address = v_ptr32()
        self.Param1 = v_ptr32()
        self.Param2 = v_ptr32()
        self.Param3 = v_ptr32()
        self.PreviousBlock = v_ptr32()
        self.NextBlock = v_ptr32()
        self.ExpectedEncodedEntry = HEAP_ENTRY()
        self.ExpectedDecodedEntry = HEAP_ENTRY()
        self.StackTrace = vstruct.VArray([v_ptr32() for i in xrange(32)])


class EX_FAST_REF(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Object = v_ptr32()


class INTERLOCK_SEQ(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Depth = v_uint16()
        self.Hint = v_uint16()


class KSPIN_LOCK_QUEUE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Lock = v_ptr32()


class WHEA_ERROR_PACKET_FLAGS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PreviousError = v_uint32()


class _unnamed_9273(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.FsInformationClass = v_uint32()


class _unnamed_11872(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = LARGE_INTEGER()
        self.Length64 = v_uint32()


class _unnamed_11875(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Class = v_uint8()
        self.Type = v_uint8()
        self.Reserved1 = v_uint8()
        self.Reserved2 = v_uint8()
        self.IdLowPart = v_uint32()
        self.IdHighPart = v_uint32()


class LDR_DDAG_NODE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Modules = LIST_ENTRY()
        self.ServiceTagList = v_ptr32()
        self.LoadCount = v_uint32()
        self.ReferenceCount = v_uint32()
        self.DependencyCount = v_uint32()
        self.Dependencies = LDRP_CSLIST()
        self.IncomingDependencies = LDRP_CSLIST()
        self.State = v_uint32()
        self.CondenseLink = SINGLE_LIST_ENTRY()
        self.PreorderNumber = v_uint32()
        self.LowestLink = v_uint32()


class _unnamed_9276(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OutputBufferLength = v_uint32()
        self.InputBufferLength = v_uint32()
        self.FsControlCode = v_uint32()
        self.Type3InputBuffer = v_ptr32()
class MM_DRIVER_VERIFIER_DATA(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Level = v_uint32() self.RaiseIrqls = v_uint32() self.AcquireSpinLocks = v_uint32() self.SynchronizeExecutions = v_uint32() self.AllocationsAttempted = v_uint32() self.AllocationsSucceeded = v_uint32() self.AllocationsSucceededSpecialPool = v_uint32() self.AllocationsWithNoTag = v_uint32() self.TrimRequests = v_uint32() self.Trims = v_uint32() self.AllocationsFailed = v_uint32() self.AllocationsFailedDeliberately = v_uint32() self.Loads = v_uint32() self.Unloads = v_uint32() self.UnTrackedPool = v_uint32() self.UserTrims = v_uint32() self.CurrentPagedPoolAllocations = v_uint32() self.CurrentNonPagedPoolAllocations = v_uint32() self.PeakPagedPoolAllocations = v_uint32() self.PeakNonPagedPoolAllocations = v_uint32() self.PagedBytes = v_uint32() self.NonPagedBytes = v_uint32() self.PeakPagedBytes = v_uint32() self.PeakNonPagedBytes = v_uint32() self.BurstAllocationsFailedDeliberately = v_uint32() self.SessionTrims = v_uint32() self.OptionChanges = v_uint32() self.VerifyMode = v_uint32() self.PreviousBucketName = UNICODE_STRING() self.ActivityCounter = v_uint32() self.PreviousActivityCounter = v_uint32() self.WorkerTrimRequests = v_uint32() class IO_RESOURCE_DESCRIPTOR(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Option = v_uint8() self.Type = v_uint8() self.ShareDisposition = v_uint8() self.Spare1 = v_uint8() self.Flags = v_uint16() self.Spare2 = v_uint16() self.u = _unnamed_11785() class INTERRUPT_VECTOR_DATA(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Type = v_uint32() self.Vector = v_uint32() self.Irql = v_uint8() self._pad000c = v_bytes(size=3) self.Polarity = v_uint32() self.Mode = v_uint32() self.TargetProcessors = GROUP_AFFINITY() self.IntRemapInfo = INTERRUPT_REMAPPING_INFO() self.ControllerInput = _unnamed_12538() self._pad0040 = v_bytes(size=8) class 
WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.FRUId = v_uint8() class RTL_TRACE_BLOCK(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Magic = v_uint32() self.Count = v_uint32() self.Size = v_uint32() self.UserCount = v_uint32() self.UserSize = v_uint32() self.UserContext = v_ptr32() self.Next = v_ptr32() self.Trace = v_ptr32() class KPRIQUEUE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Header = DISPATCHER_HEADER() self.EntryListHead = vstruct.VArray([ LIST_ENTRY() for i in xrange(32) ]) self.CurrentCount = vstruct.VArray([ v_uint32() for i in xrange(32) ]) self.MaximumCount = v_uint32() self.ThreadListHead = LIST_ENTRY() class IMAGE_OPTIONAL_HEADER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Magic = v_uint16() self.MajorLinkerVersion = v_uint8() self.MinorLinkerVersion = v_uint8() self.SizeOfCode = v_uint32() self.SizeOfInitializedData = v_uint32() self.SizeOfUninitializedData = v_uint32() self.AddressOfEntryPoint = v_uint32() self.BaseOfCode = v_uint32() self.BaseOfData = v_uint32() self.ImageBase = v_uint32() self.SectionAlignment = v_uint32() self.FileAlignment = v_uint32() self.MajorOperatingSystemVersion = v_uint16() self.MinorOperatingSystemVersion = v_uint16() self.MajorImageVersion = v_uint16() self.MinorImageVersion = v_uint16() self.MajorSubsystemVersion = v_uint16() self.MinorSubsystemVersion = v_uint16() self.Win32VersionValue = v_uint32() self.SizeOfImage = v_uint32() self.SizeOfHeaders = v_uint32() self.CheckSum = v_uint32() self.Subsystem = v_uint16() self.DllCharacteristics = v_uint16() self.SizeOfStackReserve = v_uint32() self.SizeOfStackCommit = v_uint32() self.SizeOfHeapReserve = v_uint32() self.SizeOfHeapCommit = v_uint32() self.LoaderFlags = v_uint32() self.NumberOfRvaAndSizes = v_uint32() self.DataDirectory = vstruct.VArray([ IMAGE_DATA_DIRECTORY() for i in xrange(16) ]) class 
SCSI_REQUEST_BLOCK(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) class STRING64(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Length = v_uint16() self.MaximumLength = v_uint16() self._pad0008 = v_bytes(size=4) self.Buffer = v_uint64() class JOB_NOTIFICATION_INFORMATION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) class SEGMENT_HEAP(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.TotalReservedPages = v_uint32() self.TotalCommittedPages = v_uint32() self.Signature = v_uint32() self.GlobalFlags = v_uint32() self.ProcessHeapListIndex = v_uint16() self._pad0014 = v_bytes(size=2) self.FreeCommittedPages = v_uint32() self.LargeMetadataLock = RTL_SRWLOCK() self.LargeAllocMetadata = RTL_RB_TREE() self.LargeReservedPages = v_uint32() self.LargeCommittedPages = v_uint32() self.SegmentAllocatorLock = HEAP_LOCK() self.SegmentListHead = LIST_ENTRY() self.SegmentCount = v_uint32() self.FreePageRanges = RTL_RB_TREE() self.NextLocalInfoAddress = v_ptr32() self.NextUncommitedAddress = v_ptr32() self.ReservedLimit = v_ptr32() self.LfhContext = HEAP_LFH_CONTEXT() class FAST_MUTEX(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Count = v_uint32() self.Owner = v_ptr32() self.Contention = v_uint32() self.Event = KEVENT() self.OldIrql = v_uint32() class WHEA_ERROR_RECORD_HEADER_VALIDBITS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.PlatformId = v_uint32() class KDEVICE_QUEUE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Type = v_uint16() self.Size = v_uint16() self.DeviceListHead = LIST_ENTRY() self.Lock = v_uint32() self.Busy = v_uint8() self._pad0014 = v_bytes(size=3) class NT_TIB32(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ExceptionList = v_uint32() self.StackBase = v_uint32() self.StackLimit = v_uint32() self.SubSystemTib = v_uint32() self.FiberData = v_uint32() 
self.ArbitraryUserPointer = v_uint32() self.Self = v_uint32() class LFH_RANDOM_DATA(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Bytes = vstruct.VArray([ v_uint8() for i in xrange(256) ]) class _unnamed_9307(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Vpb = v_ptr32() self.DeviceObject = v_ptr32() class TERMINATION_PORT(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Next = v_ptr32() self.Port = v_ptr32() class PROCESSOR_PLATFORM_STATE_RESIDENCIES(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Count = v_uint32() self._pad0008 = v_bytes(size=4) self.States = vstruct.VArray([ PROCESSOR_PLATFORM_STATE_RESIDENCY() for i in xrange(1) ]) class IO_CLIENT_EXTENSION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.NextExtension = v_ptr32() self.ClientIdentificationAddress = v_ptr32() class INITIAL_PRIVILEGE_SET(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.PrivilegeCount = v_uint32() self.Control = v_uint32() self.Privilege = vstruct.VArray([ LUID_AND_ATTRIBUTES() for i in xrange(3) ]) class GDI_TEB_BATCH64(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Offset = v_uint32() self._pad0008 = v_bytes(size=4) self.HDC = v_uint64() self.Buffer = vstruct.VArray([ v_uint32() for i in xrange(310) ]) class WHEA_ERROR_RECORD_HEADER_FLAGS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Recovered = v_uint32() class KTIMER_TABLE_ENTRY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Lock = v_uint32() self.Entry = LIST_ENTRY() self._pad0010 = v_bytes(size=4) self.Time = ULARGE_INTEGER() class KWAIT_BLOCK(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.WaitListEntry = LIST_ENTRY() self.WaitType = v_uint8() self.BlockState = v_uint8() self.WaitKey = v_uint16() self.Thread = v_ptr32() self.Object = v_ptr32() self.SparePtr = 
v_ptr32() class ACTIVATION_CONTEXT_DATA(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) class RTL_BALANCED_NODE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Children = vstruct.VArray([ v_ptr32() for i in xrange(2) ]) self.Red = v_uint8() self._pad000c = v_bytes(size=3) class FILE_NETWORK_OPEN_INFORMATION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.CreationTime = LARGE_INTEGER() self.LastAccessTime = LARGE_INTEGER() self.LastWriteTime = LARGE_INTEGER() self.ChangeTime = LARGE_INTEGER() self.AllocationSize = LARGE_INTEGER() self.EndOfFile = LARGE_INTEGER() self.FileAttributes = v_uint32() self._pad0038 = v_bytes(size=4) class DPH_HEAP_ROOT(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Signature = v_uint32() self.HeapFlags = v_uint32() self.HeapCritSect = v_ptr32() self.nRemoteLockAcquired = v_uint32() self.pVirtualStorageListHead = v_ptr32() self.pVirtualStorageListTail = v_ptr32() self.nVirtualStorageRanges = v_uint32() self.nVirtualStorageBytes = v_uint32() self.BusyNodesTable = RTL_AVL_TABLE() self.NodeToAllocate = v_ptr32() self.nBusyAllocations = v_uint32() self.nBusyAllocationBytesCommitted = v_uint32() self.pFreeAllocationListHead = v_ptr32() self.pFreeAllocationListTail = v_ptr32() self.nFreeAllocations = v_uint32() self.nFreeAllocationBytesCommitted = v_uint32() self.AvailableAllocationHead = LIST_ENTRY() self.nAvailableAllocations = v_uint32() self.nAvailableAllocationBytesCommitted = v_uint32() self.pUnusedNodeListHead = v_ptr32() self.pUnusedNodeListTail = v_ptr32() self.nUnusedNodes = v_uint32() self.nBusyAllocationBytesAccessible = v_uint32() self.pNodePoolListHead = v_ptr32() self.pNodePoolListTail = v_ptr32() self.nNodePools = v_uint32() self.nNodePoolBytes = v_uint32() self.NextHeap = LIST_ENTRY() self.ExtraFlags = v_uint32() self.Seed = v_uint32() self.NormalHeap = v_ptr32() self.CreateStackTrace = v_ptr32() self.FirstThread = v_ptr32() class 
HEAP_USERDATA_HEADER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.SFreeListEntry = SINGLE_LIST_ENTRY() self.Reserved = v_ptr32() self.SizeIndexAndPadding = v_uint32() self.Signature = v_uint32() self.EncodedOffsets = HEAP_USERDATA_OFFSETS() self.BusyBitmap = RTL_BITMAP() self.BitmapData = vstruct.VArray([ v_uint32() for i in xrange(1) ]) class RTL_DRIVE_LETTER_CURDIR(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Flags = v_uint16() self.Length = v_uint16() self.TimeStamp = v_uint32() self.DosPath = STRING() class PROC_PERF_SNAP(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Time = v_uint64() self.LastTime = v_uint64() self.Active = v_uint64() self.LastActive = v_uint64() self.FrequencyScaledActive = v_uint64() self.PerformanceScaledActive = v_uint64() self.CyclesActive = v_uint64() self.CyclesAffinitized = v_uint64() class CACHE_DESCRIPTOR(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Level = v_uint8() self.Associativity = v_uint8() self.LineSize = v_uint16() self.Size = v_uint32() self.Type = v_uint32() class RTL_TRACE_DATABASE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Magic = v_uint32() self.Flags = v_uint32() self.Tag = v_uint32() self.SegmentList = v_ptr32() self.MaximumSize = v_uint32() self.CurrentSize = v_uint32() self.Owner = v_ptr32() self.Lock = RTL_CRITICAL_SECTION() self.NoOfBuckets = v_uint32() self.Buckets = v_ptr32() self.HashFunction = v_ptr32() self.NoOfTraces = v_uint32() self.NoOfHits = v_uint32() self.HashCounter = vstruct.VArray([ v_uint32() for i in xrange(16) ]) class ULARGE_INTEGER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.LowPart = v_uint32() self.HighPart = v_uint32() class TEB_ACTIVE_FRAME(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Flags = v_uint32() self.Previous = v_ptr32() self.Context = v_ptr32() class 
GENERAL_LOOKASIDE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ListHead = SLIST_HEADER() self.Depth = v_uint16() self.MaximumDepth = v_uint16() self.TotalAllocates = v_uint32() self.AllocateMisses = v_uint32() self.TotalFrees = v_uint32() self.FreeMisses = v_uint32() self.Type = v_uint32() self.Tag = v_uint32() self.Size = v_uint32() self.AllocateEx = v_ptr32() self.FreeEx = v_ptr32() self.ListEntry = LIST_ENTRY() self.LastTotalAllocates = v_uint32() self.LastAllocateMisses = v_uint32() self.Future = vstruct.VArray([ v_uint32() for i in xrange(2) ]) self._pad0080 = v_bytes(size=56) class _unnamed_10898(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Address = v_uint64() self.Type = v_uint64() class INTERRUPT_CONNECTION_DATA(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Count = v_uint32() self.OriginalAffinity = GROUP_AFFINITY() self.SteeringListEntry = LIST_ENTRY() self.SteeringListRoot = v_ptr32() self._pad0020 = v_bytes(size=4) self.IsrTime = v_uint64() self.DpcTime = v_uint64() self.IsrLoad = v_uint32() self.DpcLoad = v_uint32() self.IsPrimaryInterrupt = v_uint8() self._pad003c = v_bytes(size=3) self.InterruptObjectArray = v_ptr32() self.InterruptObjectCount = v_uint32() self._pad0048 = v_bytes(size=4) self.Vectors = vstruct.VArray([ INTERRUPT_VECTOR_DATA() for i in xrange(1) ]) class KLOCK_ENTRY_LOCK_STATE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Waiting = v_uint32() self.SessionState = v_ptr32() class PPM_CONCURRENCY_ACCOUNTING(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Lock = v_uint32() self.Processors = v_uint32() self.ActiveProcessors = v_uint32() self._pad0010 = v_bytes(size=4) self.LastUpdateTime = v_uint64() self.TotalTime = v_uint64() self.AccumulatedTime = vstruct.VArray([ v_uint64() for i in xrange(1) ]) class KWAIT_STATUS_REGISTER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) 
self.Flags = v_uint8() class PROC_PERF_HISTORY_ENTRY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Utility = v_uint16() self.AffinitizedUtility = v_uint16() self.Frequency = v_uint8() self.Reserved = v_uint8() class KGDTENTRY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.LimitLow = v_uint16() self.BaseLow = v_uint16() self.HighWord = _unnamed_7384() class NAMED_PIPE_CREATE_PARAMETERS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.NamedPipeType = v_uint32() self.ReadMode = v_uint32() self.CompletionMode = v_uint32() self.MaximumInstances = v_uint32() self.InboundQuota = v_uint32() self.OutboundQuota = v_uint32() self.DefaultTimeout = LARGE_INTEGER() self.TimeoutSpecified = v_uint8() self._pad0028 = v_bytes(size=7) class JOB_CPU_RATE_CONTROL(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) class NT_TIB(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ExceptionList = v_ptr32() self.StackBase = v_ptr32() self.StackLimit = v_ptr32() self.SubSystemTib = v_ptr32() self.FiberData = v_ptr32() self.ArbitraryUserPointer = v_ptr32() self.Self = v_ptr32() class HEAP_LFH_UNUSED_BYTES_INFO(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.UnusedBytes = v_uint16() class _unnamed_9489(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ProviderId = v_uint32() self.DataPath = v_ptr32() self.BufferSize = v_uint32() self.Buffer = v_ptr32() class RTL_STD_LIST_ENTRY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Next = v_ptr32() class POWER_STATE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.SystemState = v_uint32() class UNICODE_STRING(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Length = v_uint16() self.MaximumLength = v_uint16() self.Buffer = v_ptr32() class _unnamed_12438(vstruct.VStruct): def __init__(self): 
vstruct.VStruct.__init__(self) self.Length = v_uint32() self.Alignment = v_uint32() self.MinimumAddress = LARGE_INTEGER() self.MaximumAddress = LARGE_INTEGER() class HEAP_LIST_LOOKUP(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ExtendedLookup = v_ptr32() self.ArraySize = v_uint32() self.ExtraItem = v_uint32() self.ItemCount = v_uint32() self.OutOfRangeItems = v_uint32() self.BaseIndex = v_uint32() self.ListHead = v_ptr32() self.ListsInUseUlong = v_ptr32() self.ListHints = v_ptr32() class _unnamed_9485(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.AllocatedResources = v_ptr32() self.AllocatedResourcesTranslated = v_ptr32() class INTERRUPT_REMAPPING_INFO(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.IrtIndex = v_uint32() self._pad0008 = v_bytes(size=4) self.u = _unnamed_12550() class EPROCESS_QUOTA_BLOCK(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) class HEAP_DEBUGGING_INFORMATION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.InterceptorFunction = v_ptr32() self.InterceptorValue = v_uint16() self._pad0008 = v_bytes(size=2) self.ExtendedOptions = v_uint32() self.StackTraceDepth = v_uint32() self.MinTotalBlockSize = v_uint32() self.MaxTotalBlockSize = v_uint32() self.HeapLeakEnumerationRoutine = v_ptr32() class _unnamed_10891(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.BankNumber = v_uint8() self.Reserved2 = vstruct.VArray([ v_uint8() for i in xrange(7) ]) self.Status = MCI_STATS() self.Address = MCI_ADDR() self.Misc = v_uint64() class ACCESS_REASONS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Data = vstruct.VArray([ v_uint32() for i in xrange(32) ]) class MMSUPPORT(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ExitGate = v_ptr32() self.AccessLog = v_ptr32() self.WorkingSetMutex = EX_PUSH_LOCK() self.WorkingSetExpansionLinks = LIST_ENTRY() 
self.AgeDistribution = vstruct.VArray([ v_uint32() for i in xrange(7) ]) self.MinimumWorkingSetSize = v_uint32() self.WorkingSetLeafSize = v_uint32() self.WorkingSetLeafPrivateSize = v_uint32() self.WorkingSetSize = v_uint32() self.WorkingSetPrivateSize = v_uint32() self.MaximumWorkingSetSize = v_uint32() self.ChargedWslePages = v_uint32() self.ActualWslePages = v_uint32() self.WorkingSetSizeOverhead = v_uint32() self.PeakWorkingSetSize = v_uint32() self.HardFaultCount = v_uint32() self.VmWorkingSetList = v_ptr32() self.NextPageColor = v_uint16() self.LastTrimStamp = v_uint16() self.PageFaultCount = v_uint32() self.TrimmedPageCount = v_uint32() self.ForceTrimPages = v_uint32() self.Flags = MMSUPPORT_FLAGS() self.WsSwapSupport = v_ptr32() class HEAP_LFH_CALLBACKS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Allocate = v_ptr32() self.Free = v_ptr32() self.Commit = v_ptr32() self.Decommit = v_ptr32() class KDPC(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.TargetInfoAsUlong = v_uint32() self.DpcListEntry = SINGLE_LIST_ENTRY() self.ProcessorHistory = v_uint32() self.DeferredRoutine = v_ptr32() self.DeferredContext = v_ptr32() self.SystemArgument1 = v_ptr32() self.SystemArgument2 = v_ptr32() self.DpcData = v_ptr32() class _unnamed_10794(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.FilePointerIndex = v_uint32() class EVENT_HEADER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Size = v_uint16() self.HeaderType = v_uint16() self.Flags = v_uint16() self.EventProperty = v_uint16() self.ThreadId = v_uint32() self.ProcessId = v_uint32() self.TimeStamp = LARGE_INTEGER() self.ProviderId = GUID() self.EventDescriptor = EVENT_DESCRIPTOR() self.KernelTime = v_uint32() self.UserTime = v_uint32() self.ActivityId = GUID() class KEVENT(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Header = DISPATCHER_HEADER() class KSEMAPHORE(vstruct.VStruct): def 
__init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.Limit = v_uint32()

# NOTE(review): auto-generated vstruct definitions of 32-bit Windows
# structures (all pointers are v_ptr32). Field names/order mirror the PDB
# layouts these were generated from — do not hand-edit field order or types.
# The fragment above completes a dispatcher-header struct whose class line
# lies in the preceding chunk; KPRCB at the bottom continues past this chunk.

class MM_PAGE_ACCESS_INFO_HEADER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Link = SINGLE_LIST_ENTRY()
        self.Type = v_uint32()
        self.EmptySequenceNumber = v_uint32()
        self._pad0010 = v_bytes(size=4)
        self.CreateTime = v_uint64()
        self.EmptyTime = v_uint64()
        self.PageEntry = v_ptr32()
        self.FileEntry = v_ptr32()
        self.FirstFileEntry = v_ptr32()
        self.Process = v_ptr32()
        self.SessionId = v_uint32()
        self._pad0038 = v_bytes(size=4)

class HEAP_LFH_SUBSEGMENT_DELAY_FREE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()

class _unnamed_6565(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.HighPart = v_uint32()

class OBJECT_TYPE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TypeList = LIST_ENTRY()
        self.Name = UNICODE_STRING()
        self.DefaultObject = v_ptr32()
        self.Index = v_uint8()
        self._pad0018 = v_bytes(size=3)
        self.TotalNumberOfObjects = v_uint32()
        self.TotalNumberOfHandles = v_uint32()
        self.HighWaterNumberOfObjects = v_uint32()
        self.HighWaterNumberOfHandles = v_uint32()
        self.TypeInfo = OBJECT_TYPE_INITIALIZER()
        self.TypeLock = EX_PUSH_LOCK()
        self.Key = v_uint32()
        self.CallbackList = LIST_ENTRY()

class _unnamed_9341(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InterfaceType = v_ptr32()
        self.Size = v_uint16()
        self.Version = v_uint16()
        self.Interface = v_ptr32()
        self.InterfaceSpecificData = v_ptr32()

class HANDLE_TABLE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NextHandleNeedingPool = v_uint32()
        self.ExtraInfoPages = v_uint32()
        self.TableCode = v_uint32()
        self.QuotaProcess = v_ptr32()
        self.HandleTableList = LIST_ENTRY()
        self.UniqueProcessId = v_uint32()
        self.Flags = v_uint32()
        self.HandleContentionEvent = EX_PUSH_LOCK()
        self.HandleTableLock = EX_PUSH_LOCK()
        self.FreeLists = vstruct.VArray([ HANDLE_TABLE_FREE_LIST() for i in xrange(1) ])

class MMSUPPORT_FLAGS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WorkingSetType = v_uint8()
        self.SessionMaster = v_uint8()
        self.MemoryPriority = v_uint8()
        self.WsleDeleted = v_uint8()

class HEAP_LOCK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Lock = _unnamed_9993()

class EXCEPTION_REGISTRATION_RECORD(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Handler = v_ptr32()

class RTL_SPLAY_LINKS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Parent = v_ptr32()
        self.LeftChild = v_ptr32()
        self.RightChild = v_ptr32()

class FILE_BASIC_INFORMATION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CreationTime = LARGE_INTEGER()
        self.LastAccessTime = LARGE_INTEGER()
        self.LastWriteTime = LARGE_INTEGER()
        self.ChangeTime = LARGE_INTEGER()
        self.FileAttributes = v_uint32()
        self._pad0028 = v_bytes(size=4)

class LIST_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_ptr32()
        self.Blink = v_ptr32()

class M128A(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Low = v_uint64()
        self.High = v_uint64()

class RTL_DYNAMIC_HASH_TABLE_ENUMERATOR(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.HashEntry = RTL_DYNAMIC_HASH_TABLE_ENTRY()
        self.ChainHead = v_ptr32()
        self.BucketIndex = v_uint32()

class HEAP_LFH_BUCKET(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.State = HEAP_LFH_SUBSEGMENT_OWNER()
        self.AffinitySlots = v_ptr32()
        self.TotalBlockCount = v_uint32()
        self.TotalSubsegmentCount = v_uint32()

class GUID(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Data1 = v_uint32()
        self.Data2 = v_uint16()
        self.Data3 = v_uint16()
        self.Data4 = vstruct.VArray([ v_uint8() for i in xrange(8) ])

class HEAP_UCR_DESCRIPTOR(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = LIST_ENTRY()
        self.SegmentEntry = LIST_ENTRY()
        self.Address = v_ptr32()
        self.Size = v_uint32()

class MCA_EXCEPTION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VersionNumber = v_uint32()
        self.ExceptionType = v_uint32()
        self.TimeStamp = LARGE_INTEGER()
        self.ProcessorNumber = v_uint32()
        self.Reserved1 = v_uint32()
        self.u = _unnamed_10879()
        self.ExtCnt = v_uint32()
        self.Reserved3 = v_uint32()
        self.ExtReg = vstruct.VArray([ v_uint64() for i in xrange(24) ])

class KAPC_STATE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ApcListHead = vstruct.VArray([ LIST_ENTRY() for i in xrange(2) ])
        self.Process = v_ptr32()
        self.InProgressFlags = v_uint8()
        self.KernelApcPending = v_uint8()
        self.UserApcPending = v_uint8()
        self._pad0018 = v_bytes(size=1)

class COUNTER_READING(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint32()
        self.Index = v_uint32()
        self.Start = v_uint64()
        self.Total = v_uint64()

class RTL_AVL_TREE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Root = v_ptr32()

class PEBS_DS_SAVE_AREA(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BtsBufferBase = v_uint64()
        self.BtsIndex = v_uint64()
        self.BtsAbsoluteMaximum = v_uint64()
        self.BtsInterruptThreshold = v_uint64()
        self.PebsBufferBase = v_uint64()
        self.PebsIndex = v_uint64()
        self.PebsAbsoluteMaximum = v_uint64()
        self.PebsInterruptThreshold = v_uint64()
        self.PebsCounterReset0 = v_uint64()
        self.PebsCounterReset1 = v_uint64()
        self.PebsCounterReset2 = v_uint64()
        self.PebsCounterReset3 = v_uint64()

class KDPC_DATA(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DpcList = KDPC_LIST()
        self.DpcLock = v_uint32()
        self.DpcQueueDepth = v_uint32()
        self.DpcCount = v_uint32()
        self.ActiveDpc = v_ptr32()

class KIDTENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Offset = v_uint16()
        self.Selector = v_uint16()
        self.Access = v_uint16()
        self.ExtendedOffset = v_uint16()

class XSAVE_AREA(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LegacyState = XSAVE_FORMAT()
        self.Header = XSAVE_AREA_HEADER()

class GENERIC_MAPPING(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.GenericRead = v_uint32()
        self.GenericWrite = v_uint32()
        self.GenericExecute = v_uint32()
        self.GenericAll = v_uint32()

# I/O request packet (wdm.h IRP), 32-bit layout.
class IRP(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.MdlAddress = v_ptr32()
        self.Flags = v_uint32()
        self.AssociatedIrp = _unnamed_9027()
        self.ThreadListEntry = LIST_ENTRY()
        self.IoStatus = IO_STATUS_BLOCK()
        self.RequestorMode = v_uint8()
        self.PendingReturned = v_uint8()
        self.StackCount = v_uint8()
        self.CurrentLocation = v_uint8()
        self.Cancel = v_uint8()
        self.CancelIrql = v_uint8()
        self.ApcEnvironment = v_uint8()
        self.AllocationFlags = v_uint8()
        self.UserIosb = v_ptr32()
        self.UserEvent = v_ptr32()
        self.Overlay = _unnamed_9029()
        self.CancelRoutine = v_ptr32()
        self.UserBuffer = v_ptr32()
        self.Tail = _unnamed_9032()

class KTHREAD_COUNTERS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WaitReasonBitMap = v_uint64()
        self.UserData = v_ptr32()
        self.Flags = v_uint32()
        self.ContextSwitches = v_uint32()
        self._pad0018 = v_bytes(size=4)
        self.CycleTimeBias = v_uint64()
        self.HardwareCounters = v_uint64()
        self.HwCounter = vstruct.VArray([ COUNTER_READING() for i in xrange(16) ])

class _unnamed_9044(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.UserApcRoutine = v_ptr32()
        self.UserApcContext = v_ptr32()

class DRIVER_OBJECT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.DeviceObject = v_ptr32()
        self.Flags = v_uint32()
        self.DriverStart = v_ptr32()
        self.DriverSize = v_uint32()
        self.DriverSection = v_ptr32()
        self.DriverExtension = v_ptr32()
        self.DriverName = UNICODE_STRING()
        self.HardwareDatabase = v_ptr32()
        self.FastIoDispatch = v_ptr32()
        self.DriverInit = v_ptr32()
        self.DriverStartIo = v_ptr32()
        self.DriverUnload = v_ptr32()
        self.MajorFunction = vstruct.VArray([ v_ptr32() for i in xrange(28) ])

class IO_MINI_COMPLETION_PACKET_USER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = LIST_ENTRY()
        self.PacketType = v_uint32()
        self.KeyContext = v_ptr32()
        self.ApcContext = v_ptr32()
        self.IoStatus = v_uint32()
        self.IoStatusInformation = v_uint32()
        self.MiniPacketCallback = v_ptr32()
        self.Context = v_ptr32()
        self.Allocated = v_uint8()
        self._pad0028 = v_bytes(size=3)

class FILE_GET_QUOTA_INFORMATION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NextEntryOffset = v_uint32()
        self.SidLength = v_uint32()
        self.Sid = SID()

class KGATE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()

class IO_COMPLETION_CONTEXT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Port = v_ptr32()
        self.Key = v_ptr32()

class DRIVER_EXTENSION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DriverObject = v_ptr32()
        self.AddDevice = v_ptr32()
        self.Count = v_uint32()
        self.ServiceKeyName = UNICODE_STRING()
        self.ClientDriverExtension = v_ptr32()
        self.FsFilterCallbacks = v_ptr32()
        self.KseCallbacks = v_ptr32()
        self.DvCallbacks = v_ptr32()
        self.VerifierContext = v_ptr32()

class flags(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Removable = v_uint8()

class MM_PAGE_ACCESS_INFO_FLAGS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.File = _unnamed_10793()

class _unnamed_8810(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = LIST_ENTRY()
        self._pad0028 = v_bytes(size=32)

class WHEA_PERSISTENCE_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint64()

class HEAP_VIRTUAL_ALLOC_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Entry = LIST_ENTRY()
        self.ExtraStuff = HEAP_ENTRY_EXTRA()
        self.CommitSize = v_uint32()
        self.ReserveSize = v_uint32()
        self.BusyBlock = HEAP_ENTRY()

class INTERRUPT_HT_INTR_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = _unnamed_12559()
        self.HighPart = _unnamed_12560()

class KENTROPY_TIMING_STATE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.EntropyCount = v_uint32()
        self.Buffer = vstruct.VArray([ v_uint32() for i in xrange(64) ])
        self.Dpc = KDPC()
        self.LastDeliveredBuffer = v_uint32()

class EXCEPTION_RECORD(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExceptionCode = v_uint32()
        self.ExceptionFlags = v_uint32()
        self.ExceptionRecord = v_ptr32()
        self.ExceptionAddress = v_ptr32()
        self.NumberParameters = v_uint32()
        self.ExceptionInformation = vstruct.VArray([ v_uint32() for i in xrange(15) ])

class EXT_DELETE_PARAMETERS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint32()
        self.Reserved = v_uint32()
        self.DeleteCallback = v_ptr32()
        self.DeleteContext = v_ptr32()

class PROCESSOR_NUMBER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Group = v_uint16()
        self.Number = v_uint8()
        self.Reserved = v_uint8()

class MM_PAGE_ACCESS_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = MM_PAGE_ACCESS_INFO_FLAGS()
        self.PointerProtoPte = v_ptr32()

# Per-processor control region (x86 KPCR); PrcbData embeds the full KPRCB.
class KPCR(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NtTib = NT_TIB()
        self.SelfPcr = v_ptr32()
        self.Prcb = v_ptr32()
        self.Irql = v_uint8()
        self._pad0028 = v_bytes(size=3)
        self.IRR = v_uint32()
        self.IrrActive = v_uint32()
        self.IDR = v_uint32()
        self.KdVersionBlock = v_ptr32()
        self.IDT = v_ptr32()
        self.GDT = v_ptr32()
        self.TSS = v_ptr32()
        self.MajorVersion = v_uint16()
        self.MinorVersion = v_uint16()
        self.SetMember = v_uint32()
        self.StallScaleFactor = v_uint32()
        self.SpareUnused = v_uint8()
        self.Number = v_uint8()
        self.Spare0 = v_uint8()
        self.SecondLevelCacheAssociativity = v_uint8()
        self.VdmAlert = v_uint32()
        self.KernelReserved = vstruct.VArray([ v_uint32() for i in xrange(14) ])
        self.SecondLevelCacheSize = v_uint32()
        self.HalReserved = vstruct.VArray([ v_uint32() for i in xrange(16) ])
        self.InterruptMode = v_uint32()
        self.Spare1 = v_uint8()
        self._pad00dc = v_bytes(size=3)
        self.KernelReserved2 = vstruct.VArray([ v_uint32() for i in xrange(17) ])
        self.PrcbData = KPRCB()

class RTL_RB_TREE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Root = v_ptr32()
        self.Min = v_ptr32()

class IMAGE_FILE_HEADER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Machine = v_uint16()
        self.NumberOfSections = v_uint16()
        self.TimeDateStamp = v_uint32()
        self.PointerToSymbolTable = v_uint32()
        self.NumberOfSymbols = v_uint32()
        self.SizeOfOptionalHeader = v_uint16()
        self.Characteristics = v_uint16()

class LFH_BLOCK_ZONE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = LIST_ENTRY()
        self.NextIndex = v_uint32()

class FILE_STANDARD_INFORMATION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AllocationSize = LARGE_INTEGER()
        self.EndOfFile = LARGE_INTEGER()
        self.NumberOfLinks = v_uint32()
        self.DeletePending = v_uint8()
        self.Directory = v_uint8()
        self._pad0018 = v_bytes(size=2)

class _unnamed_10970(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BaseMid = v_uint8()
        self.Flags1 = v_uint8()
        self.Flags2 = v_uint8()
        self.BaseHi = v_uint8()

class _unnamed_12540(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IntrInfo = INTERRUPT_HT_INTR_INFO()

class _unnamed_12541(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DestinationMode = v_uint32()

class _unnamed_12468(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Priority = v_uint32()
        self.Reserved1 = v_uint32()
        self.Reserved2 = v_uint32()

class LFH_HEAP(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Lock = RTL_SRWLOCK()
        self.SubSegmentZones = LIST_ENTRY()
        self.Heap = v_ptr32()
        self.NextSegmentInfoArrayAddress = v_ptr32()
        self.FirstUncommittedAddress = v_ptr32()
        self.ReservedAddressLimit = v_ptr32()
        self.SegmentCreate = v_uint32()
        self.SegmentDelete = v_uint32()
        self.MinimumCacheDepth = v_uint32()
        self.CacheShiftThreshold = v_uint32()
        self.SizeInCache = v_uint32()
        self.RunInfo = HEAP_BUCKET_RUN_INFO()
        self.UserBlockCache = vstruct.VArray([ USER_MEMORY_CACHE_ENTRY() for i in xrange(12) ])
        self.MemoryPolicies = HEAP_LFH_MEM_POLICIES()
        self.Buckets = vstruct.VArray([ HEAP_BUCKET() for i in xrange(129) ])
        self.SegmentInfoArrays = vstruct.VArray([ v_ptr32() for i in xrange(129) ])
        self.AffinitizedInfoArrays = vstruct.VArray([ v_ptr32() for i in xrange(129) ])
        self.SegmentAllocator = v_ptr32()
        self._pad07d0 = v_bytes(size=4)
        self.LocalData = vstruct.VArray([ HEAP_LOCAL_DATA() for i in xrange(1) ])

class _unnamed_12463(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.MinBusNumber = v_uint32()
        self.MaxBusNumber = v_uint32()
        self.Reserved = v_uint32()

class IOP_IRP_STACK_PROFILER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Profile = vstruct.VArray([ v_uint32() for i in xrange(20) ])
        self.TotalIrps = v_uint32()

class HEAP_BUCKET_RUN_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Bucket = v_uint32()
        self.RunLength = v_uint32()

class PEB_LDR_DATA(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.Initialized = v_uint8()
        self._pad0008 = v_bytes(size=3)
        self.SsHandle = v_ptr32()
        self.InLoadOrderModuleList = LIST_ENTRY()
        self.InMemoryOrderModuleList = LIST_ENTRY()
        self.InInitializationOrderModuleList = LIST_ENTRY()
        self.EntryInProgress = v_ptr32()
        self.ShutdownInProgress = v_uint8()
        self._pad002c = v_bytes(size=3)
        self.ShutdownThreadId = v_ptr32()

class HEAP_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.Flags = v_uint8()
        self.SmallTagIndex = v_uint8()
        self.PreviousSize = v_uint16()
        self.SegmentOffset = v_uint8()
        self.UnusedBytes = v_uint8()

class _unnamed_11849(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Channel = v_uint32()
        self.RequestLine = v_uint32()
        self.TransferWidth = v_uint8()
        self.Reserved1 = v_uint8()
        self.Reserved2 = v_uint8()
        self.Reserved3 = v_uint8()

class _unnamed_9383(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Capabilities = v_ptr32()

class SECURITY_SUBJECT_CONTEXT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ClientToken = v_ptr32()
        self.ImpersonationLevel = v_uint32()
        self.PrimaryToken = v_ptr32()
        self.ProcessAuditId = v_ptr32()

class _unnamed_11845(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Channel = v_uint32()
        self.Port = v_uint32()
        self.Reserved1 = v_uint32()

class PROCESSOR_PROFILE_CONTROL_AREA(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PebsDsSaveArea = PEBS_DS_SAVE_AREA()

class _unnamed_11842(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Raw = _unnamed_11837()

class _unnamed_11511(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Generic = _unnamed_11829()

class _unnamed_9144(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityContext = v_ptr32()
        self.Options = v_uint32()
        self.Reserved = v_uint16()
        self.ShareAccess = v_uint16()
        self.Parameters = v_ptr32()

class INTERFACE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.Version = v_uint16()
        self.Context = v_ptr32()
        self.InterfaceReference = v_ptr32()
        self.InterfaceDereference = v_ptr32()

class SLIST_HEADER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Alignment = v_uint64()

class IMAGE_DATA_DIRECTORY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VirtualAddress = v_uint32()
        self.Size = v_uint32()

class FILE_OBJECT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.DeviceObject = v_ptr32()
        self.Vpb = v_ptr32()
        self.FsContext = v_ptr32()
        self.FsContext2 = v_ptr32()
        self.SectionObjectPointer = v_ptr32()
        self.PrivateCacheMap = v_ptr32()
        self.FinalStatus = v_uint32()
        self.RelatedFileObject = v_ptr32()
        self.LockOperation = v_uint8()
        self.DeletePending = v_uint8()
        self.ReadAccess = v_uint8()
        self.WriteAccess = v_uint8()
        self.DeleteAccess = v_uint8()
        self.SharedRead = v_uint8()
        self.SharedWrite = v_uint8()
        self.SharedDelete = v_uint8()
        self.Flags = v_uint32()
        self.FileName = UNICODE_STRING()
        self.CurrentByteOffset = LARGE_INTEGER()
        self.Waiters = v_uint32()
        self.Busy = v_uint32()
        self.LastLock = v_ptr32()
        self.Lock = KEVENT()
        self.Event = KEVENT()
        self.CompletionContext = v_ptr32()
        self.IrpListLock = v_uint32()
        self.IrpList = LIST_ENTRY()
        self.FileObjectExtension = v_ptr32()

class PPM_IDLE_STATES(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InterfaceVersion = v_uint8()
        self.ForceIdle = v_uint8()
        self.EstimateIdleDuration = v_uint8()
        self.ExitLatencyTraceEnabled = v_uint8()
        self.NonInterruptibleTransition = v_uint8()
        self._pad0008 = v_bytes(size=3)
        self.ExitLatencyCountdown = v_uint32()
        self.TargetState = v_uint32()
        self.ActualState = v_uint32()
        self.OldState = v_uint32()
        self.OverrideIndex = v_uint32()
        self.ProcessorIdleCount = v_uint32()
        self.Type = v_uint32()
        self.ReasonFlags = v_uint16()
        self._pad0028 = v_bytes(size=2)
        self.InitiateWakeStamp = v_uint64()
        self.PreviousStatus = v_uint32()
        self.PreviousCancelReason = v_uint32()
        self.PrimaryProcessorMask = KAFFINITY_EX()
        self.SecondaryProcessorMask = KAFFINITY_EX()
        self.IdlePrepare = v_ptr32()
        self.IdleExecute = v_ptr32()
        self.IdlePreselect = v_ptr32()
        self.IdleTest = v_ptr32()
        self.IdleComplete = v_ptr32()
        self.IdleCancel = v_ptr32()
        self.IdleIsHalted = v_ptr32()
        self.IdleInitiateWake = v_ptr32()
        self.QueryPlatformStateResidency = v_ptr32()
        self._pad0078 = v_bytes(size=4)
        self.PrepareInfo = PROCESSOR_IDLE_PREPARE_INFO()
        self.Tracing = v_ptr32()
        self.State = vstruct.VArray([ PPM_IDLE_STATE() for i in xrange(1) ])
        self._pad0110 = v_bytes(size=4)

class HEAP_SUBSEGMENT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LocalInfo = v_ptr32()
        self.UserBlocks = v_ptr32()
        self.DelayFreeList = SLIST_HEADER()
        self.AggregateExchg = INTERLOCK_SEQ()
        self.BlockSize = v_uint16()
        self.Flags = v_uint16()
        self.BlockCount = v_uint16()
        self.SizeIndex = v_uint8()
        self.AffinityIndex = v_uint8()
        self.Lock = v_uint32()
        self.SFreeListEntry = SINGLE_LIST_ENTRY()
        self._pad0028 = v_bytes(size=4)

class ERESOURCE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SystemResourcesList = LIST_ENTRY()
        self.OwnerTable = v_ptr32()
        self.ActiveCount = v_uint16()
        self.Flag = v_uint16()
        self.SharedWaiters = v_ptr32()
        self.ExclusiveWaiters = v_ptr32()
        self.OwnerEntry = OWNER_ENTRY()
        self.ActiveEntries = v_uint32()
        self.ContentionCount = v_uint32()
        self.NumberOfSharedWaiters = v_uint32()
        self.NumberOfExclusiveWaiters = v_uint32()
        self.Address = v_ptr32()
        self.SpinLock = v_uint32()

class _unnamed_9319(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.StartSid = v_ptr32()
        self.SidList = v_ptr32()
        self.SidListLength = v_uint32()

class _unnamed_7384(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Bytes = _unnamed_10970()

class PROCESSOR_IDLE_DEPENDENCY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ProcessorIndex = v_uint32()
        self.ExpectedState = v_uint8()
        self.AllowDeeperStates = v_uint8()
        self.LooseDependency = v_uint8()
        self._pad0008 = v_bytes(size=1)

class HEAP_LFH_SUBSEGMENT_OWNER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BucketIndex = v_uint8()
        self.IsBucket = v_uint8()
        self.SlotIndex = v_uint16()
        self.AvailableSubsegmentCount = v_uint32()
        self.Lock = RTL_SRWLOCK()
        self.AvailableSubsegmentList = LIST_ENTRY()
        self.FullSubsegmentList = LIST_ENTRY()

# Process Environment Block (user-mode, 32-bit layout).
class PEB(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InheritedAddressSpace = v_uint8()
        self.ReadImageFileExecOptions = v_uint8()
        self.BeingDebugged = v_uint8()
        self.BitField = v_uint8()
        self.Mutant = v_ptr32()
        self.ImageBaseAddress = v_ptr32()
        self.Ldr = v_ptr32()
        self.ProcessParameters = v_ptr32()
        self.SubSystemData = v_ptr32()
        self.ProcessHeap = v_ptr32()
        self.FastPebLock = v_ptr32()
        self.AtlThunkSListPtr = v_ptr32()
        self.IFEOKey = v_ptr32()
        self.CrossProcessFlags = v_uint32()
        self.KernelCallbackTable = v_ptr32()
        self.SystemReserved = vstruct.VArray([ v_uint32() for i in xrange(1) ])
        self.AtlThunkSListPtr32 = v_uint32()
        self.ApiSetMap = v_ptr32()
        self.TlsExpansionCounter = v_uint32()
        self.TlsBitmap = v_ptr32()
        self.TlsBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(2) ])
        self.ReadOnlySharedMemoryBase = v_ptr32()
        self.SparePvoid0 = v_ptr32()
        self.ReadOnlyStaticServerData = v_ptr32()
        self.AnsiCodePageData = v_ptr32()
        self.OemCodePageData = v_ptr32()
        self.UnicodeCaseTableData = v_ptr32()
        self.NumberOfProcessors = v_uint32()
        self.NtGlobalFlag = v_uint32()
        self._pad0070 = v_bytes(size=4)
        self.CriticalSectionTimeout = LARGE_INTEGER()
        self.HeapSegmentReserve = v_uint32()
        self.HeapSegmentCommit = v_uint32()
        self.HeapDeCommitTotalFreeThreshold = v_uint32()
        self.HeapDeCommitFreeBlockThreshold = v_uint32()
        self.NumberOfHeaps = v_uint32()
        self.MaximumNumberOfHeaps = v_uint32()
        self.ProcessHeaps = v_ptr32()
        self.GdiSharedHandleTable = v_ptr32()
        self.ProcessStarterHelper = v_ptr32()
        self.GdiDCAttributeList = v_uint32()
        self.LoaderLock = v_ptr32()
        self.OSMajorVersion = v_uint32()
        self.OSMinorVersion = v_uint32()
        self.OSBuildNumber = v_uint16()
        self.OSCSDVersion = v_uint16()
        self.OSPlatformId = v_uint32()
        self.ImageSubsystem = v_uint32()
        self.ImageSubsystemMajorVersion = v_uint32()
        self.ImageSubsystemMinorVersion = v_uint32()
        self.ActiveProcessAffinityMask = v_uint32()
        self.GdiHandleBuffer = vstruct.VArray([ v_uint32() for i in xrange(34) ])
        self.PostProcessInitRoutine = v_ptr32()
        self.TlsExpansionBitmap = v_ptr32()
        self.TlsExpansionBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(32) ])
        self.SessionId = v_uint32()
        self.AppCompatFlags = ULARGE_INTEGER()
        self.AppCompatFlagsUser = ULARGE_INTEGER()
        self.pShimData = v_ptr32()
        self.AppCompatInfo = v_ptr32()
        self.CSDVersion = UNICODE_STRING()
        self.ActivationContextData = v_ptr32()
        self.ProcessAssemblyStorageMap = v_ptr32()
        self.SystemDefaultActivationContextData = v_ptr32()
        self.SystemAssemblyStorageMap = v_ptr32()
        self.MinimumStackCommit = v_uint32()
        self.FlsCallback = v_ptr32()
        self.FlsListHead = LIST_ENTRY()
        self.FlsBitmap = v_ptr32()
        self.FlsBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(4) ])
        self.FlsHighIndex = v_uint32()
        self.WerRegistrationData = v_ptr32()
        self.WerShipAssertPtr = v_ptr32()
        self.pUnused = v_ptr32()
        self.pImageHeaderHash = v_ptr32()
        self.TracingFlags = v_uint32()
        self._pad0248 = v_bytes(size=4)
        self.CsrServerReadOnlySharedMemoryBase = v_uint64()

class KSCB(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.GenerationCycles = v_uint64()
        self.UnderQuotaCycleTarget = v_uint64()
        self.RankCycleTarget = v_uint64()
        self.LongTermCycles = v_uint64()
        self.LastReportedCycles = v_uint64()
        self.OverQuotaHistory = v_uint64()
        self.ReadyTime = v_uint64()
        self.InsertTime = v_uint64()
        self.PerProcessorList = LIST_ENTRY()
        self.QueueNode = RTL_BALANCED_NODE()
        self.Inserted = v_uint8()
        self.Spare2 = v_uint8()
        self.ReadySummary = v_uint16()
        self.Rank = v_uint32()
        self.ReadyListHead = vstruct.VArray([ LIST_ENTRY() for i in xrange(16) ])
        self._pad00e0 = v_bytes(size=4)

class TP_POOL(vstruct.VStruct):
    # Opaque in the source PDB: no public fields emitted.
    def __init__(self):
        vstruct.VStruct.__init__(self)

class KSHARED_READY_QUEUE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Lock = v_uint32()
        self.ReadySummary = v_uint32()
        self.ReadyListHead = vstruct.VArray([ LIST_ENTRY() for i in xrange(32) ])
        self.RunningSummary = vstruct.VArray([ v_uint8() for i in xrange(32) ])
        self.Span = v_uint32()
        self.LowProcIndex = v_uint32()
        self.QueueIndex = v_uint32()
        self.ProcCount = v_uint32()
        self.Affinity = v_uint32()

class RTL_BALANCED_LINKS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Parent = v_ptr32()
        self.LeftChild = v_ptr32()
        self.RightChild = v_ptr32()
        self.Balance = v_uint8()
        self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(3) ])

class EX_PUSH_LOCK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Locked = v_uint32()

class XSTATE_CONTEXT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Mask = v_uint64()
        self.Length = v_uint32()
        self.Reserved1 = v_uint32()
        self.Area = v_ptr32()
        self.Reserved2 = v_uint32()
        self.Buffer = v_ptr32()
        self.Reserved3 = v_uint32()

class HEAP_FREE_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.Flags = v_uint8()
        self.SmallTagIndex = v_uint8()
        self.PreviousSize = v_uint16()
        self.SegmentOffset = v_uint8()
        self.UnusedBytes = v_uint8()
        self.FreeList = LIST_ENTRY()

class KSTACK_COUNT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Value = v_uint32()

class STACK_TRACE_DATABASE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(56) ])
        self.Reserved2 = v_ptr32()
        self.PeakHashCollisionListLength = v_uint32()
        self.LowerMemoryStart = v_ptr32()
        self.PreCommitted = v_uint8()
        self.DumpInProgress = v_uint8()
        self._pad0048 = v_bytes(size=2)
        self.CommitBase = v_ptr32()
        self.CurrentLowerCommitLimit = v_ptr32()
        self.CurrentUpperCommitLimit = v_ptr32()
        self.NextFreeLowerMemory = v_ptr32()
        self.NextFreeUpperMemory = v_ptr32()
        self.NumberOfEntriesLookedUp = v_uint32()
        self.NumberOfEntriesAdded = v_uint32()
        self.EntryIndexArray = v_ptr32()
        self.NumberOfEntriesAllocated = v_uint32()
        self.NumberOfEntriesAvailable = v_uint32()
        self.NumberOfAllocationFailures = v_uint32()
        self._pad0078 = v_bytes(size=4)
        self.FreeLists = vstruct.VArray([ SLIST_HEADER() for i in xrange(32) ])
        self.NumberOfBuckets = v_uint32()
        self.Buckets = vstruct.VArray([ RTL_STD_LIST_HEAD() for i in xrange(1) ])
        self._pad0188 = v_bytes(size=4)

class HEAP_SEGMENT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Entry = HEAP_ENTRY()
        self.SegmentSignature = v_uint32()
        self.SegmentFlags = v_uint32()
        self.SegmentListEntry = LIST_ENTRY()
        self.Heap = v_ptr32()
        self.BaseAddress = v_ptr32()
        self.NumberOfPages = v_uint32()
        self.FirstEntry = v_ptr32()
        self.LastValidEntry = v_ptr32()
        self.NumberOfUnCommittedPages = v_uint32()
        self.NumberOfUnCommittedRanges = v_uint32()
        self.SegmentAllocatorBackTraceIndex = v_uint16()
        self.Reserved = v_uint16()
        self.UCRSegmentList = LIST_ENTRY()

class PERFINFO_PPM_STATE_SELECTION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SelectedState = v_uint32()
        self.VetoedStates = v_uint32()
        self.VetoReason = vstruct.VArray([ v_uint32() for i in xrange(1) ])

class HANDLE_TABLE_FREE_LIST(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.FreeListLock = EX_PUSH_LOCK()
        self.FirstFreeHandleEntry = v_ptr32()
        self.LastFreeHandleEntry = v_ptr32()
        self.HandleCount = v_uint32()
        self.HighWaterMark = v_uint32()
        self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(8) ])

class WHEA_ERROR_RECORD_HEADER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        self.Revision = WHEA_REVISION()
        self.SignatureEnd = v_uint32()
        self.SectionCount = v_uint16()
        self.Severity = v_uint32()
        self.ValidBits = WHEA_ERROR_RECORD_HEADER_VALIDBITS()
        self.Length = v_uint32()
        self.Timestamp = WHEA_TIMESTAMP()
        self.PlatformId = GUID()
        self.PartitionId = GUID()
        self.CreatorId = GUID()
        self.NotifyType = GUID()
        self.RecordId = v_uint64()
        self.Flags = WHEA_ERROR_RECORD_HEADER_FLAGS()
        self.PersistenceInfo = WHEA_PERSISTENCE_INFO()
        self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(12) ])

class EVENT_DESCRIPTOR(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Id = v_uint16()
        self.Version = v_uint8()
        self.Channel = v_uint8()
        self.Level = v_uint8()
        self.Opcode = v_uint8()
        self.Task = v_uint16()
        self.Keyword = v_uint64()

class _unnamed_9429(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InPath = v_uint8()
        self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(3) ])
        self.Type = v_uint32()

class _unnamed_12455(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MinimumChannel = v_uint32()
        self.MaximumChannel = v_uint32()

class FLS_CALLBACK_INFO(vstruct.VStruct):
    # Opaque in the source PDB: no public fields emitted.
    def __init__(self):
        vstruct.VStruct.__init__(self)

class _unnamed_12458(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.RequestLine = v_uint32()
        self.Reserved = v_uint32()
        self.Channel = v_uint32()
        self.TransferWidth = v_uint32()

class ACL(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AclRevision = v_uint8()
        self.Sbz1 = v_uint8()
        self.AclSize = v_uint16()
        self.AceCount = v_uint16()
        self.Sbz2 = v_uint16()

class _unnamed_10886(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.McaCod = v_uint16()
        self.MsCod = v_uint16()
        self.OtherInfo = v_uint32()

class LIST_ENTRY64(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_uint64()
        self.Blink = v_uint64()

class EXHANDLE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TagBits = v_uint32()

class WAIT_CONTEXT_BLOCK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WaitQueueEntry = KDEVICE_QUEUE_ENTRY()
        self.DeviceRoutine = v_ptr32()
        self.DeviceContext = v_ptr32()
        self.NumberOfMapRegisters = v_uint32()
        self.DeviceObject = v_ptr32()
        self.CurrentIrp = v_ptr32()
        self.BufferChainingDpc = v_ptr32()

class SE_AUDIT_PROCESS_CREATION_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ImageFileName = v_ptr32()

class ACTIVATION_CONTEXT_STACK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ActiveFrame = v_ptr32()
        self.FrameListCache = LIST_ENTRY()
        self.Flags = v_uint32()
        self.NextCookieSequenceNumber = v_uint32()
        self.StackId = v_uint32()

# Loader module entry; the three LIST_ENTRYs link into PEB_LDR_DATA lists.
class LDR_DATA_TABLE_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InLoadOrderLinks = LIST_ENTRY()
        self.InMemoryOrderLinks = LIST_ENTRY()
        self.InInitializationOrderLinks = LIST_ENTRY()
        self.DllBase = v_ptr32()
        self.EntryPoint = v_ptr32()
        self.SizeOfImage = v_uint32()
        self.FullDllName = UNICODE_STRING()
        self.BaseDllName = UNICODE_STRING()
        self.FlagGroup = vstruct.VArray([ v_uint8() for i in xrange(4) ])
        self.ObsoleteLoadCount = v_uint16()
        self.TlsIndex = v_uint16()
        self.HashLinks = LIST_ENTRY()
        self.TimeDateStamp = v_uint32()
        self.EntryPointActivationContext = v_ptr32()
        self.Spare = v_ptr32()
        self.DdagNode = v_ptr32()
        self.NodeModuleLink = LIST_ENTRY()
        self.SnapContext = v_ptr32()
        self.ParentDllBase = v_ptr32()
        self.SwitchBackContext = v_ptr32()
        self.BaseAddressIndexNode = RTL_BALANCED_NODE()
        self.MappingInfoIndexNode = RTL_BALANCED_NODE()
        self.OriginalBase = v_uint32()
        self._pad0088 = v_bytes(size=4)
        self.LoadTime = LARGE_INTEGER()
        self.BaseNameHashValue = v_uint32()
        self.LoadReason = v_uint32()
        self.ImplicitPathOptions = v_uint32()
        self._pad00a0 = v_bytes(size=4)

class LOOKASIDE_LIST_EX(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.L = GENERAL_LOOKASIDE_POOL()

# Thread Environment Block (user-mode, 32-bit layout).
class TEB(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NtTib = NT_TIB()
        self.EnvironmentPointer = v_ptr32()
        self.ClientId = CLIENT_ID()
        self.ActiveRpcHandle = v_ptr32()
        self.ThreadLocalStoragePointer = v_ptr32()
        self.ProcessEnvironmentBlock = v_ptr32()
        self.LastErrorValue = v_uint32()
        self.CountOfOwnedCriticalSections = v_uint32()
        self.CsrClientThread = v_ptr32()
        self.Win32ThreadInfo = v_ptr32()
        self.User32Reserved = vstruct.VArray([ v_uint32() for i in xrange(26) ])
        self.UserReserved = vstruct.VArray([ v_uint32() for i in xrange(5) ])
        self.WOW32Reserved = v_ptr32()
        self.CurrentLocale = v_uint32()
        self.FpSoftwareStatusRegister = v_uint32()
        self.SystemReserved1 = vstruct.VArray([ v_ptr32() for i in xrange(54) ])
        self.ExceptionCode = v_uint32()
        self.ActivationContextStackPointer = v_ptr32()
        self.SpareBytes = vstruct.VArray([ v_uint8() for i in xrange(36) ])
        self.TxFsContext = v_uint32()
        self.GdiTebBatch = GDI_TEB_BATCH()
        self.RealClientId = CLIENT_ID()
        self.GdiCachedProcessHandle = v_ptr32()
        self.GdiClientPID = v_uint32()
        self.GdiClientTID = v_uint32()
        self.GdiThreadLocalInfo = v_ptr32()
        self.Win32ClientInfo = vstruct.VArray([ v_uint32() for i in xrange(62) ])
        self.glDispatchTable = vstruct.VArray([ v_ptr32() for i in xrange(233) ])
        self.glReserved1 = vstruct.VArray([ v_uint32() for i in xrange(29) ])
        self.glReserved2 = v_ptr32()
        self.glSectionInfo = v_ptr32()
        self.glSection = v_ptr32()
        self.glTable = v_ptr32()
        self.glCurrentRC = v_ptr32()
        self.glContext = v_ptr32()
        self.LastStatusValue = v_uint32()
        self.StaticUnicodeString = UNICODE_STRING()
        self.StaticUnicodeBuffer = vstruct.VArray([ v_uint16() for i in xrange(261) ])
        self._pad0e0c = v_bytes(size=2)
        self.DeallocationStack = v_ptr32()
        self.TlsSlots = vstruct.VArray([ v_ptr32() for i in xrange(64) ])
        self.TlsLinks = LIST_ENTRY()
        self.Vdm = v_ptr32()
        self.ReservedForNtRpc = v_ptr32()
        self.DbgSsReserved = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
        self.HardErrorMode = v_uint32()
        self.Instrumentation = vstruct.VArray([ v_ptr32() for i in xrange(9) ])
        self.ActivityId = GUID()
        self.SubProcessTag = v_ptr32()
        self.PerflibData = v_ptr32()
        self.EtwTraceData = v_ptr32()
        self.WinSockData = v_ptr32()
        self.GdiBatchCount = v_uint32()
        self.CurrentIdealProcessor = PROCESSOR_NUMBER()
        self.GuaranteedStackBytes = v_uint32()
        self.ReservedForPerf = v_ptr32()
        self.ReservedForOle = v_ptr32()
        self.WaitingOnLoaderLock = v_uint32()
        self.SavedPriorityState = v_ptr32()
        self.ReservedForCodeCoverage = v_uint32()
        self.ThreadPoolData = v_ptr32()
        self.TlsExpansionSlots = v_ptr32()
        self.MuiGeneration = v_uint32()
        self.IsImpersonating = v_uint32()
        self.NlsCache = v_ptr32()
        self.pShimData = v_ptr32()
        self.HeapVirtualAffinity = v_uint16()
        self.LowFragHeapDataSlot = v_uint16()
        self.CurrentTransactionHandle = v_ptr32()
        self.ActiveFrame = v_ptr32()
        self.FlsData = v_ptr32()
        self.PreferredLanguages = v_ptr32()
        self.UserPrefLanguages = v_ptr32()
        self.MergedPrefLanguages = v_ptr32()
        self.MuiImpersonation = v_uint32()
        self.CrossTebFlags = v_uint16()
        self.SameTebFlags = v_uint16()
        self.TxnScopeEnterCallback = v_ptr32()
        self.TxnScopeExitCallback = v_ptr32()
        self.TxnScopeContext = v_ptr32()
        self.LockCount = v_uint32()
        self.SpareUlong0 = v_uint32()
        self.ResourceRetValue = v_ptr32()
        self.ReservedForWdf = v_ptr32()

class EX_RUNDOWN_REF(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()

class CPU_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AsUINT32 = vstruct.VArray([ v_uint32() for i in xrange(4) ])

class XSAVE_FORMAT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ControlWord = v_uint16()
        self.StatusWord = v_uint16()
        self.TagWord = v_uint8()
        self.Reserved1 = v_uint8()
        self.ErrorOpcode = v_uint16()
        self.ErrorOffset = v_uint32()
        self.ErrorSelector = v_uint16()
        self.Reserved2 = v_uint16()
        self.DataOffset = v_uint32()
        self.DataSelector = v_uint16()
        self.Reserved3 = v_uint16()
        self.MxCsr = v_uint32()
        self.MxCsr_Mask = v_uint32()
        self.FloatRegisters = vstruct.VArray([ M128A() for i in xrange(8) ])
        self.XmmRegisters = vstruct.VArray([ M128A() for i in xrange(8) ])
        self.Reserved4 = vstruct.VArray([ v_uint8() for i in xrange(224) ])

class HEAP_LFH_AFFINITY_SLOT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.State = HEAP_LFH_SUBSEGMENT_OWNER()

class PO_DIAG_STACK_RECORD(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.StackDepth = v_uint32()
        self.Stack = vstruct.VArray([ v_ptr32() for i in xrange(1) ])

class IMAGE_DOS_HEADER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.e_magic = v_uint16()
        self.e_cblp = v_uint16()
        self.e_cp = v_uint16()
        self.e_crlc = v_uint16()
        self.e_cparhdr = v_uint16()
        self.e_minalloc = v_uint16()
        self.e_maxalloc = v_uint16()
        self.e_ss = v_uint16()
        self.e_sp = v_uint16()
        self.e_csum = v_uint16()
        self.e_ip = v_uint16()
        self.e_cs = v_uint16()
        self.e_lfarlc = v_uint16()
        self.e_ovno = v_uint16()
        self.e_res = vstruct.VArray([ v_uint16() for i in xrange(4) ])
        self.e_oemid = v_uint16()
        self.e_oeminfo = v_uint16()
        self.e_res2 = vstruct.VArray([ v_uint16() for i in xrange(10) ])
        self.e_lfanew = v_uint32()

class RTL_DYNAMIC_HASH_TABLE_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Linkage = LIST_ENTRY()
        self.Signature = v_uint32()

class TXN_PARAMETER_BLOCK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint16()
        self.TxFsContext = v_uint16()
        self.TransactionObject = v_ptr32()

class HEAP_USERDATA_OFFSETS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.FirstAllocationOffset = v_uint16()
        self.BlockStride = v_uint16()

class QUAD(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.UseThisFieldToCopy = v_uint64()

class _unnamed_9105(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceQueueEntry = KDEVICE_QUEUE_ENTRY()
        self.Thread = v_ptr32()
        self.AuxiliaryBuffer = v_ptr32()
        self.ListEntry = LIST_ENTRY()
        self.CurrentStackLocation = v_ptr32()
        self.OriginalFileObject = v_ptr32()
        self.IrpExtension = v_ptr32()

class HEAP_TUNING_PARAMETERS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CommittThresholdShift = v_uint32()
        self.MaxPreCommittThreshold = v_uint32()

# KPRCB continues past this chunk; remaining fields follow in the next chunk.
class KPRCB(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MinorVersion = v_uint16()
        self.MajorVersion = v_uint16()
        self.CurrentThread = v_ptr32()
        self.NextThread = v_ptr32()
        self.IdleThread = v_ptr32()
        self.LegacyNumber = v_uint8()
        self.NestingLevel = v_uint8()
        self.BuildType = v_uint16()
        self.CpuType = v_uint8()
        self.CpuID = v_uint8()
        self.CpuStep = v_uint16()
        self.ProcessorState = KPROCESSOR_STATE()
        self.ParentNode = v_ptr32()
        self.PriorityState = v_ptr32()
        self.KernelReserved = vstruct.VArray([ v_uint32() for i in xrange(14) ])
        self.HalReserved = vstruct.VArray([ v_uint32() for i in xrange(16) ])
        self.CFlushSize = v_uint32()
        self.CoresPerPhysicalProcessor = v_uint8()
        self.LogicalProcessorsPerCore = v_uint8()
        self.CpuVendor = v_uint8()
        self.PrcbPad0 = vstruct.VArray([ v_uint8() for i in xrange(1) ])
        self.MHz = v_uint32()
        self.GroupIndex = v_uint8()
        self.Group = v_uint8()
        self.PrcbPad05 = vstruct.VArray([ v_uint8() for i in xrange(2) ])
        self.GroupSetMember = v_uint32()
        self.Number = v_uint32()
        self.ClockOwner = v_uint8()
        self.PendingTickFlags = v_uint8()
        self.PrcbPad10 = vstruct.VArray([ v_uint8() for i in xrange(70) ])
        self.LockQueue = vstruct.VArray([ KSPIN_LOCK_QUEUE() for i in xrange(17) ])
        self.InterruptCount = v_uint32()
        self.KernelTime = v_uint32()
        self.UserTime = v_uint32()
        self.DpcTime = v_uint32()
        self.DpcTimeCount = v_uint32()
        self.InterruptTime = v_uint32()
        self.AdjustDpcThreshold = v_uint32()
        self.PageColor = v_uint32()
        self.DebuggerSavedIRQL = v_uint8()
        self.NodeColor = v_uint8()
        self.PrcbPad20 = vstruct.VArray([ v_uint8() for i in xrange(6) ])
self.NodeShiftedColor = v_uint32() self.SecondaryColorMask = v_uint32() self.DpcTimeLimit = v_uint32() self.PrcbPad21 = vstruct.VArray([ v_uint32() for i in xrange(3) ]) self.CcFastReadNoWait = v_uint32() self.CcFastReadWait = v_uint32() self.CcFastReadNotPossible = v_uint32() self.CcCopyReadNoWait = v_uint32() self.CcCopyReadWait = v_uint32() self.CcCopyReadNoWaitMiss = v_uint32() self.MmSpinLockOrdering = v_uint32() self.IoReadOperationCount = v_uint32() self.IoWriteOperationCount = v_uint32() self.IoOtherOperationCount = v_uint32() self.IoReadTransferCount = LARGE_INTEGER() self.IoWriteTransferCount = LARGE_INTEGER() self.IoOtherTransferCount = LARGE_INTEGER() self.CcFastMdlReadNoWait = v_uint32() self.CcFastMdlReadWait = v_uint32() self.CcFastMdlReadNotPossible = v_uint32() self.CcMapDataNoWait = v_uint32() self.CcMapDataWait = v_uint32() self.CcPinMappedDataCount = v_uint32() self.CcPinReadNoWait = v_uint32() self.CcPinReadWait = v_uint32() self.CcMdlReadNoWait = v_uint32() self.CcMdlReadWait = v_uint32() self.CcLazyWriteHotSpots = v_uint32() self.CcLazyWriteIos = v_uint32() self.CcLazyWritePages = v_uint32() self.CcDataFlushes = v_uint32() self.CcDataPages = v_uint32() self.CcLostDelayedWrites = v_uint32() self.CcFastReadResourceMiss = v_uint32() self.CcCopyReadWaitMiss = v_uint32() self.CcFastMdlReadResourceMiss = v_uint32() self.CcMapDataNoWaitMiss = v_uint32() self.CcMapDataWaitMiss = v_uint32() self.CcPinReadNoWaitMiss = v_uint32() self.CcPinReadWaitMiss = v_uint32() self.CcMdlReadNoWaitMiss = v_uint32() self.CcMdlReadWaitMiss = v_uint32() self.CcReadAheadIos = v_uint32() self.KeAlignmentFixupCount = v_uint32() self.KeExceptionDispatchCount = v_uint32() self.KeSystemCalls = v_uint32() self.AvailableTime = v_uint32() self.PrcbPad22 = vstruct.VArray([ v_uint32() for i in xrange(2) ]) self.PPLookasideList = vstruct.VArray([ PP_LOOKASIDE_LIST() for i in xrange(16) ]) self.PPNxPagedLookasideList = vstruct.VArray([ GENERAL_LOOKASIDE_POOL() for i in xrange(32) 
]) self.PPNPagedLookasideList = vstruct.VArray([ GENERAL_LOOKASIDE_POOL() for i in xrange(32) ]) self.PPPagedLookasideList = vstruct.VArray([ GENERAL_LOOKASIDE_POOL() for i in xrange(32) ]) self.PacketBarrier = v_uint32() self.ReverseStall = v_uint32() self.IpiFrame = v_ptr32() self.PrcbPad3 = vstruct.VArray([ v_uint8() for i in xrange(52) ]) self.CurrentPacket = vstruct.VArray([ v_ptr32() for i in xrange(3) ]) self.TargetSet = v_uint32() self.WorkerRoutine = v_ptr32() self.IpiFrozen = v_uint32() self.PrcbPad4 = vstruct.VArray([ v_uint8() for i in xrange(40) ]) self.RequestSummary = v_uint32() self.SignalDone = v_ptr32() self.PrcbPad50 = vstruct.VArray([ v_uint8() for i in xrange(40) ]) self.InterruptLastCount = v_uint32() self.InterruptRate = v_uint32() self.DeviceInterrupts = v_uint32() self.IsrDpcStats = v_ptr32() self.DpcData = vstruct.VArray([ KDPC_DATA() for i in xrange(2) ]) self.DpcStack = v_ptr32() self.MaximumDpcQueueDepth = v_uint32() self.DpcRequestRate = v_uint32() self.MinimumDpcRate = v_uint32() self.DpcLastCount = v_uint32() self.PrcbLock = v_uint32() self.DpcGate = KGATE() self.ThreadDpcEnable = v_uint8() self.QuantumEnd = v_uint8() self.DpcRoutineActive = v_uint8() self.IdleSchedule = v_uint8() self.DpcRequestSummary = v_uint32() self.LastTimerHand = v_uint32() self.LastTick = v_uint32() self.PeriodicCount = v_uint32() self.PeriodicBias = v_uint32() self.ClockInterrupts = v_uint32() self.ReadyScanTick = v_uint32() self.GroupSchedulingOverQuota = v_uint8() self.PrcbPad41 = vstruct.VArray([ v_uint8() for i in xrange(3) ]) self._pad2260 = v_bytes(size=4) self.TimerTable = KTIMER_TABLE() self.CallDpc = KDPC() self.ClockKeepAlive = v_uint32() self.PrcbPad6 = vstruct.VArray([ v_uint8() for i in xrange(4) ]) self.DpcWatchdogPeriod = v_uint32() self.DpcWatchdogCount = v_uint32() self.KeSpinLockOrdering = v_uint32() self.PrcbPad70 = vstruct.VArray([ v_uint32() for i in xrange(1) ]) self.QueueIndex = v_uint32() self.DeferredReadyListHead = 
SINGLE_LIST_ENTRY() self.ReadySummary = v_uint32() self.AffinitizedSelectionMask = v_uint32() self.WaitLock = v_uint32() self.WaitListHead = LIST_ENTRY() self.ScbOffset = v_uint32() self.StartCycles = v_uint64() self.GenerationTarget = v_uint64() self.CycleTime = v_uint64() self.AffinitizedCycles = v_uint64() self.HighCycleTime = v_uint32() self.PrcbPad71 = v_uint32() self.DispatcherReadyListHead = vstruct.VArray([ LIST_ENTRY() for i in xrange(32) ]) self.ChainedInterruptList = v_ptr32() self.LookasideIrpFloat = v_uint32() self.ScbQueue = RTL_RB_TREE() self.ScbList = LIST_ENTRY() self.MmPageFaultCount = v_uint32() self.MmCopyOnWriteCount = v_uint32() self.MmTransitionCount = v_uint32() self.MmCacheTransitionCount = v_uint32() self.MmDemandZeroCount = v_uint32() self.MmPageReadCount = v_uint32() self.MmPageReadIoCount = v_uint32() self.MmCacheReadCount = v_uint32() self.MmCacheIoCount = v_uint32() self.MmDirtyPagesWriteCount = v_uint32() self.MmDirtyWriteIoCount = v_uint32() self.MmMappedPagesWriteCount = v_uint32() self.MmMappedWriteIoCount = v_uint32() self.CachedCommit = v_uint32() self.CachedResidentAvailable = v_uint32() self.HyperPte = v_ptr32() self.PrcbPad8 = vstruct.VArray([ v_uint8() for i in xrange(4) ]) self.VendorString = vstruct.VArray([ v_uint8() for i in xrange(13) ]) self.InitialApicId = v_uint8() self.LogicalProcessorsPerPhysicalProcessor = v_uint8() self.PrcbPad9 = vstruct.VArray([ v_uint8() for i in xrange(5) ]) self.FeatureBits = v_uint32() self._pad3c98 = v_bytes(size=4) self.UpdateSignature = LARGE_INTEGER() self.IsrTime = v_uint64() self.PrcbPad90 = vstruct.VArray([ v_uint32() for i in xrange(2) ]) self.PowerState = PROCESSOR_POWER_STATE() self.PrcbPad91 = vstruct.VArray([ v_uint32() for i in xrange(13) ]) self.DpcWatchdogDpc = KDPC() self._pad3e98 = v_bytes(size=4) self.DpcWatchdogTimer = KTIMER() self.HypercallPageList = SLIST_HEADER() self.HypercallPageVirtual = v_ptr32() self.VirtualApicAssist = v_ptr32() self.StatisticsPage = v_ptr32() 
self.Cache = vstruct.VArray([ CACHE_DESCRIPTOR() for i in xrange(5) ]) self.CacheCount = v_uint32() self.PackageProcessorSet = KAFFINITY_EX() self.SharedReadyQueueMask = v_uint32() self.SharedReadyQueue = v_ptr32() self.CoreProcessorSet = v_uint32() self.ScanSiblingMask = v_uint32() self.LLCMask = v_uint32() self.CacheProcessorMask = vstruct.VArray([ v_uint32() for i in xrange(5) ]) self.ScanSiblingIndex = v_uint32() self.WheaInfo = v_ptr32() self.EtwSupport = v_ptr32() self._pad3f58 = v_bytes(size=4) self.InterruptObjectPool = SLIST_HEADER() self.SharedReadyQueueOffset = v_uint32() self.PrcbPad92 = vstruct.VArray([ v_uint32() for i in xrange(2) ]) self.PteBitCache = v_uint32() self.PteBitOffset = v_uint32() self.PrcbPad93 = v_uint32() self.ProcessorProfileControlArea = v_ptr32() self.ProfileEventIndexAddress = v_ptr32() self.TimerExpirationDpc = KDPC() self.SynchCounters = SYNCH_COUNTERS() self.FsCounters = FILESYSTEM_DISK_COUNTERS() self.Context = v_ptr32() self.ContextFlagsInit = v_uint32() self.ExtendedState = v_ptr32() self.EntropyTimingState = KENTROPY_TIMING_STATE() self.IsrStack = v_ptr32() self.VectorToInterruptObject = vstruct.VArray([ v_ptr32() for i in xrange(208) ]) self.AbSelfIoBoostsList = SINGLE_LIST_ENTRY() self.AbPropagateBoostsList = SINGLE_LIST_ENTRY() self.AbDpc = KDPC() self.IoIrpStackProfilerCurrent = IOP_IRP_STACK_PROFILER() self.IoIrpStackProfilerPrevious = IOP_IRP_STACK_PROFILER() self.TimerExpirationTrace = vstruct.VArray([ KTIMER_EXPIRATION_TRACE() for i in xrange(16) ]) self.TimerExpirationTraceCount = v_uint32() self._pad46b8 = v_bytes(size=4) class _unnamed_10793(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.FilePointerIndex = v_uint32() class RTL_DYNAMIC_HASH_TABLE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Flags = v_uint32() self.Shift = v_uint32() self.TableSize = v_uint32() self.Pivot = v_uint32() self.DivisorMask = v_uint32() self.NumEntries = v_uint32() 
self.NonEmptyBuckets = v_uint32() self.NumEnumerators = v_uint32() self.Directory = v_ptr32() class KAFFINITY_EX(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Count = v_uint16() self.Size = v_uint16() self.Reserved = v_uint32() self.Bitmap = vstruct.VArray([ v_uint32() for i in xrange(1) ]) class DEVICE_OBJECT(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Type = v_uint16() self.Size = v_uint16() self.ReferenceCount = v_uint32() self.DriverObject = v_ptr32() self.NextDevice = v_ptr32() self.AttachedDevice = v_ptr32() self.CurrentIrp = v_ptr32() self.Timer = v_ptr32() self.Flags = v_uint32() self.Characteristics = v_uint32() self.Vpb = v_ptr32() self.DeviceExtension = v_ptr32() self.DeviceType = v_uint32() self.StackSize = v_uint8() self._pad0034 = v_bytes(size=3) self.Queue = _unnamed_8810() self.AlignmentRequirement = v_uint32() self.DeviceQueue = KDEVICE_QUEUE() self.Dpc = KDPC() self.ActiveThreadCount = v_uint32() self.SecurityDescriptor = v_ptr32() self.DeviceLock = KEVENT() self.SectorSize = v_uint16() self.Spare1 = v_uint16() self.DeviceObjectExtension = v_ptr32() self.Reserved = v_ptr32() class USER_MEMORY_CACHE_ENTRY(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.UserBlocks = SLIST_HEADER() self.AvailableBlocks = v_uint32() self.MinimumDepth = v_uint32() self.CacheShiftThreshold = v_uint32() self.Allocations = v_uint16() self.Frees = v_uint16() self.CacheHits = v_uint16() self._pad0020 = v_bytes(size=6) class KERNEL_STACK_SEGMENT(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.StackBase = v_uint32() self.StackLimit = v_uint32() self.KernelStack = v_uint32() self.InitialStack = v_uint32() class SEGMENT_HEAP_EXTRA(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.AllocatorBackTraceIndex = v_uint16() self.InterceptorIndex = v_uint8() self.ExtraSizeInUnits = v_uint8() self.Settable = v_ptr32() class ISRDPCSTATS(vstruct.VStruct): 
def __init__(self): vstruct.VStruct.__init__(self) self.IsrTime = v_uint64() self.IsrTimeStart = v_uint64() self.IsrCount = v_uint64() self.DpcTime = v_uint64() self.DpcTimeStart = v_uint64() self.DpcCount = v_uint64() self.IsrActive = v_uint8() self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(15) ]) class _unnamed_11950(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.NotificationType = v_uint32() self.SafeToRecurse = v_uint8() self._pad0008 = v_bytes(size=3) class _unnamed_11951(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Argument1 = v_ptr32() self.Argument2 = v_ptr32() self.Argument3 = v_ptr32() self.Argument4 = v_ptr32() self.Argument5 = v_ptr32() class IMAGE_NT_HEADERS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Signature = v_uint32() self.FileHeader = IMAGE_FILE_HEADER() self.OptionalHeader = IMAGE_OPTIONAL_HEADER() class IO_STACK_LOCATION(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.MajorFunction = v_uint8() self.MinorFunction = v_uint8() self.Flags = v_uint8() self.Control = v_uint8() self.Parameters = _unnamed_9066() self.DeviceObject = v_ptr32() self.FileObject = v_ptr32() self.CompletionRoutine = v_ptr32() self.Context = v_ptr32() class KNODE(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.DeepIdleSet = v_uint32() self.SharedReadyQueueLeaders = v_uint32() self._pad0040 = v_bytes(size=56) self.ProximityId = v_uint32() self.NodeNumber = v_uint16() self.PrimaryNodeNumber = v_uint16() self.MaximumProcessors = v_uint8() self.Flags = flags() self.Stride = v_uint8() self.LowIndex = v_uint8() self.Affinity = GROUP_AFFINITY() self.IdleCpuSet = v_uint32() self.IdleSmtSet = v_uint32() self._pad0080 = v_bytes(size=32) self.NonParkedSet = v_uint32() self.Seed = v_uint32() self.Lowest = v_uint32() self.Highest = v_uint32() self.ParkLock = v_uint32() self._pad00c0 = v_bytes(size=44) class 
XSAVE_AREA_HEADER(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Mask = v_uint64() self.Reserved = vstruct.VArray([ v_uint64() for i in xrange(7) ]) class RTL_SPARSE_BITMAP_CTX(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.Lock = v_uint32() self.BitmapRanges = v_ptr32() self.RangeArrayCommitStatus = RTL_BITMAP() self.AllocateRoutine = v_ptr32() self.FreeRoutine = v_ptr32() self.RangeCount = v_uint32() self.RangeIndexLimit = v_uint32() self.BitsPerRange = v_uint32() self.RangeCountMax = v_uint32() self.RangeMetadataOffset = v_uint32() self.MetadataSizePerBit = v_uint32() self.DefaultBitsSet = v_uint32() class RTL_USER_PROCESS_PARAMETERS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.MaximumLength = v_uint32() self.Length = v_uint32() self.Flags = v_uint32() self.DebugFlags = v_uint32() self.ConsoleHandle = v_ptr32() self.ConsoleFlags = v_uint32() self.StandardInput = v_ptr32() self.StandardOutput = v_ptr32() self.StandardError = v_ptr32() self.CurrentDirectory = CURDIR() self.DllPath = UNICODE_STRING() self.ImagePathName = UNICODE_STRING() self.CommandLine = UNICODE_STRING() self.Environment = v_ptr32() self.StartingX = v_uint32() self.StartingY = v_uint32() self.CountX = v_uint32() self.CountY = v_uint32() self.CountCharsX = v_uint32() self.CountCharsY = v_uint32() self.FillAttribute = v_uint32() self.WindowFlags = v_uint32() self.ShowWindowFlags = v_uint32() self.WindowTitle = UNICODE_STRING() self.DesktopInfo = UNICODE_STRING() self.ShellInfo = UNICODE_STRING() self.RuntimeData = UNICODE_STRING() self.CurrentDirectores = vstruct.VArray([ RTL_DRIVE_LETTER_CURDIR() for i in xrange(32) ]) self.EnvironmentSize = v_uint32() self.EnvironmentVersion = v_uint32() self.PackageDependencyData = v_ptr32() self.ProcessGroupId = v_uint32() class FILESYSTEM_DISK_COUNTERS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.FsBytesRead = v_uint64() self.FsBytesWritten = 
v_uint64() class HEAP_LFH_MEM_POLICIES(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.DisableAffinity = v_uint32() class IO_RESOURCE_REQUIREMENTS_LIST(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.ListSize = v_uint32() self.InterfaceType = v_uint32() self.BusNumber = v_uint32() self.SlotNumber = v_uint32() self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(3) ]) self.AlternativeLists = v_uint32() self.List = vstruct.VArray([ IO_RESOURCE_LIST() for i in xrange(1) ]) class HEAP_BUCKET_COUNTERS(vstruct.VStruct): def __init__(self): vstruct.VStruct.__init__(self) self.TotalBlocks = v_uint32() self.SubSegmentCounts = v_uint32()
atlas0fd00m/CanCat
cancat/vstruct/defs/windows/win_6_3_i386/ntdll.py
Python
bsd-2-clause
239,574
0.002162
from say import * from say.styling import StyleDef import six def test_basic_styling(): fmt = Fmt() assert fmt('this', style='green+underline') == six.u('\x1b[32;4mthis\x1b[0m') assert fmt('this', style='bold+red') == six.u('\x1b[31;1mthis\x1b[0m') def test_readme_example(): fmt = Fmt() fmt.style(stars=lambda x: fmt('*** ', style='red') + \ fmt(x, style='black') + \ fmt(' ***', style='red')) message = 'Warning, problem!' assert fmt(message, style='stars') == six.u('\x1b[31m*** \x1b[0m\x1b[30mWarning, problem!\x1b[0m\x1b[31m ***\x1b[0m') def test_readme_example2(): fmt = Fmt() name = 'Joe' assert six.u('His name is ') + fmt(name, style='blue+underline') == six.u('His name is \x1b[34;4mJoe\x1b[0m') assert fmt('His name is {name:style=blue+underline}') == six.u('His name is \x1b[34;4mJoe\x1b[0m') assert fmt('His name is {name:style="blue+underline"}') == six.u('His name is \x1b[34;4mJoe\x1b[0m') assert fmt("His name is {name:style='blue+underline'}") == six.u('His name is \x1b[34;4mJoe\x1b[0m') def test_wrapping_example(): fmt = Fmt() text = "Move over, Coke. It looks like Apple is the real thing. The tech giant has ended Coca-Cola's 13-year run as the world's most valuable brand on a highly regarded annual list." fmt = Fmt() fmt.set(prefix=numberer(template=color('{n:>3}: ', fg='blue')), \ wrap=40) assert fmt(text) == six.u("\x1b[34m 1: \x1b[0mMove over, Coke. It looks\n\x1b[34m 2: \x1b[0mlike Apple is the real\n\x1b[34m 3: \x1b[0mthing. The tech giant has\n\x1b[34m 4: \x1b[0mended Coca-Cola's 13-year\n\x1b[34m 5: \x1b[0mrun as the world's most\n\x1b[34m 6: \x1b[0mvaluable brand on a highly\n\x1b[34m 7: \x1b[0mregarded annual list.") # now reset so numbering starts back at 1 fmt = Fmt() fmt.set(prefix=numberer(template=color('{n:>3}: ', fg='blue')), \ wrap=40) assert fmt(text, style='red') == six.u("\x1b[34m 1: \x1b[0m\x1b[31mMove over, Coke. It looks\x1b[0m\n\x1b[34m 2: \x1b[0m\x1b[31mlike Apple is the real\x1b[0m\n\x1b[34m 3: \x1b[0m\x1b[31mthing. 
The tech giant has\x1b[0m\n\x1b[34m 4: \x1b[0m\x1b[31mended Coca-Cola's 13-year\x1b[0m\n\x1b[34m 5: \x1b[0m\x1b[31mrun as the world's most\x1b[0m\n\x1b[34m 6: \x1b[0m\x1b[31mvaluable brand on a highly\x1b[0m\n\x1b[34m 7: \x1b[0m\x1b[31mregarded annual list.\x1b[0m") def test_color(): assert color('text', fg='green') == '\x1b[32mtext\x1b[0m' assert color('more', fg='blue', bg='yellow') == '\x1b[34;43mmore\x1b[0m' def test_styled(): assert color('test', fg='green', style='bold') == styled('test', 'green+bold') assert color('test', fg='green', style='bold') == styled('test', 'bold+green') assert color('test', fg='green', bg='red', style='bold') == styled('test', 'green+red+bold') assert color('test', fg='green', bg='red', style='bold') == styled('test', 'bold+green+red') assert color('test', fg='green', bg='red', style='bold') == styled('test', 'bold|green|red') assert color('test', fg='green', bg='red', style='bold') == styled('test', 'bold,green,red') def test_in_or_out(): fmt = Fmt() x = 12 assert fmt(x, style='blue+white+underline') == fmt("{x:style=blue+white+underline}") fmt.style(bwu=autostyle('blue+white+underline')) fmt.style(bwu2='blue+white+underline') assert fmt(x, style='bwu') == fmt(x, style='blue+white+underline') assert fmt(x, style='bwu') == fmt(x, style='bwu2') def test_Relative(): assert Relative(4) == 4 assert Relative(+4) == Relative(4) assert Relative(-5) == -5 assert Relative(0) == 0 assert Relative(1) + Relative(2) == Relative(3) assert Relative(1) + 2 == 3 assert 1 - Relative(1) == 0 assert Relative(2) - Relative(-1) == Relative(3) assert Relative(5) - Relative(2) == Relative(3) assert 1 + Relative(2) == 3 assert repr(Relative(4)) == 'Relative(+4)' assert repr(Relative(-5)) == 'Relative(-5)' assert repr(Relative(0)) == 'Relative(0)' def test_StyleDef_basic(): """ Minimal test of evovling StyleDef object """ s = StyleDef(name='test', join=False) assert s("this") == "this"
nrgaway/qubes-tools
builder-tools/libs/say-1.4.2/test/test_styling.py
Python
gpl-2.0
4,257
0.005168
#!/usr/bin/env python """ This utility extracts media urls from tweet jsonl.gz and save them as warc records. Warcio (https://github.com/webrecorder/warcio) is a dependency and before you can use it you need to: % pip install warcio You run it like this: % python media2warc.py /mnt/tweets/ferguson/tweets-0001.jsonl.gz /mnt/tweets/ferguson/tweets-0001.warc.gz The input file will be checked for duplicate urls to avoid duplicates within the input file. Subsequent runs will be deduplicated using a sqlite db. If an identical-payload-digest is found a revist record is created. The script is able to fetch media resources in multiple threads (maximum 2) by passing --threads <int> (default to a single thread). Please be careful modifying this script to use more than two threads since it can be interpreted as a DoS-attack. """ import os import gzip import json import time import queue import hashlib import logging import sqlite3 import argparse import requests import threading from datetime import timedelta from warcio.warcwriter import WARCWriter from warcio.statusandheaders import StatusAndHeaders q = queue.Queue() out_queue = queue.Queue() BLOCK_SIZE = 25600 class GetResource(threading.Thread): def __init__(self, q): threading.Thread.__init__(self) self.q = q self.rlock = threading.Lock() self.out_queue = out_queue self.d = Dedup() def run(self): while True: host = self.q.get() try: r = requests.get(host, headers={'Accept-Encoding': 'identity'}, stream=True) data = [r.raw.headers.items(), r.raw, host, r.status_code, r.reason] print(data[2]) self.out_queue.put(data) self.q.task_done() except requests.exceptions.RequestException as e: logging.error('%s for %s', e, data[2]) print(e) self.q.task_done() continue class WriteWarc(threading.Thread): def __init__(self, out_queue, warcfile): threading.Thread.__init__(self) self.out_queue = out_queue self.lock = threading.Lock() self.warcfile = warcfile self.dedup = Dedup() def run(self): with open(self.warcfile, 'ab') as output: 
while True: self.lock.acquire() data = self.out_queue.get() writer = WARCWriter(output, gzip=False) headers_list = data[0] http_headers = StatusAndHeaders('{} {}'.format(data[3], data[4]), headers_list, protocol='HTTP/1.0') record = writer.create_warc_record(data[2], 'response', payload=data[1], http_headers=http_headers) h = hashlib.sha1() h.update(record.raw_stream.read(BLOCK_SIZE)) if self.dedup.lookup(h.hexdigest()): record = writer.create_warc_record(data[2], 'revisit', http_headers=http_headers) writer.write_record(record) self.out_queue.task_done() self.lock.release() else: self.dedup.save(h.hexdigest(), data[2]) record.raw_stream.seek(0) writer.write_record(record) self.out_queue.task_done() self.lock.release() class Dedup: """ Stolen from warcprox https://github.com/internetarchive/warcprox/blob/master/warcprox/dedup.py """ def __init__(self): self.file = os.path.join(args.archive_dir,'dedup.db') def start(self): conn = sqlite3.connect(self.file) conn.execute( 'create table if not exists dedup (' ' key varchar(300) primary key,' ' value varchar(4000)' ');') conn.commit() conn.close() def save(self, digest_key, url): conn = sqlite3.connect(self.file) conn.execute( 'insert or replace into dedup (key, value) values (?, ?)', (digest_key, url)) conn.commit() conn.close() def lookup(self, digest_key, url=None): result = False conn = sqlite3.connect(self.file) cursor = conn.execute('select value from dedup where key = ?', (digest_key,)) result_tuple = cursor.fetchone() conn.close() if result_tuple: result = True return result def parse_extended_entities(extended_entities_dict): """Parse media file URL:s form tweet data :extended_entities_dict: :returns: list of media file urls """ urls = [] if "media" in extended_entities_dict.keys(): for item in extended_entities_dict["media"]: # add static image urls.append(item["media_url_https"]) # add best quality video file if "video_info" in item.keys(): max_bitrate = -1 # handle twitters occasional bitrate=0 video_url = 
None for video in item["video_info"]["variants"]: if "bitrate" in video.keys() and "content_type" in video.keys(): if video["content_type"] == "video/mp4": if int(video["bitrate"]) > max_bitrate: max_bitrate = int(video["bitrate"]) video_url = video["url"] if not video_url: print("Error: No bitrate / content_type") print(item["video_info"]) else: urls.append(video_url) return urls def parse_binlinks_from_tweet(tweetdict): """Parse binary file url:s from a single tweet. :tweetdict: json data dict for tweet :returns: list of urls for media files """ urls = [] if "user" in tweetdict.keys(): urls.append(tweetdict["user"]["profile_image_url_https"]) urls.append(tweetdict["user"]["profile_background_image_url_https"]) if "extended_entities" in tweetdict.keys(): urls.extend(parse_extended_entities(tweetdict["extended_entities"])) return urls def main(): start = time.time() if not os.path.isdir(args.archive_dir): os.mkdir(args.archive_dir) logging.basicConfig( filename=os.path.join(args.archive_dir, "media_harvest.log"), level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s" ) logging.getLogger(__name__) logging.info("Logging media harvest for %s", args.tweet_file) urls = [] d = Dedup() d.start() uniqueUrlCount = 0 duplicateUrlCount = 0 if args.tweet_file.endswith('.gz'): tweetfile = gzip.open(args.tweet_file, 'r') else: tweetfile = open(args.tweet_file, 'r') logging.info("Checking for duplicate urls") for line in tweetfile: tweet = json.loads(line) tweet_urls = parse_binlinks_from_tweet(tweet) for url in tweet_urls: if not url in urls: urls.append(url) q.put(url) uniqueUrlCount +=1 else: duplicateUrlCount += 1 logging.info("Found %s total media urls %s unique and %s duplicates", uniqueUrlCount+duplicateUrlCount, uniqueUrlCount, duplicateUrlCount) threads = int(args.threads) if threads > 2: threads = 2 for i in range(threads): t = GetResource(q) t.setDaemon(True) t.start() wt = WriteWarc(out_queue, os.path.join(args.archive_dir, 'warc.warc')) wt.setDaemon(True) 
wt.start() q.join() out_queue.join() logging.info("Finished media harvest in %s", str(timedelta(seconds=(time.time() - start)))) if __name__ == '__main__': parser = argparse.ArgumentParser("archive") parser.add_argument("tweet_file", action="store", help="a twitter jsonl.gz input file") parser.add_argument("archive_dir", action="store", help="a directory where the resulting warc is stored") parser.add_argument("--threads", action="store", default=1, help="Number of threads that fetches media resources") args = parser.parse_args() main()
edsu/twarc
utils/media2warc.py
Python
mit
8,273
0.003143
""" What ----- A Markov Chain is a sequence of values where the next value depends only on the current value (and not past values). It's basically a really simple state machine, where given the present state, the future state is conditionally independent of the past. Thus we can ask the question: Given the present word, how likely is it that this word I've chosen would be the next? How ----- 1) The last two words are the current state. 2) Next word depends on last two words only, or on present state only. I've simplified this example down to the core elements of a Markov text generator. Run the following to generate your own nonsensical strings: $ python run.py """ import random # Class for generating markov chain class Markov(object): def __init__(self, open_file): # Simple dict cache self.cache = {} self.open_file = open_file # Grabs all the words from the file self.words = self.file_to_words() # Verifys number of words in corpus self.word_size = len(self.words) self.database() # Function that grabs all the words from the given file def file_to_words(self): self.open_file.seek(0) data = self.open_file.read() words = data.split() return words def triples(self): """ Generates triples from the given data string. So if our string were "What a lovely day", we'd generate (What, a, lovely) and then (a, lovely, day). 
""" if len(self.words) < 3: # NEED MOAR WORDS return for i in range(len(self.words) - 2): yield (self.words[i], self.words[i+1], self.words[i+2]) def database(self): for w1, w2, w3 in self.triples(): # Sets the first 2 words as the key key = (w1, w2) # If that key exists in cache append the third word to the cache if key in self.cache: self.cache[key].append(w3) else: # If the key doesn't exist in the cache set the cache[key] = third word self.cache[key] = [w3] # Size denotes the length of the sentence to be outputted def generate_markov_text(self, size=20): # set seed to a random integer based on corpus size seed = random.randint(0, self.word_size-3) # Set next_word seed_word, next_word = self.words[seed], self.words[seed+1] w1, w2 = seed_word, next_word # Instantiate new list to hold the created sentence gen_words = [] # size refers the number of words in the requested output for i in xrange(size): #Appends the seed_word to the gen_words list gen_words.append(w1) # Flips the seed_word to (seed_word + 1) # random.choice return a random element from the cachce based on key[seed,seed+1] w1, w2 = w2, random.choice(self.cache[(w1, w2)]) gen_words.append(w2) print ' '.join(gen_words)
tedlee/markov
markovgen.py
Python
mit
2,709
0.032484