Dataset schema (one record per source file):

    repo_name      string, length 5 to 92
    path           string, length 4 to 232
    copies         string, 19 classes
    size           string, length 4 to 7
    content        string, length 721 to 1.04M
    license        string, 15 classes
    hash           int64, range -9,223,277,421,539,062,000 to 9,223,102,107B
    line_mean      float64, range 6.51 to 99.9
    line_max       int64, range 15 to 997
    alpha_frac     float64, range 0.25 to 0.97
    autogenerated  bool, 1 class
chrischoy/3D-R2N2
lib/blender_renderer.py
1
10773
#!/usr/bin/env python3
import _init_paths
import time
import os
import contextlib
from math import radians

import numpy as np
from PIL import Image
from tempfile import TemporaryFile

from lib.utils import stdout_redirected
from lib.config import cfg

import bpy


def voxel2mesh(voxels):
    cube_verts = [[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [1, 0, 0],
                  [1, 0, 1], [1, 1, 0], [1, 1, 1]]  # 8 points

    cube_faces = [[0, 1, 2], [1, 3, 2], [2, 3, 6], [3, 7, 6], [0, 2, 6],
                  [0, 6, 4], [0, 5, 1], [0, 4, 5], [6, 7, 5], [6, 5, 4],
                  [1, 7, 3], [1, 5, 7]]  # 12 faces

    cube_verts = np.array(cube_verts)
    cube_faces = np.array(cube_faces) + 1

    l, m, n = voxels.shape

    scale = 0.01
    cube_dist_scale = 1.1
    verts = []
    faces = []
    curr_vert = 0
    for i in range(l):
        for j in range(m):
            for k in range(n):
                # If there is a non-empty voxel
                if voxels[i, j, k] > 0:
                    verts.extend(scale * (cube_verts + cube_dist_scale * np.array([[i, j, k]])))
                    faces.extend(cube_faces + curr_vert)
                    curr_vert += len(cube_verts)

    return np.array(verts), np.array(faces)


def write_obj(filename, verts, faces):
    """Write the verts and faces to a file."""
    with open(filename, 'w') as f:
        # write vertices
        f.write('g\n# %d vertices\n' % len(verts))
        for vert in verts:
            f.write('v %f %f %f\n' % tuple(vert))

        # write faces
        f.write('# %d faces\n' % len(faces))
        for face in faces:
            f.write('f %d %d %d\n' % tuple(face))


class BaseRenderer:
    model_idx = 0

    def __init__(self):
        # bpy.data.scenes['Scene'].render.engine = 'CYCLES'
        # bpy.context.scene.cycles.device = 'GPU'
        # bpy.context.user_preferences.system.compute_device_type = 'CUDA'
        # bpy.context.user_preferences.system.compute_device = 'CUDA_1'
        # changing these values does affect the render.

        # remove the default cube
        bpy.ops.object.select_pattern(pattern="Cube")
        bpy.ops.object.delete()

        render_context = bpy.context.scene.render
        world = bpy.context.scene.world
        camera = bpy.data.objects['Camera']
        light_1 = bpy.data.objects['Lamp']
        light_1.data.type = 'HEMI'

        # set the camera position and orientation so that it is in
        # the front of the object
        camera.location = (1, 0, 0)
        camera.rotation_mode = 'ZXY'
        camera.rotation_euler = (0, radians(90), radians(90))

        # parent camera with an empty object at origin
        org_obj = bpy.data.objects.new("RotCenter", None)
        org_obj.location = (0, 0, 0)
        org_obj.rotation_euler = (0, 0, 0)
        bpy.context.scene.objects.link(org_obj)

        camera.parent = org_obj  # setup parenting

        # render setting
        render_context.resolution_percentage = 100
        world.horizon_color = (1, 1, 1)  # set background color to be white

        # set file name for storing rendering result
        self.result_fn = '%s/render_result_%d.png' % (cfg.DIR.RENDERING_PATH, os.getpid())
        bpy.context.scene.render.filepath = self.result_fn

        self.render_context = render_context
        self.org_obj = org_obj
        self.camera = camera
        self.light = light_1
        self._set_lighting()

    def initialize(self, models_fn, viewport_size_x, viewport_size_y):
        self.models_fn = models_fn
        self.render_context.resolution_x = viewport_size_x
        self.render_context.resolution_y = viewport_size_y

    def _set_lighting(self):
        pass

    def setViewpoint(self, azimuth, altitude, yaw, distance_ratio, fov):
        self.org_obj.rotation_euler = (0, 0, 0)
        self.light.location = (distance_ratio * (cfg.RENDERING.MAX_CAMERA_DIST + 2), 0, 0)
        self.camera.location = (distance_ratio * cfg.RENDERING.MAX_CAMERA_DIST, 0, 0)
        self.org_obj.rotation_euler = (radians(-yaw), radians(-altitude), radians(-azimuth))

    def setTransparency(self, transparency):
        """transparency is either 'SKY' or 'TRANSPARENT'.
        If set to 'SKY', render background using sky color."""
        self.render_context.alpha_mode = transparency

    def selectModel(self):
        bpy.ops.object.select_all(action='DESELECT')
        bpy.ops.object.select_pattern(pattern="RotCenter")
        bpy.ops.object.select_pattern(pattern="Lamp*")
        bpy.ops.object.select_pattern(pattern="Camera")
        bpy.ops.object.select_all(action='INVERT')

    def printSelection(self):
        print(bpy.context.selected_objects)

    def clearModel(self):
        self.selectModel()
        bpy.ops.object.delete()

        # The meshes are still present after delete
        for item in bpy.data.meshes:
            bpy.data.meshes.remove(item)
        for item in bpy.data.materials:
            bpy.data.materials.remove(item)

    def setModelIndex(self, model_idx):
        self.model_idx = model_idx

    def loadModel(self, file_path=None):
        if file_path is None:
            file_path = self.models_fn[self.model_idx]

        if file_path.endswith('obj'):
            bpy.ops.import_scene.obj(filepath=file_path)
        elif file_path.endswith('3ds'):
            bpy.ops.import_scene.autodesk_3ds(filepath=file_path)
        elif file_path.endswith('dae'):
            # Must install OpenCollada. Please read README.md
            bpy.ops.wm.collada_import(filepath=file_path)
        else:
            raise Exception("Loading failed: %s Model loading for type %s not implemented" %
                            (file_path, file_path[-4:]))

    def render(self, load_model=True, clear_model=True, resize_ratio=None,
               return_image=True,
               image_path=os.path.join(cfg.RENDERING.BLENDER_TMP_DIR, 'tmp.png')):
        """Render the object."""
        if load_model:
            self.loadModel()

        # resize object
        self.selectModel()
        if resize_ratio:
            bpy.ops.transform.resize(value=resize_ratio)

        self.result_fn = image_path
        bpy.context.scene.render.filepath = image_path
        bpy.ops.render.render(write_still=True)  # save straight to file

        if resize_ratio:
            bpy.ops.transform.resize(value=(1/resize_ratio[0],
                                            1/resize_ratio[1],
                                            1/resize_ratio[2]))

        if clear_model:
            self.clearModel()

        if return_image:
            im = np.array(Image.open(self.result_fn))  # read the image

            # Last channel is the alpha channel (transparency)
            return im[:, :, :3], im[:, :, 3]


class ShapeNetRenderer(BaseRenderer):

    def __init__(self):
        super().__init__()
        self.setTransparency('TRANSPARENT')

    def _set_lighting(self):
        # Create new lamp datablock
        light_data = bpy.data.lamps.new(name="New Lamp", type='HEMI')

        # Create new object with our lamp datablock
        light_2 = bpy.data.objects.new(name="New Lamp", object_data=light_data)
        bpy.context.scene.objects.link(light_2)

        # put the light behind the camera. Reduce specular lighting
        self.light.location = (0, -2, 2)
        self.light.rotation_mode = 'ZXY'
        self.light.rotation_euler = (radians(45), 0, radians(90))
        self.light.data.energy = 0.7

        light_2.location = (0, 2, 2)
        light_2.rotation_mode = 'ZXY'
        light_2.rotation_euler = (-radians(45), 0, radians(90))
        light_2.data.energy = 0.7


class VoxelRenderer(BaseRenderer):

    def __init__(self):
        super().__init__()
        self.setTransparency('SKY')

    def _set_lighting(self):
        self.light.location = (0, 3, 3)
        self.light.rotation_mode = 'ZXY'
        self.light.rotation_euler = (-radians(45), 0, radians(90))
        self.light.data.energy = 0.7

        # Create new lamp datablock
        light_data = bpy.data.lamps.new(name="New Lamp", type='HEMI')

        # Create new object with our lamp datablock
        light_2 = bpy.data.objects.new(name="New Lamp", object_data=light_data)
        bpy.context.scene.objects.link(light_2)
        light_2.location = (4, 1, 6)
        light_2.rotation_mode = 'XYZ'
        light_2.rotation_euler = (radians(37), radians(3), radians(106))
        light_2.data.energy = 0.7

    def render_voxel(self, pred, thresh=0.4,
                     image_path=os.path.join(cfg.RENDERING.BLENDER_TMP_DIR, 'tmp.png')):
        # Cleanup the scene
        self.clearModel()
        out_f = os.path.join(cfg.RENDERING.BLENDER_TMP_DIR, 'tmp.obj')
        occupancy = pred > thresh
        vertices, faces = voxel2mesh(occupancy)
        with contextlib.suppress(IOError):
            os.remove(out_f)
        write_obj(out_f, vertices, faces)

        # Load the obj
        bpy.ops.import_scene.obj(filepath=out_f)
        bpy.context.scene.render.filepath = image_path
        bpy.ops.render.render(write_still=True)  # save straight to file

        im = np.array(Image.open(image_path))  # read the image

        # Last channel is the alpha channel (transparency)
        return im[:, :, :3], im[:, :, 3]


def main():
    """Test function"""
    # Modify the following file to visualize the model
    dn = '/ShapeNet/ShapeNetCore.v1/02958343/'
    model_id = [line.strip('\n') for line in open(dn + 'models.txt')]
    file_paths = [os.path.join(dn, line, 'model.obj') for line in model_id]
    sum_time = 0
    renderer = ShapeNetRenderer()
    renderer.initialize(file_paths, 500, 500)
    for i, curr_model_id in enumerate(model_id):
        start = time.time()
        image_path = '%s/%s.png' % ('/tmp', curr_model_id[:-4])

        az, el, depth_ratio = list(*([360, 5, 0.3] * np.random.rand(1, 3) + [0, 25, 0.65]))

        renderer.setModelIndex(i)
        renderer.setViewpoint(30, 30, 0, 0.7, 25)

        with TemporaryFile() as f, stdout_redirected(f):
            rendering, alpha = renderer.render(load_model=True, clear_model=True,
                                               image_path=image_path)

        print('Saved at %s' % image_path)

        end = time.time()
        sum_time += end - start
        if i % 10 == 0:
            print(sum_time/(10))
            sum_time = 0


if __name__ == "__main__":
    main()
mit
1,062,251,790,059,312,600
32.77116
101
0.560475
false
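The voxel2mesh()/write_obj() helpers in lib/blender_renderer.py above are pure NumPy, so they can be exercised outside Blender. A minimal sketch, assuming the two helpers are copied into a hypothetical bpy-free module named voxel_mesh (the file as shipped imports bpy at module level, so it only imports inside Blender):

import numpy as np
from voxel_mesh import voxel2mesh, write_obj  # hypothetical bpy-free copy of the helpers

# A 4x4x4 occupancy grid with a solid 2x2x2 block in one corner
voxels = np.zeros((4, 4, 4))
voxels[:2, :2, :2] = 1.0

verts, faces = voxel2mesh(voxels)
print(verts.shape, faces.shape)  # 8 cubes -> (64, 3) vertices, (96, 3) faces

# Write a Wavefront OBJ that Blender (or any mesh viewer) can import
write_obj('block.obj', verts, faces)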
spantaleev/sijax-python
sijax/core.py
1
16018
# -*- coding: utf-8 -* from __future__ import (absolute_import, unicode_literals) """ sijax.core ~~~~~~~~~~ Implements the main Sijax class, an instance of which is used to register callbacks, invoke callbacks, setup events handlers, etc. :copyright: (c) 2011 by Slavi Pantaleev. :license: BSD, see LICENSE.txt for more details. """ from builtins import str from .helper import json from .response.base import BaseResponse from .exception import SijaxError class Sijax(object): """The main Sijax object is what manages function registration and calling. Sijax initialization usually looks like this:: instance = Sijax() instance.set_data(POST_DICTIONARY_HERE) instance.set_request_uri(URI_TO_SEND_REQUESTS_TO) instance.register_callback('function_name', some_function) Sijax needs the POST parameters for the current request, to determine whether the request is meant to be handled by Sijax or by your regular page loading logic. Sijax needs a request URI to tell the client where to send the ajax requests. This is usually the current request URI, but another URI can also be used. Functions that are registered (using :meth:`sijax.Sijax.register_callback` or :meth:`sijax.Sijax.register_object`) are the only functions exposed to the browser for calling. """ PARAM_REQUEST = 'sijax_rq' PARAM_ARGS = 'sijax_args' #: Event called immediately before calling the response function. #: The event handler function receives the Response object argument. EVENT_BEFORE_PROCESSING = 'before_processing' #: Event called immediately after calling the response function. #: The event handler function receives the Response object argument. EVENT_AFTER_PROCESSING = 'after_processing' #: Event called when the function to be called is unknown #: (not registered with Sijax). #: The event handler function receives the Response object argument #: followed by the public name of the function that was #: supposed to be called. EVENT_INVALID_REQUEST = 'invalid_request' #: Event called when the function was called in a wrong way #: (bad arguments count). #: The event handler function receives the Response object argument #: followed by the callback function that was not called correctly. EVENT_INVALID_CALL = 'invalid_call' #: An option when registering callbacks that stores the callback function PARAM_CALLBACK = 'callback' #: An option when registering callbacks that allows the response class to #: be changed from :class:`sijax.response.BaseResponse` to something else. PARAM_RESPONSE_CLASS = 'response_class' #: An option when registering callbacks that makes certain extra arguments #: be passed automatically to the response function after the #: obj_response argument and before the other arguments PARAM_ARGS_EXTRA = 'args_extra' def __init__(self): cls = self.__class__ #: Would contain the request data (usually POST) self._data = {} #: Would contain the callbacks (name => dictionary of params) self._callbacks = {} #: Stores the various event callbacks self._events = {} #: The URI where the client should send ajax requests to self._request_uri = None #: The URI to json2.js or similar to provide JSON support #: for browsers that don't have it.. Sijax would load this on demand self._json_uri = None #: Stores a cached version of the arguments #: to be passed to the requested function self._request_args = None def invalid_request(obj_response, func_name): """Handler to be called when an unknown function is called.""" msg = 'The action you performed is unavailable! 
(Sijax error)' obj_response.alert(msg) def invalid_call(obj_response, callback): """Handler to be called when a function is called the wrong way.""" msg = 'You tried to perform an action in a wrong way! (Sijax error)' obj_response.alert(msg) # Register the before/after processing events # to not do anything by default self.register_event(cls.EVENT_BEFORE_PROCESSING, lambda r: r) self.register_event(cls.EVENT_AFTER_PROCESSING, lambda r: r) # Register the "error" events to show some alerts by default self.register_event(cls.EVENT_INVALID_REQUEST, invalid_request) self.register_event(cls.EVENT_INVALID_CALL, invalid_call) def set_data(self, data): """Sets the incoming data dictionary (usually POST). Sijax needs this data to determine if the current request is a Sijax request, and which callback should be called with what arguments. """ self._data = data self._request_args = None # cache invalidation return self def get_data(self): """Returns the incoming data array that the current instance uses.""" return self._data def register_callback(self, public_name, callback, response_class=None, args_extra=None): """Registers the specified callback function with the given name. Example:: def handler(obj_response): pass # Exposing the same function by 2 different names instance.register_callback('test', handler) instance.register_callback('test2', handler) The optional ``response_class`` parameter could be used to substitute the :class:`sijax.response.BaseResponse` class used by default. An instance of ``response_class`` is passed automatically as a first parameter to your response function (``obj_response``). Example:: def handler(obj_response): pass class MyResponse(sijax.response.BaseResponse): pass # `obj_response` passed to `handler` will be an instance of MyResponse instance.register_callback('test', handler, response_class=MyResponse) The optional ``args_extra`` parameter allows you to pass a list of extra arguments to your response function, immediately after the ``obj_response`` argument and before the other call arguments. Example:: def handler(obj_response, arg1, arg2, arg3): # arg1 = 'arg1' # arg2 = 'arg2' # arg3 - expected to come from the browser pass instance.register_callback('test', handler, args_extra=['arg1', 'arg2']) :param public_name: the name with which this function will be exposed in the browser :param callback: the actual function to call :param response_class: the response class, an instance of which to use instead of :class:`sijax.response.BaseResponse` :param args_extra: an optional list of additional arguments to pass after the response object argument and before the other call arguments """ if response_class is None: response_class = BaseResponse if not hasattr(response_class, '__call__'): raise SijaxError('response_class needs to be a callable!') if args_extra is None: args_extra = [] else: if isinstance(args_extra, tuple): args_extra = list(args_extra) elif not isinstance(args_extra, list): raise SijaxError('args_extra could only be a tuple or a list!') params = {} params[self.__class__.PARAM_CALLBACK] = callback params[self.__class__.PARAM_RESPONSE_CLASS] = response_class params[self.__class__.PARAM_ARGS_EXTRA] = args_extra self._callbacks[public_name] = params return self def register_object(self, obj, **options): """Registers all "public" callable attributes of the given object. The object could be anything (module, class, class instance, etc.) 
:param obj: the object whose callable attributes to register :param options: the options to be sent to :meth:`sijax.Sijax.register_callback` """ for attr_name in dir(obj): if attr_name.startswith('_'): continue attribute = getattr(obj, attr_name) if hasattr(attribute, '__call__'): self.register_callback(attr_name, attribute, **options) @property def is_sijax_request(self): """Tells whether this page request looks like a valid request meant to be handled by Sijax. Even if this is a Sijax request, this doesn't mean it's a valid one. Refer to :meth:`sijax.Sijax.process_request` to see what happens when a request for an unknown function was made. """ for k in (self.__class__.PARAM_REQUEST, self.__class__.PARAM_ARGS): if k not in self._data: return False return True @property def requested_function(self): """The name of the requested function, or None if the current request is not a Sijax request.""" request = self.__class__.PARAM_REQUEST return str(self._data[request]) if request in self._data else None @property def request_args(self): """The arguments list, that the function was called with in the browser. It should be noted that this is not necessarily the arguments list that the callback function will actually receive. Custom Response objects are allowed to override the arguments list. """ key_args = self.__class__.PARAM_ARGS if self._request_args is None: # no precached version.. cache it now self._request_args = [] if key_args in self._data: try: args = json.loads(self._data[key_args]) if isinstance(args, list): self._request_args = args except (ValueError): pass return self._request_args def process_request(self): """Executes the Sijax request and returns the response. Make sure that the current request is a Sijax request, using :attr:`sijax.Sijax.is_sijax_request` before calling this. A :class:`sijax.exception.SijaxError` will be raised, if this method is called for non-Sijax requests. If the function that was called from the browser is not registered (the function is unknown to Sijax), the :attr:`sijax.Sijax.EVENT_INVALID_REQUEST` event handler will be called instead. Refer to :meth:`sijax.Sijax.execute_callback` to see how the main handler is called and what the response (return value) is. """ if not self.is_sijax_request: raise SijaxError('You should not call this for non-Sijax requests!') function_name = self.requested_function if function_name in self._callbacks: options = self._callbacks[function_name] args = self.request_args else: # Function not registered.. Let's call the invalid request handler # passing to it the function name that should've been called args = [function_name] callback = self._events[self.__class__.EVENT_INVALID_REQUEST] options = {self.__class__.PARAM_CALLBACK: callback} return self.execute_callback(args, **options) def execute_callback(self, args, callback, **params): """Executes the given callback function and returns a response. Before executing the given callback, the :attr:`sijax.Sijax.EVENT_BEFORE_PROCESSING` event callback is fired. After executing the given callback, the :attr:`sijax.Sijax.EVENT_AFTER_PROCESSING` event callback is fired. The returned result could either be a string (for regular functions) or a generator (for streaming functions like Comet or Upload). 
:param args: the arguments list to pass to the callback :param callback: the callback function to execute :param options: more options - look at :meth:`sijax.Sijax.register_callback` to see what else is available :return: string for regular callbacks or generator for streaming callbacks """ cls = self.__class__ # Another response class could be used to extend behavior response_class = BaseResponse if cls.PARAM_RESPONSE_CLASS in params: response_class = params[cls.PARAM_RESPONSE_CLASS] args_extra = [] if cls.PARAM_ARGS_EXTRA in params: args_extra = params[cls.PARAM_ARGS_EXTRA] if not hasattr(callback, '__call__'): raise SijaxError('Provided callback is not callable!') # Pass the original call args to the response and get them back later # This allows the response class (potentially custom) # to override the arguments list. # Note that we're not passing args_extra to it, as we don't # want responses to know anything about that. obj_response = response_class(self, args) call_args = args_extra + obj_response._get_request_args() call_chain = [ (self._events[cls.EVENT_BEFORE_PROCESSING], []), (callback, call_args), (self._events[cls.EVENT_AFTER_PROCESSING], []) ] return obj_response._process_call_chain(call_chain) def register_event(self, event_name, callback): """Register a callback function to be called when the event occurs. Only one callback can be executed per event. The provided ``EVENT_*`` constants should be used for handling system events. Additionally, you can use any string to define your own events and callbacks. If more than one handler per event is needed, you can chain them manually, although it's not recommended. """ self._events[event_name] = callback return self def has_event(self, event_name): """Tells whether we've got a registered event by that name.""" return event_name in self._events def get_event(self, event_name): """Returns the event handler callback for the specified event.""" return self._events[event_name] if event_name in self._events else None def set_request_uri(self, uri): """Specifies the URI where ajax requests will be sent to by the client. This is usually the URI of the current request. """ self._request_uri = uri return self def set_json_uri(self, uri): """Sets the URI to an external JSON library, for browsers that do not support native JSON (such as IE <= 7). If this is not specified, Sijax will not work at all in such browsers. The script will only be loaded if a browser without JSON support is detected. """ self._json_uri = uri return self def get_js(self): """Returns the javascript code needed to prepare the Sijax environment in the browser. Note that the javascript code is unique and cannot be shared between different pages. """ if self._request_uri is None: raise SijaxError('Trying to get the sijax js, ' 'but no request_uri has been set!') js = 'Sijax.setRequestUri(%s);' % json.dumps(self._request_uri) if self._json_uri is not None: js += 'Sijax.setJsonUri(%s);' % json.dumps(self._json_uri) return js
bsd-3-clause
8,143,440,697,030,521,000
38.163814
84
0.629667
false
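A minimal request/response round trip for the Sijax class above. The POST keys (sijax_rq, sijax_args) and the import path follow the module as written (sijax/core.py); the handler name is hypothetical, and obj_response.alert() is the same BaseResponse method the module's own error handlers use:

from sijax.core import Sijax

def say_hello(obj_response, name):
    # obj_response is a response instance created by execute_callback()
    obj_response.alert('Hello, %s!' % name)

instance = Sijax()
instance.set_request_uri('/ajax/')
instance.register_callback('say_hello', say_hello)

# Simulate the POST data the browser-side Sijax.request() would send
instance.set_data({'sijax_rq': 'say_hello', 'sijax_args': '["World"]'})

if instance.is_sijax_request:
    js = instance.process_request()  # javascript response for the browser
    print(js)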
epam/DLab
infrastructure-provisioning/src/general/scripts/azure/edge_configure.py
1
20304
#!/usr/bin/python # ***************************************************************************** # # Copyright (c) 2016, EPAM SYSTEMS INC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ****************************************************************************** import json from dlab.fab import * from dlab.meta_lib import * import sys, time, os from dlab.actions_lib import * if __name__ == "__main__": local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'], os.environ['request_id']) local_log_filepath = "/logs/edge/" + local_log_filename logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s', level=logging.DEBUG, filename=local_log_filepath) try: print('Generating infrastructure names and tags') edge_conf = dict() edge_conf['service_base_name'] = os.environ['conf_service_base_name'] edge_conf['resource_group_name'] = os.environ['azure_resource_group_name'] edge_conf['key_name'] = os.environ['conf_key_name'] edge_conf['vpc_name'] = os.environ['azure_vpc_name'] edge_conf['region'] = os.environ['azure_region'] edge_conf['subnet_name'] = os.environ['azure_subnet_name'] edge_conf['user_name'] = os.environ['edge_user_name'].replace('_', '-') edge_conf['user_keyname'] = os.environ['edge_user_name'] edge_conf['private_subnet_name'] = edge_conf['service_base_name'] + '-' + edge_conf['user_name'] + '-subnet' edge_conf['instance_name'] = edge_conf['service_base_name'] + "-" + edge_conf['user_name'] + '-edge' edge_conf['network_interface_name'] = edge_conf['service_base_name'] + "-" + edge_conf['user_name'] + '-edge-nif' edge_conf['static_public_ip_name'] = edge_conf['service_base_name'] + "-" + edge_conf['user_name'] + '-edge-ip' edge_conf['primary_disk_name'] = edge_conf['instance_name'] + '-disk0' edge_conf['instance_dns_name'] = 'host-' + edge_conf['instance_name'] + '.' 
+ edge_conf['region'] + '.cloudapp.azure.com' edge_conf['user_storage_account_name'] = edge_conf['service_base_name'] + '-' + edge_conf[ 'user_name'] + '-storage' edge_conf['user_container_name'] = (edge_conf['service_base_name'] + '-' + edge_conf['user_name'] + '-container').lower() edge_conf['shared_storage_account_name'] = edge_conf['service_base_name'] + '-shared-storage' edge_conf['shared_container_name'] = (edge_conf['service_base_name'] + '-shared-container').lower() edge_conf['datalake_store_name'] = edge_conf['service_base_name'] + '-ssn-datalake' edge_conf['datalake_shared_directory_name'] = edge_conf['service_base_name'] + '-shared-folder' edge_conf['datalake_user_directory_name'] = '{0}-{1}-folder'.format(edge_conf['service_base_name'], edge_conf['user_name']) edge_conf['edge_security_group_name'] = edge_conf['instance_name'] + '-sg' edge_conf['notebook_security_group_name'] = edge_conf['service_base_name'] + "-" + edge_conf['user_name'] + \ '-nb-sg' edge_conf['master_security_group_name'] = edge_conf['service_base_name'] + '-' \ + edge_conf['user_name'] + '-dataengine-master-sg' edge_conf['slave_security_group_name'] = edge_conf['service_base_name'] + '-' \ + edge_conf['user_name'] + '-dataengine-slave-sg' edge_conf['dlab_ssh_user'] = os.environ['conf_os_user'] keyfile_name = "{}{}.pem".format(os.environ['conf_key_dir'], edge_conf['key_name']) edge_conf['private_subnet_cidr'] = AzureMeta().get_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'], edge_conf['private_subnet_name']).address_prefix edge_conf['edge_public_ip'] = AzureMeta().get_instance_public_ip_address(edge_conf['resource_group_name'], edge_conf['instance_name']) edge_conf['edge_private_ip'] = AzureMeta().get_private_ip_address(edge_conf['resource_group_name'], edge_conf['instance_name']) instance_hostname = AzureMeta().get_private_ip_address(edge_conf['resource_group_name'], edge_conf['instance_name']) except Exception as err: append_result("Failed to generate infrastructure names", str(err)) AzureActions().remove_instance(edge_conf['resource_group_name'], edge_conf['instance_name']) AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'], edge_conf['private_subnet_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['notebook_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['master_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['slave_security_group_name']) for storage_account in AzureMeta().list_storage_accounts(edge_conf['resource_group_name']): if edge_conf['user_storage_account_name'] == storage_account.tags["Name"]: AzureActions().remove_storage_account(edge_conf['resource_group_name'], storage_account.name) if os.environ['azure_datalake_enable'] == 'true': for datalake in AzureMeta().list_datalakes(edge_conf['resource_group_name']): if edge_conf['datalake_store_name'] == datalake.tags["Name"]: AzureActions().remove_datalake_directory(datalake.name, edge_conf['datalake_user_directory_name']) sys.exit(1) try: if os.environ['conf_os_family'] == 'debian': initial_user = 'ubuntu' sudo_group = 'sudo' if os.environ['conf_os_family'] == 'redhat': initial_user = 'ec2-user' sudo_group = 'wheel' logging.info('[CREATING DLAB SSH USER]') print('[CREATING DLAB SSH USER]') params = "--hostname {} --keyfile {} --initial_user {} 
--os_user {} --sudo_group {}".format\ (instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem", initial_user, edge_conf['dlab_ssh_user'], sudo_group) try: local("~/scripts/{}.py {}".format('create_ssh_user', params)) except: traceback.print_exc() raise Exception except Exception as err: append_result("Failed creating ssh user 'dlab'.", str(err)) AzureActions().remove_instance(edge_conf['resource_group_name'], edge_conf['instance_name']) AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'], edge_conf['private_subnet_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['notebook_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['master_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['slave_security_group_name']) for storage_account in AzureMeta().list_storage_accounts(edge_conf['resource_group_name']): if edge_conf['user_storage_account_name'] == storage_account.tags["Name"]: AzureActions().remove_storage_account(edge_conf['resource_group_name'], storage_account.name) if os.environ['azure_datalake_enable'] == 'true': for datalake in AzureMeta().list_datalakes(edge_conf['resource_group_name']): if edge_conf['datalake_store_name'] == datalake.tags["Name"]: AzureActions().remove_datalake_directory(datalake.name, edge_conf['datalake_user_directory_name']) sys.exit(1) try: print('[INSTALLING PREREQUISITES]') logging.info('[INSTALLING PREREQUISITES]') params = "--hostname {} --keyfile {} --user {} --region {}".\ format(instance_hostname, keyfile_name, edge_conf['dlab_ssh_user'], os.environ['azure_region']) try: local("~/scripts/{}.py {}".format('install_prerequisites', params)) except: traceback.print_exc() raise Exception except Exception as err: append_result("Failed installing apps: apt & pip.", str(err)) AzureActions().remove_instance(edge_conf['resource_group_name'], edge_conf['instance_name']) AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'], edge_conf['private_subnet_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['notebook_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['master_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['slave_security_group_name']) for storage_account in AzureMeta().list_storage_accounts(edge_conf['resource_group_name']): if edge_conf['user_storage_account_name'] == storage_account.tags["Name"]: AzureActions().remove_storage_account(edge_conf['resource_group_name'], storage_account.name) if os.environ['azure_datalake_enable'] == 'true': for datalake in AzureMeta().list_datalakes(edge_conf['resource_group_name']): if edge_conf['datalake_store_name'] == datalake.tags["Name"]: AzureActions().remove_datalake_directory(datalake.name, edge_conf['datalake_user_directory_name']) sys.exit(1) try: print('[INSTALLING HTTP PROXY]') logging.info('[INSTALLING HTTP PROXY]') additional_config = {"exploratory_subnet": edge_conf['private_subnet_cidr'], "template_file": "/root/templates/squid.conf"} params = "--hostname {} --keyfile {} --additional_config '{}' --user {}" \ 
.format(instance_hostname, keyfile_name, json.dumps(additional_config), edge_conf['dlab_ssh_user']) try: local("~/scripts/{}.py {}".format('configure_http_proxy', params)) except: traceback.print_exc() raise Exception except Exception as err: append_result("Failed installing http proxy.", str(err)) AzureActions().remove_instance(edge_conf['resource_group_name'], edge_conf['instance_name']) AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'], edge_conf['private_subnet_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['notebook_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['master_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['slave_security_group_name']) for storage_account in AzureMeta().list_storage_accounts(edge_conf['resource_group_name']): if edge_conf['user_storage_account_name'] == storage_account.tags["Name"]: AzureActions().remove_storage_account(edge_conf['resource_group_name'], storage_account.name) if os.environ['azure_datalake_enable'] == 'true': for datalake in AzureMeta().list_datalakes(edge_conf['resource_group_name']): if edge_conf['datalake_store_name'] == datalake.tags["Name"]: AzureActions().remove_datalake_directory(datalake.name, edge_conf['datalake_user_directory_name']) sys.exit(1) try: print('[INSTALLING USERs KEY]') logging.info('[INSTALLING USERs KEY]') additional_config = {"user_keyname": edge_conf['user_keyname'], "user_keydir": os.environ['conf_key_dir']} params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format( instance_hostname, keyfile_name, json.dumps(additional_config), edge_conf['dlab_ssh_user']) try: local("~/scripts/{}.py {}".format('install_user_key', params)) except: traceback.print_exc() raise Exception except Exception as err: append_result("Failed installing users key. 
Excpeption: " + str(err)) AzureActions().remove_instance(edge_conf['resource_group_name'], edge_conf['instance_name']) AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'], edge_conf['private_subnet_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['notebook_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['master_security_group_name']) AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['slave_security_group_name']) for storage_account in AzureMeta().list_storage_accounts(edge_conf['resource_group_name']): if edge_conf['user_storage_account_name'] == storage_account.tags["Name"]: AzureActions().remove_storage_account(edge_conf['resource_group_name'], storage_account.name) if os.environ['azure_datalake_enable'] == 'true': for datalake in AzureMeta().list_datalakes(edge_conf['resource_group_name']): if edge_conf['datalake_store_name'] == datalake.tags["Name"]: AzureActions().remove_datalake_directory(datalake.name, edge_conf['datalake_user_directory_name']) sys.exit(1) try: for storage_account in AzureMeta().list_storage_accounts(edge_conf['resource_group_name']): if edge_conf['shared_storage_account_name'] == storage_account.tags["Name"]: shared_storage_account_name = storage_account.name if edge_conf['user_storage_account_name'] == storage_account.tags["Name"]: user_storage_account_name = storage_account.name print('[SUMMARY]') logging.info('[SUMMARY]') print("Instance name: {}".format(edge_conf['instance_name'])) print("Hostname: {}".format(edge_conf['instance_dns_name'])) print("Public IP: {}".format(edge_conf['edge_public_ip'])) print("Private IP: {}".format(edge_conf['edge_private_ip'])) print("Key name: {}".format(edge_conf['key_name'])) print("User storage account name: {}".format(user_storage_account_name)) print("User container name: {}".format(edge_conf['user_container_name'])) if os.environ['azure_datalake_enable'] == 'true': for datalake in AzureMeta().list_datalakes(edge_conf['resource_group_name']): if edge_conf['datalake_store_name'] == datalake.tags["Name"]: datalake_id = datalake.name print("Data Lake name: {}".format(datalake_id)) print("Data Lake tag name: {}".format(edge_conf['datalake_store_name'])) print("Data Lake Store user directory name: {}".format(edge_conf['datalake_user_directory_name'])) print("Notebook SG: {}".format(edge_conf['notebook_security_group_name'])) print("Edge SG: {}".format(edge_conf['edge_security_group_name'])) print("Notebook subnet: {}".format(edge_conf['private_subnet_cidr'])) with open("/root/result.json", 'w') as result: if os.environ['azure_datalake_enable'] == 'false': res = {"hostname": edge_conf['instance_dns_name'], "public_ip": edge_conf['edge_public_ip'], "ip": edge_conf['edge_private_ip'], "key_name": edge_conf['key_name'], "user_storage_account_name": user_storage_account_name, "user_container_name": edge_conf['user_container_name'], "shared_storage_account_name": shared_storage_account_name, "shared_container_name": edge_conf['shared_container_name'], "user_storage_account_tag_name": edge_conf['user_storage_account_name'], "tunnel_port": "22", "socks_port": "1080", "notebook_sg": edge_conf['notebook_security_group_name'], "edge_sg": edge_conf['edge_security_group_name'], "notebook_subnet": edge_conf['private_subnet_cidr'], "instance_id": edge_conf['instance_name'], 
"full_edge_conf": edge_conf, "Action": "Create new EDGE server"} else: res = {"hostname": edge_conf['instance_dns_name'], "public_ip": edge_conf['edge_public_ip'], "ip": edge_conf['edge_private_ip'], "key_name": edge_conf['key_name'], "user_storage_account_name": user_storage_account_name, "user_container_name": edge_conf['user_container_name'], "shared_storage_account_name": shared_storage_account_name, "shared_container_name": edge_conf['shared_container_name'], "user_storage_account_tag_name": edge_conf['user_storage_account_name'], "datalake_name": datalake_id, "datalake_tag_name": edge_conf['datalake_store_name'], "datalake_shared_directory_name": edge_conf['datalake_shared_directory_name'], "datalake_user_directory_name": edge_conf['datalake_user_directory_name'], "tunnel_port": "22", "socks_port": "1080", "notebook_sg": edge_conf['notebook_security_group_name'], "edge_sg": edge_conf['edge_security_group_name'], "notebook_subnet": edge_conf['private_subnet_cidr'], "instance_id": edge_conf['instance_name'], "full_edge_conf": edge_conf, "Action": "Create new EDGE server"} print(json.dumps(res)) result.write(json.dumps(res)) except: print("Failed writing results.") sys.exit(0) sys.exit(0)
apache-2.0
-8,906,384,043,524,874,000
65.789474
131
0.578063
false
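Note that edge_configure.py repeats the same multi-step Azure teardown block verbatim in every except clause. A hedged refactor sketch, assuming the dlab AzureMeta/AzureActions APIs the script already uses; rollback() is a hypothetical helper name:

import os
import sys

from dlab.meta_lib import AzureMeta
from dlab.actions_lib import AzureActions

def rollback(edge_conf):
    """Tear down every resource the edge provisioning may have created."""
    group = edge_conf['resource_group_name']
    AzureActions().remove_instance(group, edge_conf['instance_name'])
    AzureActions().remove_subnet(group, edge_conf['vpc_name'],
                                 edge_conf['private_subnet_name'])
    for sg_key in ('edge_security_group_name', 'notebook_security_group_name',
                   'master_security_group_name', 'slave_security_group_name'):
        AzureActions().remove_security_group(group, edge_conf[sg_key])
    for account in AzureMeta().list_storage_accounts(group):
        if edge_conf['user_storage_account_name'] == account.tags["Name"]:
            AzureActions().remove_storage_account(group, account.name)
    if os.environ['azure_datalake_enable'] == 'true':
        for datalake in AzureMeta().list_datalakes(group):
            if edge_conf['datalake_store_name'] == datalake.tags["Name"]:
                AzureActions().remove_datalake_directory(
                    datalake.name, edge_conf['datalake_user_directory_name'])

# Each except clause would then collapse to:
#     append_result("Failed ...", str(err))
#     rollback(edge_conf)
#     sys.exit(1)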
markwal/Cura
plugins/CuraEngineBackend/CuraEngineBackend.py
1
10593
# Copyright (c) 2015 Ultimaker B.V. # Cura is released under the terms of the AGPLv3 or higher. from UM.Backend.Backend import Backend from UM.Application import Application from UM.Scene.SceneNode import SceneNode from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator from UM.Preferences import Preferences from UM.Math.Vector import Vector from UM.Signal import Signal from UM.Logger import Logger from UM.Resources import Resources from UM.Settings.SettingOverrideDecorator import SettingOverrideDecorator from UM.Message import Message from cura.OneAtATimeIterator import OneAtATimeIterator from . import Cura_pb2 from . import ProcessSlicedObjectListJob from . import ProcessGCodeJob from . import StartSliceJob import os import sys import numpy from PyQt5.QtCore import QTimer from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") class CuraEngineBackend(Backend): def __init__(self): super().__init__() # Find out where the engine is located, and how it is called. This depends on how Cura is packaged and which OS we are running on. default_engine_location = os.path.join(Application.getInstallPrefix(), "bin", "CuraEngine") if hasattr(sys, "frozen"): default_engine_location = os.path.join(os.path.dirname(os.path.abspath(sys.executable)), "CuraEngine") if sys.platform == "win32": default_engine_location += ".exe" default_engine_location = os.path.abspath(default_engine_location) Preferences.getInstance().addPreference("backend/location", default_engine_location) self._scene = Application.getInstance().getController().getScene() self._scene.sceneChanged.connect(self._onSceneChanged) # Workaround to disable layer view processing if layer view is not active. self._layer_view_active = False Application.getInstance().getController().activeViewChanged.connect(self._onActiveViewChanged) self._onActiveViewChanged() self._stored_layer_data = None Application.getInstance().getMachineManager().activeMachineInstanceChanged.connect(self._onChanged) self._profile = None Application.getInstance().getMachineManager().activeProfileChanged.connect(self._onActiveProfileChanged) self._onActiveProfileChanged() self._change_timer = QTimer() self._change_timer.setInterval(500) self._change_timer.setSingleShot(True) self._change_timer.timeout.connect(self.slice) self._message_handlers[Cura_pb2.SlicedObjectList] = self._onSlicedObjectListMessage self._message_handlers[Cura_pb2.Progress] = self._onProgressMessage self._message_handlers[Cura_pb2.GCodeLayer] = self._onGCodeLayerMessage self._message_handlers[Cura_pb2.GCodePrefix] = self._onGCodePrefixMessage self._message_handlers[Cura_pb2.ObjectPrintTime] = self._onObjectPrintTimeMessage self._slicing = False self._restart = False self._enabled = True self._always_restart = True self._message = None self.backendConnected.connect(self._onBackendConnected) Application.getInstance().getController().toolOperationStarted.connect(self._onToolOperationStarted) Application.getInstance().getController().toolOperationStopped.connect(self._onToolOperationStopped) Application.getInstance().getMachineManager().activeMachineInstanceChanged.connect(self._onInstanceChanged) ## Get the command that is used to call the engine. # This is usefull for debugging and used to actually start the engine # \return list of commands and args / parameters. 
def getEngineCommand(self): active_machine = Application.getInstance().getMachineManager().getActiveMachineInstance() if not active_machine: return None return [Preferences.getInstance().getValue("backend/location"), "connect", "127.0.0.1:{0}".format(self._port), "-j", active_machine.getMachineDefinition().getPath(), "-vv"] ## Emitted when we get a message containing print duration and material amount. This also implies the slicing has finished. # \param time The amount of time the print will take. # \param material_amount The amount of material the print will use. printDurationMessage = Signal() ## Emitted when the slicing process starts. slicingStarted = Signal() ## Emitted whne the slicing process is aborted forcefully. slicingCancelled = Signal() ## Perform a slice of the scene. def slice(self): if not self._enabled: return if self._slicing: self._slicing = False self._restart = True if self._process is not None: Logger.log("d", "Killing engine process") try: self._process.terminate() except: # terminating a process that is already terminating causes an exception, silently ignore this. pass if self._message: self._message.hide() self._message = None self.slicingCancelled.emit() return if self._profile.hasErrorValue(): Logger.log('w', "Profile has error values. Aborting slicing") if self._message: self._message.hide() self._message = None self._message = Message(catalog.i18nc("@info:status", "Unable to slice. Please check your setting values for errors.")) self._message.show() return #No slicing if we have error values since those are by definition illegal values. self.processingProgress.emit(0.0) if not self._message: self._message = Message(catalog.i18nc("@info:status", "Slicing..."), 0, False, -1) self._message.show() else: self._message.setProgress(-1) self._scene.gcode_list = [] self._slicing = True job = StartSliceJob.StartSliceJob(self._profile, self._socket) job.start() job.finished.connect(self._onStartSliceCompleted) def _onStartSliceCompleted(self, job): if job.getError() or job.getResult() != True: if self._message: self._message.hide() self._message = None return def _onSceneChanged(self, source): if type(source) is not SceneNode: return if source is self._scene.getRoot(): return if source.getMeshData() is None: return if source.getMeshData().getVertices() is None: return self._onChanged() def _onActiveProfileChanged(self): if self._profile: self._profile.settingValueChanged.disconnect(self._onSettingChanged) self._profile = Application.getInstance().getMachineManager().getActiveProfile() if self._profile: self._profile.settingValueChanged.connect(self._onSettingChanged) self._onChanged() def _onSettingChanged(self, setting): self._onChanged() def _onSlicedObjectListMessage(self, message): if self._layer_view_active: job = ProcessSlicedObjectListJob.ProcessSlicedObjectListJob(message) job.start() else : self._stored_layer_data = message def _onProgressMessage(self, message): if self._message: self._message.setProgress(round(message.amount * 100)) self.processingProgress.emit(message.amount) def _onGCodeLayerMessage(self, message): self._scene.gcode_list.append(message.data.decode("utf-8", "replace")) def _onGCodePrefixMessage(self, message): self._scene.gcode_list.insert(0, message.data.decode("utf-8", "replace")) def _onObjectPrintTimeMessage(self, message): self.printDurationMessage.emit(message.time, message.material_amount) self.processingProgress.emit(1.0) self._slicing = False if self._message: self._message.setProgress(100) self._message.hide() self._message = None 
if self._always_restart: try: self._process.terminate() self._createSocket() except: # terminating a process that is already terminating causes an exception, silently ignore this. pass def _createSocket(self): super()._createSocket() self._socket.registerMessageType(1, Cura_pb2.Slice) self._socket.registerMessageType(2, Cura_pb2.SlicedObjectList) self._socket.registerMessageType(3, Cura_pb2.Progress) self._socket.registerMessageType(4, Cura_pb2.GCodeLayer) self._socket.registerMessageType(5, Cura_pb2.ObjectPrintTime) self._socket.registerMessageType(6, Cura_pb2.SettingList) self._socket.registerMessageType(7, Cura_pb2.GCodePrefix) ## Manually triggers a reslice def forceSlice(self): self._change_timer.start() def _onChanged(self): if not self._profile: return self._change_timer.start() def _onBackendConnected(self): if self._restart: self._onChanged() self._restart = False def _onToolOperationStarted(self, tool): self._enabled = False # Do not reslice when a tool is doing it's 'thing' def _onToolOperationStopped(self, tool): self._enabled = True # Tool stop, start listening for changes again. self._onChanged() def _onActiveViewChanged(self): if Application.getInstance().getController().getActiveView(): view = Application.getInstance().getController().getActiveView() if view.getPluginId() == "LayerView": self._layer_view_active = True if self._stored_layer_data: job = ProcessSlicedObjectListJob.ProcessSlicedObjectListJob(self._stored_layer_data) job.start() self._stored_layer_data = None else: self._layer_view_active = False def _onInstanceChanged(self): self._slicing = False self._restart = True if self._process is not None: Logger.log("d", "Killing engine process") try: self._process.terminate() except: # terminating a process that is already terminating causes an exception, silently ignore this. pass self.slicingCancelled.emit()
agpl-3.0
-7,926,261,142,368,935,000
37.380435
180
0.652412
false
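CuraEngineBackend coalesces rapid scene and setting changes into a single reslice with a 500 ms single-shot QTimer: every change calls start(), which restarts the countdown, so slice() only fires once the scene has been quiet. A standalone sketch of that debounce idiom, assuming PyQt5 is installed:

import sys
from PyQt5.QtCore import QCoreApplication, QTimer

app = QCoreApplication(sys.argv)

timer = QTimer()
timer.setInterval(500)     # wait for half a second of silence
timer.setSingleShot(True)  # fire once, not periodically

def reslice():
    print("slicing once, despite three changes")
    app.quit()

timer.timeout.connect(reslice)

# Three rapid "scene changed" events; each start() restarts the countdown,
# so reslice() runs only once, 500 ms after the last one.
for _ in range(3):
    timer.start()

sys.exit(app.exec_())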
adamsumm/CausalMario
HiddenCauses/irm/Mario/parse.py
1
2165
import sys
import re
import random

vevent = re.compile("V\s+([\-\d\w]+)\s+(\w+)")
sevent = re.compile("S\s+([\-\d\w]+)\s+([\-\d\w]+)")
aevent = re.compile("A\s+([\-\d\w]+)")
devent = re.compile("D\s+([\-\d\w]{2,})")
cevent = re.compile("C\s+([\-\d\w]+)\s+[\-\d]*\s*([\-\d\w]+)\s+([\-\d\w]+)")

animate = ["Goomba", "Mario", "BigMario", "FireMario", "GreenKoopa", "RedKoopa"]
enemies = ["Goomba", "GreenKoopa", "RedKoopa"]
dirs = ["U", "D", "L", "R"]
opposite = {
    "U": "D",
    "D": "U",
    "L": "R",
    "R": "L"
}
enemyOdds = 0  # 1.0/3200.0
bushOdds = 0  # 1.0/3200.0

with open(sys.argv[1], 'r') as openfile:
    print "ObjectA,ObjectB,A2BDir,EffectType,Source,Target,VelChange"
    causes = []
    effects = []
    for line in openfile:
        if 'NEWFRAME' in line:
            #print causes
            if random.random() < bushOdds:
                an = random.choice(animate)
                d = random.choice(dirs)
                causes.append(["Bush", an, d])
                causes.append([an, "Bush", opposite[d]])
            if random.random() < enemyOdds:
                e1 = random.choice(enemies)
                e2 = random.choice(enemies)
                d = random.choice(dirs)
                causes.append([e1, e2, d])
                causes.append([e2, e1, opposite[d]])
            if not causes:
                pass
                #causes.append(["None","None","None"])
            for cause in causes:
                if not effects:
                    print ",".join(cause) + ",None,None,None,None"
                for effect in effects:
                    print ",".join(cause) + "," + ",".join(effect)
            causes = []
            effects = []
        else:
            amatch = aevent.search(line)
            dmatch = devent.search(line)
            smatch = sevent.search(line)
            cmatch = cevent.search(line)
            vmatch = vevent.search(line)
            if amatch:
                effects.append(["Add", amatch.group(1), "None", "None"])
            if vmatch:
                effects.append(["VelChange", vmatch.group(1), "None", vmatch.group(2)])
            if smatch:
                effects.append(["Change", smatch.group(1), smatch.group(2), "None"])
            if dmatch:
                if 'RUN' not in line:
                    effects.append(["Delete", dmatch.group(1), "None", "None"])
            if cmatch:
                o1 = cmatch.group(1)
                o2 = cmatch.group(2)
                if "-" in o1:
                    o1 = "B" + o1
                if "-" in o2:
                    o2 = "B" + o2
                causes.append([o1, o2, cmatch.group(3)])
                causes.append([o2, o1, opposite[cmatch.group(3)]])
mit
7,378,686,023,350,711,000
28.256757
76
0.579677
false
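The parser above keys everything off five single-letter event regexes. A small, self-contained check of the collision pattern (Python 3 syntax; the sample log lines are hypothetical):

import re

cevent = re.compile(r"C\s+([\-\d\w]+)\s+[\-\d]*\s*([\-\d\w]+)\s+([\-\d\w]+)")

for line in ["C Mario Goomba L", "C 3-4 Mario U"]:
    m = cevent.search(line)
    o1, o2, direction = m.group(1), m.group(2), m.group(3)
    # The script prefixes block coordinates such as "3-4" with "B"
    if "-" in o1:
        o1 = "B" + o1
    if "-" in o2:
        o2 = "B" + o2
    print(o1, o2, direction)  # -> Mario Goomba L, then B3-4 Mario U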
mazvv/travelcrm
travelcrm/lib/qb/services.py
1
1097
# -*- coding: utf-8 -*-

from collections import Iterable

from . import ResourcesQueryBuilder
from ...models.resource_type import ResourceType
from ...models.resource import Resource
from ...models.service import Service


class ServicesQueryBuilder(ResourcesQueryBuilder):

    def __init__(self, context):
        super(ServicesQueryBuilder, self).__init__(context)
        self._fields = {
            'id': Service.id,
            '_id': Service.id,
            'name': Service.name,
            'resource_type': ResourceType.humanize,
        }
        self._simple_search_fields = [
            Service.name,
        ]
        self.build_query()

    def build_query(self):
        self.build_base_query()
        self.query = (
            self.query
            .join(Service, Resource.service)
            .join(ResourceType, Service.resource_type)
        )
        super(ServicesQueryBuilder, self).build_query()

    def filter_id(self, id):
        assert isinstance(id, Iterable), u"Must be iterable object"
        if id:
            self.query = self.query.filter(Service.id.in_(id))
gpl-3.0
-5,362,266,019,576,411,000
28.648649
67
0.601641
false
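Typical use of the query builder above, assuming a configured travelcrm resources context and SQLAlchemy session (both hypothetical here), and assuming the base builder labels result columns after the _fields keys. filter_id() insists on an iterable, so a single id goes in as a list:

from travelcrm.lib.qb.services import ServicesQueryBuilder

qb = ServicesQueryBuilder(context)  # context: travelcrm resources context (assumed)
qb.filter_id([7, 8, 9])             # a bare int would fail the Iterable assert
for row in qb.query:
    print(row.id, row.name, row.resource_type)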
arenaoftitans/arena-of-titans-api
aot/game/cards/deck.py
1
4174
################################################################################
# Copyright (C) 2015-2020 by Last Run Contributors.
#
# This file is part of Arena of Titans.
#
# Arena of Titans is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Arena of Titans is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Arena of Titans. If not, see <http://www.gnu.org/licenses/>.
################################################################################

import copy
import random

from .card import Card
from .exceptions import CardNotFoundError


class Deck:
    CARDS_IN_HAND = 5
    _cards = []
    _graveyard = []
    _hand = []
    _stock = []

    def __init__(self, cards):
        self._cards = cards
        self._graveyard = []
        self._hand = []
        self._init_stock()
        self.init_turn()

    def _init_stock(self):
        self._stock = random.sample(self._cards, len(self._cards))

    def init_turn(self):
        while len(self._hand) < self.CARDS_IN_HAND:
            self._hand.append(self._draw_next_card())

    def _draw_next_card(self):
        if self.number_cards_in_stock == 0:
            self._init_stock()
            for card in self._hand:
                self._stock.remove(card)
        drawn_card = self._stock[0]
        self._stock.remove(drawn_card)
        return drawn_card

    def modify_colors(self, colors, filter_=None):
        for card in filter(filter_, self._hand):
            card.modify_colors(colors)

    def modify_number_moves(self, delta, filter_=None):
        for card in filter(filter_, self._hand):
            card.modify_number_moves(delta)

    def set_special_actions_to_card(self, card_name, special_action_descriptions):
        for card in filter(lambda x: x.name == card_name, self._hand):
            actions_copy = copy.deepcopy(special_action_descriptions)
            card.set_special_actions(actions_copy)

    def view_possible_squares(self, card, position):
        if card is not None and not isinstance(card, Card):
            game_card = self.get_card(card.name, card.color)
        else:
            game_card = card
        if game_card is not None and position is not None:
            return game_card.move(position)
        else:
            return set()

    def play(self, card):
        if card is not None and not isinstance(card, Card):
            card = self.get_card(card.name, card.color)
        if card is not None and card in self._hand:
            card.revert_to_default()
            self._hand.remove(card)
            self._graveyard.append(card)

    def remove_color_from_possible_colors(self, color):
        for card in self._hand:
            card.remove_color_from_possible_colors(color)

    def revert_to_default(self):
        for card in self._hand:
            card.revert_to_default()

    @property
    def first_card_in_hand(self):
        return self._hand[0]

    def get_card(self, card_name, card_color):
        matching_cards = [
            card for card in self._hand if card.name == card_name and card.color == card_color
        ]
        if len(matching_cards) != 1:
            raise CardNotFoundError

        return matching_cards[0]

    @property
    def graveyard(self):
        return self._graveyard

    @property
    def hand(self):
        return self._hand

    @property
    def number_cards_in_stock(self):
        return len(self._stock)

    @property
    def number_cards_in_graveyard(self):
        return len(self._graveyard)

    @property
    def number_cards_in_hand(self):
        return len(self._hand)

    @property
    def stock(self):
        return self._stock

    def __iter__(self):
        return iter(self._hand + self._stock)
agpl-3.0
-7,988,025,100,036,046,000
28.814286
94
0.594873
false
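A minimal sketch of the Deck above with stand-in cards; the import path follows the path column, and StubCard is a hypothetical stub implementing only what Deck touches on draw and play (unique name/color pairs keep get_card() unambiguous):

from aot.game.cards.deck import Deck

class StubCard:
    def __init__(self, name, color):
        self.name, self.color = name, color

    def revert_to_default(self):  # called by Deck.play()
        pass

cards = [StubCard('Wizard', c) for c in ('RED', 'BLUE', 'GREEN', 'YELLOW')] + \
        [StubCard('Rider', c) for c in ('RED', 'BLUE', 'GREEN', 'YELLOW')]

deck = Deck(cards)                     # shuffles the stock and draws 5 cards
print(deck.number_cards_in_hand)       # 5
deck.play(deck.first_card_in_hand)     # moves the card to the graveyard
print(deck.number_cards_in_graveyard)  # 1
deck.init_turn()                       # draw back up to CARDS_IN_HAND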
petebachant/CFT-vectors
cft_vectors.py
1
18584
#!/usr/bin/env python """ This script generates a force and velocity vector diagram for a cross-flow turbine. """ from __future__ import division, print_function import numpy as np import matplotlib import matplotlib.pyplot as plt import pandas as pd from scipy.interpolate import interp1d import seaborn as sns from pxl.styleplot import set_sns import os # Define some colors (some from the Seaborn deep palette) blue = sns.color_palette()[0] green = sns.color_palette()[1] dark_gray = (0.3, 0.3, 0.3) red = sns.color_palette()[2] purple = sns.color_palette()[3] tan = sns.color_palette()[4] light_blue = sns.color_palette()[5] def load_foildata(): """Loads NACA 0020 airfoil data at Re = 2.1 x 10^5.""" Re = 2.1e5 foil = "0020" fname = "NACA {}_T1_Re{:.3f}_M0.00_N9.0.dat".format(foil, Re/1e6) fpath = "data/{}".format(fname) alpha, cl, cd = np.loadtxt(fpath, skiprows=14, unpack=True) if alpha[0] != 0.0: alpha = np.append([0.0], alpha[:-1]) cl = np.append([1e-12], cl[:-1]) cd = np.append(cd[0], cd[:-1]) # Mirror data about 0 degrees AoA since it's a symmetrical foil alpha = np.append(-np.flipud(alpha), alpha) cl = np.append(-np.flipud(cl), cl) cd = np.append(np.flipud(cd), cd) df = pd.DataFrame() df["alpha_deg"] = alpha df["cl"] = cl df["cd"] = cd return df def lookup_foildata(alpha_deg): """Lookup foil characteristics at given angle of attack.""" alpha_deg = np.asarray(alpha_deg) df = load_foildata() df["alpha_rad"] = np.deg2rad(df.alpha_deg) f_cl = interp1d(df.alpha_deg, df.cl, bounds_error=False) f_cd = interp1d(df.alpha_deg, df.cd, bounds_error=False) f_ct = interp1d(df.alpha_deg, df.cl*np.sin(df.alpha_rad) \ - df.cd*np.cos(df.alpha_rad), bounds_error=False) cl, cd, ct = f_cl(alpha_deg), f_cd(alpha_deg), f_ct(alpha_deg) return {"cl": cl, "cd": cd, "ct": ct} def calc_cft_ctorque(tsr=2.0, chord=0.14, R=0.5): """Calculate the geometric torque coefficient for a CFT.""" U_infty = 1.0 omega = tsr*U_infty/R theta_blade_deg = np.arange(0, 721) theta_blade_rad = np.deg2rad(theta_blade_deg) blade_vel_mag = omega*R blade_vel_x = blade_vel_mag*np.cos(theta_blade_rad) blade_vel_y = blade_vel_mag*np.sin(theta_blade_rad) u = U_infty # No induction rel_vel_mag = np.sqrt((blade_vel_x + u)**2 + blade_vel_y**2) rel_vel_x = u + blade_vel_x rel_vel_y = blade_vel_y relvel_dot_bladevel = (blade_vel_x*rel_vel_x + blade_vel_y*rel_vel_y) alpha_rad = np.arccos(relvel_dot_bladevel/(rel_vel_mag*blade_vel_mag)) alpha_rad[theta_blade_deg > 180] *= -1 alpha_deg = np.rad2deg(alpha_rad) foil_coeffs = lookup_foildata(alpha_deg) ctorque = foil_coeffs["ct"]*chord/(2*R)*rel_vel_mag**2/U_infty**2 cdx = -foil_coeffs["cd"]*np.sin(np.pi/2 - alpha_rad + theta_blade_rad) clx = foil_coeffs["cl"]*np.cos(np.pi/2 - alpha_rad - theta_blade_rad) df = pd.DataFrame() df["theta"] = theta_blade_deg df["alpha_deg"] = alpha_deg df["rel_vel_mag"] = rel_vel_mag df["ctorque"] = ctorque df["cdrag"] = clx + cdx return df def mag(v): """ Return magnitude of 2-D vector (input as a tuple, list, or NumPy array). 
""" return np.sqrt(v[0]**2 + v[1]**2) def rotate(v, rad): """Rotate a 2-D vector by rad radians.""" dc, ds = np.cos(rad), np.sin(rad) x, y = v[0], v[1] x, y = dc*x - ds*y, ds*x + dc*y return np.array((x, y)) def gen_naca_points(naca="0020", c=100, npoints=100, tuples=True): """Generate points for a NACA foil.""" x = np.linspace(0, 1, npoints)*c t = float(naca[2:])/100.0 y = 5.0*t*c*(0.2969*np.sqrt(x/c) - 0.1260*(x/c) - 0.3516*(x/c)**2 \ + 0.2843*(x/c)**3 - 0.1015*(x/c)**4) y = np.append(y, -y[::-1]) x = np.append(x, x[::-1]) if tuples: return np.array([(x0, y0) for x0, y0 in zip(x, y)]) else: return x, y def test_gen_naca_points(): points = gen_naca_points() x = [] y = [] for p in points: x.append(p[0]) y.append(p[1]) fig, ax = plt.subplots() ax.plot(x, y, "o") ax.set_aspect(1) plt.show() def plot_radius(ax, theta_deg=0): """Plot radius at given azimuthal angle.""" r = 0.495 theta_rad = np.deg2rad(theta_deg) x2, y2 = r*np.cos(theta_rad), r*np.sin(theta_rad) ax.plot((0, x2), (0, y2), "gray", linewidth=2) def plot_center(ax, length=0.07, linewidth=1.2): """Plot centermark at origin.""" ax.plot((0, 0), (-length/2, length/2), lw=linewidth, color="black") ax.plot((-length/2, length/2), (0, 0), lw=linewidth, color="black") def make_naca_path(c=0.3, theta_deg=0.0): verts = gen_naca_points(c=c) verts = np.array([rotate(v, -np.pi/2) for v in verts]) verts += (0.5, c/4) theta_rad = np.deg2rad(theta_deg) verts = np.array([rotate(v, theta_rad) for v in verts]) p = matplotlib.path.Path(verts, closed=True) return p def plot_foil(ax, c=0.3, theta_deg=0.0): """Plot the foil shape using a matplotlib patch.""" p = matplotlib.patches.PathPatch(make_naca_path(c, theta_deg), facecolor="gray", linewidth=1, edgecolor="gray") ax.add_patch(p) def plot_blade_path(ax, R=0.5): """Plot blade path as a dashed line.""" p = plt.Circle((0, 0), R, linestyle="dashed", edgecolor="black", facecolor="none", linewidth=1) ax.add_patch(p) def plot_vectors(fig, ax, theta_deg=0.0, tsr=2.0, c=0.3, label=False): """Plot blade velocity, free stream velocity, relative velocity, lift, and drag vectors. 
""" r = 0.5 u_infty = 0.26 theta_deg %= 360 theta_rad = np.deg2rad(theta_deg) blade_xy = r*np.cos(theta_rad), r*np.sin(theta_rad) head_width = 0.04 head_length = 0.11 linewidth = 1.5 # Function for plotting vector labels def plot_label(text, x, y, dx, dy, text_width=0.09, text_height=0.03, sign=-1, dist=1.0/3.0): text_width *= plt.rcParams["font.size"]/12*6/fig.get_size_inches()[1] text_height *= plt.rcParams["font.size"]/12*6/fig.get_size_inches()[1] dvec = np.array((dx, dy)) perp_vec = rotate(dvec, np.pi/2) perp_vec /= mag(perp_vec) if theta_deg > 270: diag = text_height else: diag = np.array((text_width, text_height)) # Projection of text diagonal vector onto normal vector proj = np.dot(diag, perp_vec) if sign != -1: proj = 0 # Text is on right side of vector if theta_deg > 180: sign *= -1 dxlab, dylab = perp_vec*(np.abs(proj) + .01)*sign xlab, ylab = x + dx*dist + dxlab, y + dy*dist + dylab ax.text(xlab, ylab, text) # Make blade velocity vector x1, y1 = rotate((0.5, tsr*u_infty), np.deg2rad(theta_deg)) dx, dy = np.array(blade_xy) - np.array((x1, y1)) blade_vel = np.array((dx, dy)) ax.arrow(x1, y1, dx, dy, head_width=head_width, head_length=head_length, length_includes_head=True, color=dark_gray, linewidth=linewidth) if label: plot_label(r"$-\omega r$", x1, y1, dx*0.25, dy*0.5) # Make chord line vector x1c, y1c = np.array((x1, y1)) - np.array((dx, dy))*0.5 x2c, y2c = np.array((x1, y1)) + np.array((dx, dy))*2 ax.plot([x1c, x2c], [y1c, y2c], marker=None, color="k", linestyle="-.", zorder=1) # Make free stream velocity vector y1 += u_infty ax.arrow(x1, y1, 0, -u_infty, head_width=head_width, head_length=head_length, length_includes_head=True, color=blue, linewidth=linewidth) u_infty = np.array((0, -u_infty)) if label: dy = -mag(u_infty) plot_label(r"$U_\mathrm{in}$", x1, y1, 0, dy, text_width=0.1) # Make relative velocity vector dx, dy = np.array(blade_xy) - np.array((x1, y1)) rel_vel = u_infty + blade_vel ax.plot((x1, x1 + dx), (y1, y1 + dy), lw=0) ax.arrow(x1, y1, dx, dy, head_width=head_width, head_length=head_length, length_includes_head=True, color=tan, linewidth=linewidth) if label: plot_label(r"$U_\mathrm{rel}$", x1, y1, dx, dy, sign=1, text_width=0.11) # Calculate angle between blade vel and rel vel alpha_deg = np.rad2deg(np.arccos(np.dot(blade_vel/mag(blade_vel), rel_vel/mag(rel_vel)))) if theta_deg > 180: alpha_deg *= -1 # Make drag vector drag_amplify = 3.0 data = lookup_foildata(alpha_deg) drag = data["cd"]*mag(rel_vel)**2*drag_amplify if drag < 0.4/drag_amplify: hs = 0.5 else: hs = 1 dx, dy = drag*np.array((dx, dy))/mag((dx, dy)) ax.arrow(blade_xy[0], blade_xy[1], dx, dy, head_width=head_width*hs, head_length=head_length*hs, length_includes_head=True, color=red, linewidth=linewidth) if label: plot_label(r"$F_d$", blade_xy[0], blade_xy[1], dx, dy, sign=-1, dist=0.66) # Make lift vector lift_amplify = 1.5 lift = data["cl"]*mag(rel_vel)**2*lift_amplify dx, dy = rotate((dx, dy), -np.pi/2)/mag((dx, dy))*lift if np.abs(lift) < 0.4/lift_amplify: hs = 0.5 else: hs = 1 ax.plot((blade_xy[0], blade_xy[0] + dx), (blade_xy[1], blade_xy[1] + dy), linewidth=0) ax.arrow(blade_xy[0], blade_xy[1], dx, dy, head_width=head_width*hs, head_length=head_length*hs, length_includes_head=True, color=green, linewidth=linewidth) if label: plot_label(r"$F_l$", blade_xy[0], blade_xy[1], dx, dy, sign=-1, text_width=0.12, text_height=0.02, dist=0.66) # Label radius if label: plot_label("$r$", 0, 0, blade_xy[0], blade_xy[1], text_width=0.04, text_height=0.04) # Label angle of attack if label: ast = 
"simple,head_width={},tail_width={},head_length={}".format( head_width*8, linewidth/16, head_length*8) xy = blade_xy - rel_vel/mag(rel_vel)*0.2 ax.annotate(r"$\alpha$", xy=xy, xycoords="data", xytext=(37.5, 22.5), textcoords="offset points", arrowprops=dict(arrowstyle=ast, ec="none", connectionstyle="arc3,rad=0.1", color="k")) xy = blade_xy - blade_vel/mag(blade_vel)*0.2 ax.annotate("", xy=xy, xycoords="data", xytext=(-15, -30), textcoords="offset points", arrowprops=dict(arrowstyle=ast, ec="none", connectionstyle="arc3,rad=-0.1", color="k")) # Label azimuthal angle if label: xy = np.array(blade_xy)*0.6 ast = "simple,head_width={},tail_width={},head_length={}".format( head_width*5.5, linewidth/22, head_length*5.5) ax.annotate(r"$\theta$", xy=xy, xycoords="data", xytext=(0.28, 0.12), textcoords="data", arrowprops=dict(arrowstyle=ast, ec="none", connectionstyle="arc3,rad=0.1", color="k")) ax.annotate("", xy=(0.41, 0), xycoords="data", xytext=(0.333, 0.12), textcoords="data", arrowprops=dict(arrowstyle=ast, ec="none", connectionstyle="arc3,rad=-0.1", color="k")) # Label pitching moment if label: xy = np.array(blade_xy)*1.1 - blade_vel/mag(blade_vel) * c/4 ast = "simple,head_width={},tail_width={},head_length={}".format( head_width*8, linewidth/16, head_length*8) ax.annotate(r"", xy=xy, xycoords="data", xytext=(25, -15), textcoords="offset points", arrowprops=dict(arrowstyle=ast, ec="none", connectionstyle="arc3,rad=0.6", color="k")) plot_label(r"$M$", xy[0], xy[1], 0.1, 0.1, sign=-1, dist=0.66) return {"u_infty": u_infty, "blade_vel": blade_vel, "rel_vel": rel_vel} def plot_alpha(ax=None, tsr=2.0, theta=None, alpha_ss=None, **kwargs): """Plot angle of attack versus azimuthal angle.""" if theta is not None: theta %= 360 if ax is None: fig, ax = plt.subplots() df = calc_cft_ctorque(tsr=tsr) ax.plot(df.theta, df.alpha_deg, **kwargs) ax.set_ylabel(r"$\alpha$ (degrees)") ax.set_xlabel(r"$\theta$ (degrees)") ax.set_xlim((0, 360)) ylim = np.round(df.alpha_deg.max() + 5) ax.set_ylim((-ylim, ylim)) if theta is not None: f = interp1d(df.theta, df.alpha_deg) ax.plot(theta, f(theta), "ok") if alpha_ss is not None: ax.hlines((alpha_ss, -alpha_ss), 0, 360, linestyles="dashed") def plot_rel_vel_mag(ax=None, tsr=2.0, theta=None, **kwargs): """Plot relative velocity magnitude versus azimuthal angle.""" if theta is not None: theta %= 360 if ax is None: fig, ax = plt.subplots() df = calc_cft_ctorque(tsr=tsr) ax.plot(df.theta, df.rel_vel_mag, **kwargs) ax.set_ylabel(r"$|\vec{U}_\mathrm{rel}|$") ax.set_xlabel(r"$\theta$ (degrees)") ax.set_xlim((0, 360)) if theta is not None: f = interp1d(df.theta, df.rel_vel_mag) ax.plot(theta, f(theta), "ok") def plot_alpha_relvel_all(tsrs=np.arange(1.5, 6.1, 1.0), save=False): """Plot angle of attack and relative velocity magnitude for a list of TSRs. Figure will have two subplots in a single row. 
""" fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(7.5, 3.0)) cm = plt.cm.get_cmap("Reds") for tsr in tsrs: color = cm(tsr/np.max(tsrs)) plot_alpha(ax=ax1, tsr=tsr, label=r"$\lambda = {}$".format(tsr), color=color) plot_rel_vel_mag(ax=ax2, tsr=tsr, color=color) [a.set_xticks(np.arange(0, 361, 60)) for a in (ax1, ax2)] ax1.legend(loc=(0.17, 1.1), ncol=len(tsrs)) ax1.set_ylim((-45, 45)) ax1.set_yticks(np.arange(-45, 46, 15)) ax2.set_ylabel(r"$|\vec{U}_\mathrm{rel}|/U_\infty$") fig.tight_layout() if save: fig.savefig("figures/alpha_deg_urel_geom.pdf", bbox_inches="tight") def plot_ctorque(ax=None, tsr=2.0, theta=None, **kwargs): """Plot torque coefficient versus azimuthal angle.""" theta %= 360 if ax is None: fig, ax = plt.subplots() df = calc_cft_ctorque(tsr=tsr) ax.plot(df.theta, df.ctorque, **kwargs) ax.set_ylabel("Torque coeff.") ax.set_xlabel(r"$\theta$ (degrees)") ax.set_xlim((0, 360)) if theta is not None: f = interp1d(df.theta, df.ctorque) ax.plot(theta, f(theta), "ok") def plot_diagram(fig=None, ax=None, theta_deg=0.0, tsr=2.0, label=False, save=False, axis="on", full_view=True): """Plot full vector diagram.""" if ax is None: fig, ax = plt.subplots(figsize=(6, 6)) plot_blade_path(ax) if label: # Create dashed line for x-axis ax.plot((-0.5, 0.5), (0, 0), linestyle="dashed", color="k", zorder=1) plot_foil(ax, c=0.3, theta_deg=theta_deg) plot_radius(ax, theta_deg) plot_center(ax) plot_vectors(fig, ax, theta_deg, tsr, label=label) # Figure formatting if full_view: ax.set_xlim((-1, 1)) ax.set_ylim((-1, 1)) ax.set_aspect(1) ax.set_xticks([]) ax.set_yticks([]) ax.axis(axis) if save: fig.savefig("figures/cft-vectors.pdf") def plot_all(theta_deg=0.0, tsr=2.0, scale=1.0, full_view=True): """Create diagram and plots of kinematics in a single figure.""" fig = plt.figure(figsize=(7.5*scale, 4.75*scale)) # Draw vector diagram ax1 = plt.subplot2grid((3, 3), (0, 0), colspan=2, rowspan=3) plot_diagram(fig, ax1, theta_deg, tsr, axis="on", full_view=full_view) # Plot angle of attack ax2 = plt.subplot2grid((3, 3), (0, 2)) plot_alpha(ax2, tsr=tsr, theta=theta_deg, alpha_ss=18, color=light_blue) # Plot relative velocity magnitude ax3 = plt.subplot2grid((3, 3), (1, 2)) plot_rel_vel_mag(ax3, tsr=tsr, theta=theta_deg, color=tan) # Plot torque coefficient ax4 = plt.subplot2grid((3, 3), (2, 2)) plot_ctorque(ax4, tsr=tsr, theta=theta_deg, color=purple) fig.tight_layout() return fig def make_frame(t): """Make a frame for a movie.""" sec_per_rev = 5.0 deg = t/sec_per_rev*360 return mplfig_to_npimage(plot_all(deg, scale=2.0)) def make_animation(filetype="mp4", fps=30): """Make animation video.""" if not os.path.isdir("videos"): os.mkdir("videos") animation = VideoClip(make_frame, duration=5.0) if "mp4" in filetype.lower(): animation.write_videofile("videos/cft-animation.mp4", fps=fps) elif "gif" in filetype.lower(): animation.write_gif("videos/cft-animation.gif", fps=fps) if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description="Create cross-flow turbine \ vector diagrams.") parser.add_argument("create", choices=["figure", "diagram", "animation"], help="Either create a static figure or animation") parser.add_argument("--angle", type=float, default=60.0, help="Angle (degrees) to create figure") parser.add_argument("--show", action="store_true", default=False) parser.add_argument("--save", "-s", action="store_true", default=False, help="Save figure") args = parser.parse_args() if args.save: if not os.path.isdir("figures"): os.mkdir("figures") if args.create == "diagram": 
set_sns(font_scale=2) plot_diagram(theta_deg=args.angle, label=True, axis="off", save=args.save) elif args.create == "figure": set_sns() plot_alpha_relvel_all(save=args.save) elif args.create == "animation": set_sns(font_scale=2) from moviepy.editor import VideoClip from moviepy.video.io.bindings import mplfig_to_npimage make_animation() if args.show: plt.show()
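The rotate() helper above is a plain 2-D rotation matrix; here is a self-contained sanity check of that identity (the function is reproduced so the snippet runs on its own).

import numpy as np

def rotate(v, rad):
    # [x'; y'] = [[cos t, -sin t], [sin t, cos t]] [x; y]
    dc, ds = np.cos(rad), np.sin(rad)
    return np.array((dc*v[0] - ds*v[1], ds*v[0] + dc*v[1]))

# Rotating the x unit vector by 90 degrees should give the y unit vector.
assert np.allclose(rotate((1.0, 0.0), np.pi/2), (0.0, 1.0))
assert np.allclose(rotate((0.0, 1.0), np.pi), (0.0, -1.0))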
mit
-164,932,162,652,562,180
35.296875
79
0.562096
false
bluecube/codecad
codecad/rendering/svg.py
1
1105
from . import polygon2d


def render_svg(obj, filename):
    polygons = polygon2d.polygon(obj)

    with open(filename, "w") as fp:
        box = obj.bounding_box()
        box_size = box.size()
        fp.write('<svg xmlns="http://www.w3.org/2000/svg" ')
        fp.write('width="{}mm" height="{}mm" '.format(box_size.x, box_size.y))
        fp.write(
            'viewBox="{} {} {} {}">'.format(box.a.x, -box.b.y, box_size.x, box_size.y)
        )
        fp.write('<style type="text/css">')
        fp.write("path{")
        fp.write("stroke:#000;")
        fp.write("stroke-width:1px;")
        fp.write("vector-effect:non-scaling-stroke;")
        fp.write("fill:#BBF23C;")
        fp.write("}")
        fp.write("</style>")
        fp.write('<path d="')
        for polygon in polygons:
            it = reversed(polygon)
            x, y = next(it)
            fp.write("M{},{}".format(x, -y))
            for x, y in it:
                fp.write("L{},{}".format(x, -y))
            # close the outline back to its starting point
            fp.write("L{},{}".format(polygon[-1][0], -polygon[-1][1]))
        fp.write('"/>')
        fp.write("</svg>")
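For reference, a self-contained sketch of the same "M x,y L x,y" path syntax that render_svg emits, writing a single triangle (the coordinates are arbitrary).

# Standalone illustration of SVG M/L path commands as used by render_svg.
points = [(0, 0), (40, 0), (20, 30)]
with open("triangle.svg", "w") as fp:
    fp.write('<svg xmlns="http://www.w3.org/2000/svg" viewBox="-5 -35 50 40">')
    fp.write('<path style="stroke:#000;fill:#BBF23C" d="')
    x, y = points[0]
    # y is negated, as above, because the SVG y axis points downwards
    fp.write("M{},{}".format(x, -y))
    for x, y in points[1:]:
        fp.write("L{},{}".format(x, -y))
    fp.write('"/></svg>')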
gpl-3.0
-1,875,038,684,060,846,800
30.571429
86
0.483258
false
jmwenda/hypermap
hypermap/aggregator/tasks.py
1
5215
from __future__ import absolute_import from django.conf import settings from celery import shared_task @shared_task(bind=True) def check_all_services(self): from aggregator.models import Service service_to_processes = Service.objects.filter(active=True) total = service_to_processes.count() count = 0 for service in service_to_processes: # update state if not self.request.called_directly: self.update_state( state='PROGRESS', meta={'current': count, 'total': total} ) check_service.delay(service) count = count + 1 @shared_task(bind=True) def check_service(self, service): # total is determined (and updated) exactly after service.update_layers total = 100 def status_update(count): if not self.request.called_directly: self.update_state( state='PROGRESS', meta={'current': count, 'total': total} ) status_update(0) service.update_layers() # we count 1 for update_layers and 1 for service check for simplicity layer_to_process = service.layer_set.all() total = layer_to_process.count() + 2 status_update(1) service.check() status_update(2) count = 3 for layer in layer_to_process: # update state status_update(count) if not settings.SKIP_CELERY_TASK: check_layer.delay(layer) else: check_layer(layer) count = count + 1 @shared_task(bind=True) def check_layer(self, layer): print 'Checking layer %s' % layer.name success, message = layer.check() if not success: from aggregator.models import TaskError task_error = TaskError( task_name=self.name, args=layer.id, message=message ) task_error.save() @shared_task(name="clear_solr") def clear_solr(): print 'Clearing the solr core and indexes' from aggregator.solr import SolrHypermap solrobject = SolrHypermap() solrobject.clear_solr() @shared_task(bind=True) def remove_service_checks(self, service): service.check_set.all().delete() def status_update(count, total): if not self.request.called_directly: self.update_state( state='PROGRESS', meta={'current': count, 'total': total} ) layer_to_process = service.layer_set.all() count = 0 total = layer_to_process.count() for layer in layer_to_process: # update state status_update(count, total) layer.check_set.all().delete() count = count + 1 @shared_task(bind=True) def index_service(self, service): layer_to_process = service.layer_set.all() total = layer_to_process.count() def status_update(count): if not self.request.called_directly: self.update_state( state='PROGRESS', meta={'current': count, 'total': total} ) count = 0 for layer in layer_to_process: # update state status_update(count) index_layer.delay(layer) count = count + 1 @shared_task(bind=True) def index_layer(self, layer): from aggregator.solr import SolrHypermap print 'Syncing layer %s to solr' % layer.name solrobject = SolrHypermap() success, message = solrobject.layer_to_solr(layer) if not success: from aggregator.models import TaskError task_error = TaskError( task_name=self.name, args=layer.id, message=message ) task_error.save() @shared_task(bind=True) def index_all_layers(self): from aggregator.models import Layer clear_solr() layer_to_processes = Layer.objects.all() total = layer_to_processes.count() count = 0 for layer in Layer.objects.all(): # update state if not self.request.called_directly: self.update_state( state='PROGRESS', meta={'current': count, 'total': total} ) if not settings.SKIP_CELERY_TASK: index_layer.delay(layer) else: index_layer(layer) count = count + 1 @shared_task(bind=True) def update_endpoints(self, endpoint_list): from aggregator.utils import create_services_from_endpoint # for now we process the enpoint even if they were already 
processed
    endpoint_to_process = endpoint_list.endpoint_set.all()
    total = endpoint_to_process.count()
    count = 0
    for endpoint in endpoint_to_process:
        # for now we process the endpoint even if it was already processed
        # if not endpoint.processed:
        print 'Processing endpoint %s' % endpoint.url
        imported, message = create_services_from_endpoint(endpoint.url)
        endpoint.imported = imported
        endpoint.message = message
        endpoint.processed = True
        endpoint.save()
        # update state
        if not self.request.called_directly:
            self.update_state(
                state='PROGRESS',
                meta={'current': count, 'total': total}
            )
        count = count + 1
    return True
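A caller-side sketch of reading the PROGRESS metadata these tasks publish via update_state; report_progress and the task-id plumbing are ours, only AsyncResult and its state/info attributes are standard Celery API.

from celery.result import AsyncResult

def report_progress(task_id):
    # Reads the {'current': ..., 'total': ...} meta set by the tasks above.
    result = AsyncResult(task_id)
    if result.state == 'PROGRESS':
        meta = result.info
        return '%s/%s' % (meta['current'], meta['total'])
    return result.state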
mit
2,542,782,802,133,991,400
27.812155
76
0.60767
false
habnabit/pip
pip/exceptions.py
1
1260
"""Exceptions used throughout package""" from __future__ import absolute_import class PipError(Exception): """Base pip exception""" class InstallationError(PipError): """General exception during installation""" class UninstallationError(PipError): """General exception during uninstallation""" class DistributionNotFound(InstallationError): """Raised when a distribution cannot be found to satisfy a requirement""" class RequirementsFileParseError(PipError): """Raised when an invalid state is encountered during requirement file parsing.""" class BestVersionAlreadyInstalled(PipError): """Raised when the most up-to-date version of a package is already installed.""" class BadCommand(PipError): """Raised when virtualenv or a command is not found""" class CommandError(PipError): """Raised when there is an error in command-line arguments""" class PreviousBuildDirError(PipError): """Raised when there's a previous conflicting build directory""" class HashMismatch(InstallationError): """Distribution file hash values don't match.""" class InvalidWheelFilename(InstallationError): """Invalid wheel filename.""" class UnsupportedWheel(InstallationError): """Unsupported wheel."""
mit
-3,849,063,694,971,408,400
23.230769
77
0.743651
false
georgyberdyshev/ascend
pygtk/selftest.py
1
7384
# Sort of an experiment in thread programming. This script locates all # ASCEND files in the ASCENDLIBRARY path, then for any that contain 'self_test' # methods, it loads them and solves them and runs the self test. It's not # very good at checking the results just yet: probably there needs to be a # 'TestReporter' hook implemented that we can script with. # The initial file scan is done by filename, then followed with the regular # expression check for the METHOD_RE expression. Then the file is accessed with # ASCEND to check that the TEST_METHOD_NAME ('self_test') method really exists. # Only *then* is the model instantiated and the test run. # Our input will be the ASCENDLIBRARY environment variable # plus perhaps some commandline options relating to exclusions and inclusions. # This program needs to search files in the given directory and hunt for # A4C and A4L files that contain MODELs that contain 'METHOD self_test' # methods. We then create instances of any such models and run these # methods. The methods should include commands which we can trap via # some kind of callback, reporting failure or success. TEST_METHOD_NAME='self_test'; METHOD_RE = '\\s*METHOD\\s+'+TEST_METHOD_NAME+'\\s*;'; #------------------------------------------------------------------------------- import os, os.path, mmap, re import threading, heapq import time, platform import sys, dl # This sets the flags for dlopen used by python so that the symbols in the # ascend library are made available to libraries dlopened within ASCEND: sys.setdlopenflags(dl.RTLD_GLOBAL|dl.RTLD_NOW) import ascpy METHOD_REGEXP = re.compile(METHOD_RE); #------------------------------------------------------------------------------- # TASK CLASSES # These classes represent tasks that will be added to the queue. They # just need to have __init__ and run methods, they will be queued and # run by the thread manager. # Scan a directory, store any a4c or a4l files in the queue, # also store and subdirectories in the queue, they will be # recursed into. class AscendScanDirectory: global jobs, jobslock def __init__(self,path): self.path=path def run(self): print "Scanning",self.path,"..." for f in os.listdir(self.path): if f==".svn" or f=="CVS" or f=="westerberg": continue if os.path.isdir( os.path.join(self.path,f) ): jobslock.acquire() heapq.heappush( jobs,( 30, AscendScanDirectory(os.path.join(self.path,f)) ) ) jobslock.release() else: stem,ext = os.path.splitext(f) if ext==".a4c" or ext==".a4l": jobslock.acquire() heapq.heappush( jobs,( 10, AscendInspectFile(os.path.join(self.path,f)) ) ) jobslock.release() # Ascend file inspection task: load the file and check for 'METHOD self_test' # somewhere in the file. 
class AscendInspectFile: global jobs,jobslock; global inspectlock; def __init__(self,filepath): self.filepath = filepath; def run(self): inspectlock.acquire() try: (path,file) = os.path.split(self.filepath) #print "Checking",self.filepath,"for '"+TEST_METHOD_NAME+"' method" size = os.path.getsize(self.filepath) #print "File size is",size if size > 0: f = open(self.filepath,'r') s = mmap.mmap(f.fileno(),size,mmap.MAP_SHARED,mmap.PROT_READ) if METHOD_REGEXP.search(s): print "Found 'METHOD "+TEST_METHOD_NAME+"' in",file jobslock.acquire() heapq.heappush(jobs, (5, AscendTestFile(self.filepath)) ) jobslock.release() else: pass #print "File",file," is not self-testing" f.close() except IOError: print "IOError" inspectlock.release() # This 'task class' will use ASCEND to load the specified model # then some sub-task will read off the list of models and methods # and for each model that includes self_test, will run that # method and check the result. class AscendTestFile: global jobs, jobslock, ascendlock, library; def __init__(self,filepath): self.filepath = filepath def run(self): ascendlock.acquire() L = ascpy.Library() L.clear() L.load(self.filepath) for M in L.getModules(): print "Looking at module '"+M.getName()+"'" for t in L.getModuleTypes(M): #print "Looking at type '"+str(t.getName())+"'" for m in t.getMethods(): #print "Looking at method '"+str(m.getName())+"'" if m.getName()==TEST_METHOD_NAME: jobslock.acquire() heapq.heappush(jobs, (0, AscendTestModel( self.filepath,t.getName().toString())) ) jobslock.release() ascendlock.release() # Run self_test on a given model class AscendTestModel: global jobs, jobslock; def __init__(self,filepath,modelname): self.filepath=filepath self.modelname=modelname def run(self): ascendlock.acquire() try: L = ascpy.Library() L.clear() L.load(self.filepath) t = L.findType(self.modelname) testmethod = None for m in t.getMethods(): if m.getName()==TEST_METHOD_NAME: testmethod = m; if not testmethod: raise RuntimeError("No method '"+TEST_METHOD_NAME+"' found") s = t.getSimulation('testsim'); #s.check() s.build(); print "LAUNCHING SOLVER...\n\n" r = ascpy.SolverReporter() s.solve(ascpy.Solver('QRSlv'),r) s.run(testmethod) except RuntimeError, e: print e ascendlock.release() #--------------------------------------------------------------- # Thread runner class AscendModelLibraryTester: def __init__(self): global jobs, jobslock, inspectlock, ascendlock jobs = [] self.runlock = threading.Lock() jobslock = threading.Lock() inspectlock = threading.Lock() ascendlock = threading.Lock() self.runlock.acquire() self.path = os.environ['ASCENDLIBRARY'] self.threads = [] if platform.system()=='Windows': self.separator=";" else: self.separator=":" print 'Search path:',self.path for p in self.path.split(self.separator): j = AscendScanDirectory(p) jobslock.acquire() heapq.heappush(jobs, (20,j) ) jobslock.release() def run(self,num_threads): for i in range(num_threads): t = AscendModelLibraryTester.DoWork(i) # new thread self.threads.append(t) t.start() for i in range(num_threads): self.threads[i].join() self.runlock.release() class DoWork(threading.Thread): # thread number tn global jobs, jobslock, max_threads moreThreads = threading.Event() lock = threading.Lock() def __init__(self,tn): threading.Thread.__init__(self) self.threadnum = tn def run(self): while 1: jobslock.acquire() try: index, job = heapq.heappop(jobs) except IndexError: jobslock.release() #print "No jobs left for thread",self.threadnum return jobslock.release() job.run() # The following little extra bit means 
that this file can be automatically # be used as part of a larger unittesting process. So if there are # CUnit tests, and they can be executed somehow via Python, then we # test everything in ASCEND at once. import unittest class TestAscendModelLibrary(unittest.TestCase): def testAllModelsInPath(self): t = AscendModelLibraryTester(); t.run(5); # five threads print 'Waiting for ASCEND Model Library Tester to complete...' t.runlock.acquire() if __name__=='__main__': unittest.main()
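A stripped-down sketch of the (priority, task) heapq pattern the script above is built around; PrintTask is a placeholder for the Ascend* task classes.

import heapq
import threading

jobs = []
jobslock = threading.Lock()

class PrintTask:
    def __init__(self, msg):
        self.msg = msg

    def run(self):
        print(self.msg)

with jobslock:
    heapq.heappush(jobs, (10, PrintTask("low priority")))
    heapq.heappush(jobs, (0, PrintTask("runs first")))

while jobs:
    with jobslock:
        priority, job = heapq.heappop(jobs)
    job.run()

Note that under Python 3, equal priorities would make heapq compare the task objects themselves, so a tiebreaker counter would be needed; the script above runs under Python 2, where arbitrary objects still compare.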
gpl-2.0
8,621,907,191,316,145,000
28.291498
88
0.659399
false
yosukesuzuki/deep-link-app
project/api/tests/__init__.py
1
4097
# -*- coding: utf-8 -*- from kay.ext.testutils.gae_test_base import GAETestBase from api.shorturls import URLShorten from core.models import ShortURL, ShortURLUser class URLShortenTest(GAETestBase): CLEANUP_USED_KIND = True USE_PRODUCTION_STUBS = True def test_get(self): user = ShortURLUser(key_name='foobar') user.put() short_url = ShortURL(long_url='http://appu.pw/', user_created=str(user.key())) short_url.check_exist_or_create() short_url2 = ShortURL(long_url='https://console-dot-appupw.appspot.com/', user_created=str(user.key())) short_url2.check_exist_or_create() shorturls = URLShorten(method='GET', user_created=str(user.key()), values={}) shorturls.do() result = shorturls.result self.assertEquals(result['short_urls'][0]['long_url'], 'https://console-dot-appupw.appspot.com/') def test_post(self): user = ShortURLUser(key_name='foobar') user.put() short_url = URLShorten(method='POST', user_created=str(user.key()), values={'long_url': 'https://console-dot-appupw.appspot.com/'}) short_url.do() self.assertEquals(short_url.result['path'], '1') short_url2 = URLShorten(method='POST', user_created=str(user.key()), values={'long_url': 'https://console-dot-appupw.appspot.com/'}) short_url2.do() self.assertEquals(short_url2.result['code'], 409) def test_put(self): user = ShortURLUser(key_name='foobar') user.put() short_url = URLShorten(method='POST', user_created=str(user.key()), values={'long_url': 'https://console-dot-appupw.appspot.com/'}) short_url.do() short_url2 = URLShorten(method='PATCH', user_created=str(user.key()), values={'iphone_url': 'iphoneschema://foobar'}, path=short_url.result['path']) short_url2.do() self.assertEquals(short_url2.result['iphone_url'], 'iphoneschema://foobar') short_url3 = ShortURL.get_by_key_name(short_url.result['path']) self.assertEquals(short_url3.iphone_url, 'iphoneschema://foobar') def test_custom_name(self): user = ShortURLUser(key_name='foobar') user.put() short_url = URLShorten(method='POST', user_created=str(user.key()), values={'long_url': 'https://console-dot-appupw.appspot.com/'}) short_url.do() short_url2 = URLShorten(method='PATCH', user_created=str(user.key()), values={'custom_name': 'hoge'}, path=short_url.result['path']) short_url2.do() self.assertEquals(short_url2.result['path'], 'hoge') self.assertEquals(short_url2.code, 201) short_another_url = URLShorten(method='POST', user_created=str(user.key()), values={'long_url': 'https://console-dot-appupw.appspot.com/another'}) short_another_url.do() short_another_url2 = URLShorten(method='PUT', user_created=str(user.key()), values={'custom_name': 'hoge'}, path=short_another_url.result['path']) short_another_url2.do() self.assertEquals(short_another_url2.code, 409) self.assertEquals(short_another_url2.result['status'], 'error') def test_delete(self): user = ShortURLUser(key_name='foobar') user.put() short_url = URLShorten(method='POST', user_created=str(user.key()), values={'long_url': 'https://console-dot-appupw.appspot.com/'}) short_url.do() short_url2 = URLShorten(method='DELETE', user_created=str(user.key()), values={}, path=short_url.result['path']) short_url2.do() self.assertEquals(short_url2.code, 204) check_entity = ShortURL.get_by_key_name(short_url.result['path']) self.assertEquals(check_entity, None)
mit
-2,044,805,659,408,637,400
48.963415
120
0.580425
false
MasterFacilityList/mfl_api
common/tests/test_tasks.py
1
1419
from mock import patch from requests.exceptions import ConnectionError from django.test import TestCase from django.test.utils import override_settings from ..tasks import backup_db, refresh_material_views class S3BucketMock(object): pass class S3Mock(object): def __init__(self, *args, **kwargs): super(S3Mock, self).__init__(*args, **kwargs) def create_bucket(self, *args, **kwargs): return S3BucketMock() @override_settings( EMAIL_BACKEND='django.core.mail.backends.dummy.EmailBackend') class TestCommonCeleryTasks(TestCase): def test_db_backup_success(self): with patch('fabfile.S3Connection') as s3_mock: s3_mock.return_value = S3Mock() with patch('fabfile.local') as fabric_local_mock: fabric_local_mock.return_value = None fabric_local_mock.side_effect = None backup_db() def test_db_backup_failure(self): with patch('fabfile.S3Connection') as s3_mock: s3_mock.return_value = S3Mock() s3_mock.side_effect = ConnectionError with patch('fabfile.local') as fabric_local_mock: fabric_local_mock.return_value = None fabric_local_mock.side_effect = None backup_db() def test_refresh_material_view(self): # The test is just for coverage purpose only refresh_material_views()
mit
1,987,873,591,940,204,800
30.533333
65
0.642706
false
slzatz/esp8266
iot_generic.py
1
2478
''' The basic setup here is to have an Adafruit Feather HUZZAH ESP8266 plus a Featherwing OLED SSD1306 This is a non-specific script that writes the MQTT message to the OLED display. The MQTT broker is running on an EC2 instance. The esp8266+OLED that subscribes to the topic can be used to turn on and off a Data Loggers IoT Control Relay. Uses umqtt_client_official.py - my renamed version of the official simple mqtt script The mqtt topic is in a separate file called topic Pressing the AWS IoT switch triggers an AWS Lambda function that sends the MQTT message to the EC2 MQTT broker with topic "switch" and the jsonified info that the AWS IoT Button generates, which is: {"batteryVoltage": "1705mV", "serialNumber": "G030MD0371271BB1", "clickType": "SINGLE"} Note the clickType can be SINGLE, DOUBLE or LONG. I am using SINGLE to turn the relay swith on and DOUBLE to shut it off. ''' from time import sleep, time import json import network from config import ssid, pw, mqtt_aws_host from ssd1306_min import SSD1306 as SSD from umqtt_client_official import MQTTClient as umc from machine import Pin, I2C with open('mqtt_id', 'r') as f: mqtt_id = f.read().strip() with open('topic', 'r') as f: topic = f.read().strip() print("mqtt_id =", mqtt_id) print("host =", mqtt_aws_host) print("topic =", topic) i2c = I2C(scl=Pin(5), sda=Pin(4), freq=400000) d = SSD(i2c) d.init_display() d.draw_text(0, 0, "HELLO STEVE") d.display() pin15 = Pin(15, Pin.OUT) c = umc(mqtt_id, mqtt_aws_host, 1883) def run(): wlan = network.WLAN(network.STA_IF) wlan.active(True) if not wlan.isconnected(): print('connecting to network...') wlan.connect(ssid, pw) while not wlan.isconnected(): pass print('network config:', wlan.ifconfig()) def callback(topic, msg): zz = json.loads(msg.decode('utf-8')) msg = zz.get('message', '') t = zz.get('time', '') if msg == 'on': pin15.value(1) elif msg == 'off': pin15.value(0) else: pass d.clear() d.display() d.draw_text(0, 0, "topic: "+topic.decode('utf-8')) d.draw_text(0, 12, "time: "+t) d.draw_text(0, 24, "message: "+msg) d.display() r = c.connect() print("connect:",r) c.set_callback(callback) r = c.subscribe(topic) print("subscribe:",r) sleep(5) cur_time = time() while 1: c.check_msg() t = time() if t > cur_time + 30: c.ping() cur_time = t sleep(1) run()
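An off-device example of the JSON payload shape the callback above expects; the concrete values are illustrative.

import json

payload = b'{"message": "on", "time": "2017-01-01 12:00"}'  # illustrative
zz = json.loads(payload.decode('utf-8'))
assert zz.get('message') == 'on'   # this value would drive pin15.value(1)
assert zz.get('time') == '2017-01-01 12:00'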
mit
7,950,682,271,571,295,000
24.546392
111
0.657385
false
suraj-deshmukh/myCodes
crout.py
1
1270
#!/usr/bin/python2.7
import numpy as num
import sys
import csv

def crout(A):
    # Crout decomposition: L carries the diagonal, U has a unit diagonal.
    n = A.shape
    if n[0] != n[1]:
        print("Given matrix is not a square matrix. Code terminated.")
        sys.exit()
    n = n[0]
    A = num.mat(A, dtype=float)
    U = num.matrix(A*0)
    L = num.matrix(U)
    for i in range(0, n):
        L[i, 0] = A[i, 0]
        U[i, i] = 1
    for j in range(1, n):
        U[0, j] = A[0, j]/L[0, 0]
    for i in range(1, n):
        for j in range(1, i+1):
            L[i, j] = A[i, j] - L[i, range(0, j)] * U[range(0, j), j]
        for j in range(i, n):
            U[i, j] = (A[i, j] - L[i, range(0, i)] * U[range(0, i), j])/L[i, i]
    print "A=\n", A, "\n\nUpper=\n", U, "\n\nLower=\n", L, "\n\nA=Lower*Upper\n", L*U
    return

try:
    FILE = open(sys.argv[1], 'r')
    reader = csv.reader(FILE)
    mat = num.matrix([map(float, row) for row in reader])
    crout(mat)
except IndexError:
    print("No data file was provided as a command line argument")
    print "Run the following command for help\n", sys.argv[0], "--help"
except IOError:
    print("-------------------HELP-------------------")
    print "This code takes a CSV file as a command line argument, with numbers stored as a matrix\n"
    print "for ex:\n", "matrix.csv\n", "\n1,2,3\n5,6,3\n5,3,6\n"
    print "Run the code using the following command\n", sys.argv[0], "matrix.csv\nOr"
    print "python", sys.argv[0][2:], "matrix.csv"
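Crout factorization keeps the diagonal in L and puts unit entries on U's diagonal, so L*U must reproduce A; a small independent NumPy check of that property.

import numpy as np

A = np.matrix([[1.0, 2.0, 3.0],
               [5.0, 6.0, 3.0],
               [5.0, 3.0, 6.0]])
L = np.matrix(np.zeros((3, 3)))
U = np.matrix(np.eye(3))          # unit diagonal, as Crout requires
for i in range(3):
    for j in range(i + 1):
        L[i, j] = A[i, j] - L[i, :j] * U[:j, j]
    for j in range(i, 3):
        U[i, j] = (A[i, j] - L[i, :i] * U[:i, j]) / L[i, i]
assert np.allclose(L * U, A)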
mit
1,452,418,819,009,944,600
29.97561
103
0.60315
false
mrclro/kbxrec
project/people/migrations/0001_initial.py
1
4164
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2017-04-22 19:46 from __future__ import unicode_literals import django.core.validators from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ('events', '0001_initial'), ('locations', '0001_initial'), ] operations = [ migrations.CreateModel( name='Alias', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=64, unique=True)), ('slug', models.SlugField(unique=True)), ], options={ 'verbose_name_plural': 'aliases', 'ordering': ['name'], }, ), migrations.CreateModel( name='Fighter', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('sex', models.CharField(choices=[('M', 'Male'), ('F', 'Female')], default='M', max_length=2)), ('first_name', models.CharField(max_length=128)), ('last_name', models.CharField(max_length=128)), ('birth_name', models.CharField(blank=True, max_length=256)), ('birth_date', models.DateField(blank=True)), ('height', models.IntegerField(blank=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(250)])), ('reach', models.IntegerField(blank=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(300)])), ('stance', models.CharField(blank=True, choices=[('O', 'Orthodox'), ('S', 'Southpaw')], max_length=2)), ('facebook', models.URLField(blank=True)), ('instagram', models.URLField(blank=True)), ('twitter', models.URLField(blank=True)), ('website', models.URLField(blank=True)), ('slug', models.SlugField(unique=True)), ('alias', models.ManyToManyField(blank=True, to='people.Alias')), ('birth_place', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='birth_place', to='locations.City')), ('current_titles', models.ManyToManyField(blank=True, related_name='current_titles', to='events.Title')), ('nationality', models.ManyToManyField(blank=True, to='locations.Country')), ('past_titles', models.ManyToManyField(blank=True, related_name='past_titles', to='events.Title')), ('residence', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='residence', to='locations.City')), ('team', models.ManyToManyField(blank=True, to='locations.Team')), ], options={ 'ordering': ['slug'], }, ), migrations.CreateModel( name='Referee', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=128, unique=True)), ('slug', models.SlugField(unique=True)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Trainer', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=128, unique=True)), ('slug', models.SlugField(unique=True)), ], options={ 'ordering': ['name'], }, ), migrations.AddField( model_name='fighter', name='trainer', field=models.ManyToManyField(blank=True, to='people.Trainer'), ), ]
mit
2,426,154,979,955,103,000
46.318182
168
0.555716
false
classicsxdx/puzzlecoin
contrib/bitrpc/bitrpc.py
1
7844
from jsonrpc import ServiceProxy import sys import string # ===== BEGIN USER SETTINGS ===== # if you do not set these you will be prompted for a password for every command rpcuser = "" rpcpass = "" # ====== END USER SETTINGS ====== if rpcpass == "": access = ServiceProxy("http://127.0.0.1:11814") else: access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:11814") cmd = sys.argv[1].lower() if cmd == "backupwallet": try: path = raw_input("Enter destination path/filename: ") print access.backupwallet(path) except: print "\n---An error occurred---\n" elif cmd == "getaccount": try: addr = raw_input("Enter a puzzlecoin address: ") print access.getaccount(addr) except: print "\n---An error occurred---\n" elif cmd == "getaccountaddress": try: acct = raw_input("Enter an account name: ") print access.getaccountaddress(acct) except: print "\n---An error occurred---\n" elif cmd == "getaddressesbyaccount": try: acct = raw_input("Enter an account name: ") print access.getaddressesbyaccount(acct) except: print "\n---An error occurred---\n" elif cmd == "getbalance": try: acct = raw_input("Enter an account (optional): ") mc = raw_input("Minimum confirmations (optional): ") try: print access.getbalance(acct, mc) except: print access.getbalance() except: print "\n---An error occurred---\n" elif cmd == "getblockbycount": try: height = raw_input("Height: ") print access.getblockbycount(height) except: print "\n---An error occurred---\n" elif cmd == "getblockcount": try: print access.getblockcount() except: print "\n---An error occurred---\n" elif cmd == "getblocknumber": try: print access.getblocknumber() except: print "\n---An error occurred---\n" elif cmd == "getconnectioncount": try: print access.getconnectioncount() except: print "\n---An error occurred---\n" elif cmd == "getdifficulty": try: print access.getdifficulty() except: print "\n---An error occurred---\n" elif cmd == "getgenerate": try: print access.getgenerate() except: print "\n---An error occurred---\n" elif cmd == "gethashespersec": try: print access.gethashespersec() except: print "\n---An error occurred---\n" elif cmd == "getinfo": try: print access.getinfo() except: print "\n---An error occurred---\n" elif cmd == "getnewaddress": try: acct = raw_input("Enter an account name: ") try: print access.getnewaddress(acct) except: print access.getnewaddress() except: print "\n---An error occurred---\n" elif cmd == "getreceivedbyaccount": try: acct = raw_input("Enter an account (optional): ") mc = raw_input("Minimum confirmations (optional): ") try: print access.getreceivedbyaccount(acct, mc) except: print access.getreceivedbyaccount() except: print "\n---An error occurred---\n" elif cmd == "getreceivedbyaddress": try: addr = raw_input("Enter a puzzlecoin address (optional): ") mc = raw_input("Minimum confirmations (optional): ") try: print access.getreceivedbyaddress(addr, mc) except: print access.getreceivedbyaddress() except: print "\n---An error occurred---\n" elif cmd == "gettransaction": try: txid = raw_input("Enter a transaction ID: ") print access.gettransaction(txid) except: print "\n---An error occurred---\n" elif cmd == "getwork": try: data = raw_input("Data (optional): ") try: print access.gettransaction(data) except: print access.gettransaction() except: print "\n---An error occurred---\n" elif cmd == "help": try: cmd = raw_input("Command (optional): ") try: print access.help(cmd) except: print access.help() except: print "\n---An error occurred---\n" elif cmd == "listaccounts": try: mc = raw_input("Minimum confirmations 
(optional): ") try: print access.listaccounts(mc) except: print access.listaccounts() except: print "\n---An error occurred---\n" elif cmd == "listreceivedbyaccount": try: mc = raw_input("Minimum confirmations (optional): ") incemp = raw_input("Include empty? (true/false, optional): ") try: print access.listreceivedbyaccount(mc, incemp) except: print access.listreceivedbyaccount() except: print "\n---An error occurred---\n" elif cmd == "listreceivedbyaddress": try: mc = raw_input("Minimum confirmations (optional): ") incemp = raw_input("Include empty? (true/false, optional): ") try: print access.listreceivedbyaddress(mc, incemp) except: print access.listreceivedbyaddress() except: print "\n---An error occurred---\n" elif cmd == "listtransactions": try: acct = raw_input("Account (optional): ") count = raw_input("Number of transactions (optional): ") frm = raw_input("Skip (optional):") try: print access.listtransactions(acct, count, frm) except: print access.listtransactions() except: print "\n---An error occurred---\n" elif cmd == "move": try: frm = raw_input("From: ") to = raw_input("To: ") amt = raw_input("Amount:") mc = raw_input("Minimum confirmations (optional): ") comment = raw_input("Comment (optional): ") try: print access.move(frm, to, amt, mc, comment) except: print access.move(frm, to, amt) except: print "\n---An error occurred---\n" elif cmd == "sendfrom": try: frm = raw_input("From: ") to = raw_input("To: ") amt = raw_input("Amount:") mc = raw_input("Minimum confirmations (optional): ") comment = raw_input("Comment (optional): ") commentto = raw_input("Comment-to (optional): ") try: print access.sendfrom(frm, to, amt, mc, comment, commentto) except: print access.sendfrom(frm, to, amt) except: print "\n---An error occurred---\n" elif cmd == "sendmany": try: frm = raw_input("From: ") to = raw_input("To (in format address1:amount1,address2:amount2,...): ") mc = raw_input("Minimum confirmations (optional): ") comment = raw_input("Comment (optional): ") try: print access.sendmany(frm,to,mc,comment) except: print access.sendmany(frm,to) except: print "\n---An error occurred---\n" elif cmd == "sendtoaddress": try: to = raw_input("To (in format address1:amount1,address2:amount2,...): ") amt = raw_input("Amount:") comment = raw_input("Comment (optional): ") commentto = raw_input("Comment-to (optional): ") try: print access.sendtoaddress(to,amt,comment,commentto) except: print access.sendtoaddress(to,amt) except: print "\n---An error occurred---\n" elif cmd == "setaccount": try: addr = raw_input("Address: ") acct = raw_input("Account:") print access.setaccount(addr,acct) except: print "\n---An error occurred---\n" elif cmd == "setgenerate": try: gen= raw_input("Generate? 
(true/false): ") cpus = raw_input("Max processors/cores (-1 for unlimited, optional):") try: print access.setgenerate(gen, cpus) except: print access.setgenerate(gen) except: print "\n---An error occurred---\n" elif cmd == "settxfee": try: amt = raw_input("Amount:") print access.settxfee(amt) except: print "\n---An error occurred---\n" elif cmd == "stop": try: print access.stop() except: print "\n---An error occurred---\n" elif cmd == "validateaddress": try: addr = raw_input("Address: ") print access.validateaddress(addr) except: print "\n---An error occurred---\n" elif cmd == "walletpassphrase": try: pwd = raw_input("Enter wallet passphrase: ") access.walletpassphrase(pwd, 60) print "\n---Wallet unlocked---\n" except: print "\n---An error occurred---\n" elif cmd == "walletpassphrasechange": try: pwd = raw_input("Enter old wallet passphrase: ") pwd2 = raw_input("Enter new wallet passphrase: ") access.walletpassphrasechange(pwd, pwd2) print print "\n---Passphrase changed---\n" except: print print "\n---An error occurred---\n" print else: print "Command not found or not supported"
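One apparent slip in the script above: the "getwork" branch prints access.gettransaction(data) rather than access.getwork(data). A corrected sketch of that branch, kept in the same Python 2 idiom.

def do_getwork(access):
    # Corrected "getwork" handler; the original appears to call
    # gettransaction by copy-paste accident.
    try:
        data = raw_input("Data (optional): ")
        try:
            print access.getwork(data)
        except:
            print access.getwork()
    except:
        print "\n---An error occurred---\n"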
mit
3,194,827,894,278,671,400
23.209877
79
0.662035
false
midnightradio/gerrit
tools/pack_war.py
1
1943
#!/usr/bin/python # Copyright (C) 2013 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function from optparse import OptionParser from os import getcwd, chdir, makedirs, path, symlink from subprocess import check_call, check_output import sys opts = OptionParser() opts.add_option('-o', help='path to write WAR to') opts.add_option('--lib', action='append', help='target for WEB-INF/lib') opts.add_option('--pgmlib', action='append', help='target for WEB-INF/pgm-lib') opts.add_option('--tmp', help='temporary directory') args, ctx = opts.parse_args() war = args.tmp root = war[:war.index('buck-out')] jars = set() def link_jars(libs, directory): makedirs(directory) while not path.isfile('.buckconfig'): chdir('..') cp = check_output(['buck', 'audit', 'classpath'] + libs) for j in cp.strip().splitlines(): if j not in jars: jars.add(j) n = path.basename(j) if j.startswith('buck-out/gen/gerrit-'): n = j.split('/')[2] + '-' + n symlink(path.join(root, j), path.join(directory, n)) if args.lib: link_jars(args.lib, path.join(war, 'WEB-INF', 'lib')) if args.pgmlib: link_jars(args.pgmlib, path.join(war, 'WEB-INF', 'pgm-lib')) try: for s in ctx: check_call(['unzip', '-q', '-d', war, s]) check_call(['zip', '-9qr', args.o, '.'], cwd=war) except KeyboardInterrupt: print('Interrupted by user', file=sys.stderr) exit(1)
apache-2.0
-142,233,088,232,519,580
33.087719
79
0.685538
false
dabrahams/zeroinstall
zeroinstall/injector/trust.py
1
8662
""" Records who we trust to sign feeds. Trust is divided up into domains, so that it is possible to trust a key in some cases and not others. @var trust_db: Singleton trust database instance. """ # Copyright (C) 2009, Thomas Leonard # See the README file for details, or visit http://0install.net. from zeroinstall import _, SafeException import os from logging import info from zeroinstall.support import basedir, tasks from .namespaces import config_site, config_prog, XMLNS_TRUST KEY_INFO_TIMEOUT = 10 # Maximum time to wait for response from key-info-server class TrustDB(object): """A database of trusted keys. @ivar keys: maps trusted key fingerprints to a set of domains for which where it is trusted @type keys: {str: set(str)} @ivar watchers: callbacks invoked by L{notify} @see: L{trust_db} - the singleton instance of this class""" __slots__ = ['keys', 'watchers'] def __init__(self): self.keys = None self.watchers = [] def is_trusted(self, fingerprint, domain = None): self.ensure_uptodate() domains = self.keys.get(fingerprint, None) if not domains: return False # Unknown key if domain is None: return True # Deprecated return domain in domains or '*' in domains def get_trust_domains(self, fingerprint): """Return the set of domains in which this key is trusted. If the list includes '*' then the key is trusted everywhere. @since: 0.27 """ self.ensure_uptodate() return self.keys.get(fingerprint, set()) def get_keys_for_domain(self, domain): """Return the set of keys trusted for this domain. @since: 0.27""" self.ensure_uptodate() return set([fp for fp in self.keys if domain in self.keys[fp]]) def trust_key(self, fingerprint, domain = '*'): """Add key to the list of trusted fingerprints. @param fingerprint: base 16 fingerprint without any spaces @type fingerprint: str @param domain: domain in which key is to be trusted @type domain: str @note: call L{notify} after trusting one or more new keys""" if self.is_trusted(fingerprint, domain): return int(fingerprint, 16) # Ensure fingerprint is valid if fingerprint not in self.keys: self.keys[fingerprint] = set() #if domain == '*': # warn("Calling trust_key() without a domain is deprecated") self.keys[fingerprint].add(domain) self.save() def untrust_key(self, key, domain = '*'): self.ensure_uptodate() self.keys[key].remove(domain) if not self.keys[key]: # No more domains for this key del self.keys[key] self.save() def save(self): from xml.dom import minidom import tempfile doc = minidom.Document() root = doc.createElementNS(XMLNS_TRUST, 'trusted-keys') root.setAttribute('xmlns', XMLNS_TRUST) doc.appendChild(root) for fingerprint in self.keys: keyelem = doc.createElementNS(XMLNS_TRUST, 'key') root.appendChild(keyelem) keyelem.setAttribute('fingerprint', fingerprint) for domain in self.keys[fingerprint]: domainelem = doc.createElementNS(XMLNS_TRUST, 'domain') domainelem.setAttribute('value', domain) keyelem.appendChild(domainelem) d = basedir.save_config_path(config_site, config_prog) fd, tmpname = tempfile.mkstemp(dir = d, prefix = 'trust-') tmp = os.fdopen(fd, 'wb') doc.writexml(tmp, indent = "", addindent = " ", newl = "\n") tmp.close() os.rename(tmpname, os.path.join(d, 'trustdb.xml')) def notify(self): """Call all watcher callbacks. This should be called after trusting or untrusting one or more new keys. @since: 0.25""" for w in self.watchers: w() def ensure_uptodate(self): from xml.dom import minidom # This is a bit inefficient... 
(could cache things) self.keys = {} trust = basedir.load_first_config(config_site, config_prog, 'trustdb.xml') if trust: keys = minidom.parse(trust).documentElement for key in keys.getElementsByTagNameNS(XMLNS_TRUST, 'key'): domains = set() self.keys[key.getAttribute('fingerprint')] = domains for domain in key.getElementsByTagNameNS(XMLNS_TRUST, 'domain'): domains.add(domain.getAttribute('value')) else: # Convert old database to XML format trust = basedir.load_first_config(config_site, config_prog, 'trust') if trust: #print "Loading trust from", trust_db for key in open(trust).read().split('\n'): if key: self.keys[key] = set(['*']) else: # No trust database found. # Trust Thomas Leonard's key for 0install.net by default. # Avoids distracting confirmation box on first run when we check # for updates to the GUI. self.keys['92429807C9853C0744A68B9AAE07828059A53CC1'] = set(['0install.net']) def domain_from_url(url): """Extract the trust domain for a URL. @param url: the feed's URL @type url: str @return: the trust domain @rtype: str @since: 0.27 @raise SafeException: the URL can't be parsed""" import urlparse if os.path.isabs(url): raise SafeException(_("Can't get domain from a local path: '%s'") % url) domain = urlparse.urlparse(url)[1] if domain and domain != '*': return domain raise SafeException(_("Can't extract domain from URL '%s'") % url) trust_db = TrustDB() class TrustMgr(object): """A TrustMgr handles the process of deciding whether to trust new keys (contacting the key information server, prompting the user, accepting automatically, etc) @since: 0.53""" __slots__ = ['config', '_current_confirm'] def __init__(self, config): self.config = config self._current_confirm = None # (a lock to prevent asking the user multiple questions at once) @tasks.async def confirm_keys(self, pending): """We don't trust any of the signatures yet. Collect information about them and add the keys to the trusted list, possibly after confirming with the user (via config.handler). Updates the L{trust} database, and then calls L{trust.TrustDB.notify}. @since: 0.53 @arg pending: an object holding details of the updated feed @type pending: L{PendingFeed} @return: A blocker that triggers when the user has chosen, or None if already done. @rtype: None | L{Blocker}""" assert pending.sigs from zeroinstall.injector import gpg valid_sigs = [s for s in pending.sigs if isinstance(s, gpg.ValidSig)] if not valid_sigs: def format_sig(sig): msg = str(sig) if sig.messages: msg += "\nMessages from GPG:\n" + sig.messages return msg raise SafeException(_('No valid signatures found on "%(url)s". Signatures:%(signatures)s') % {'url': pending.url, 'signatures': ''.join(['\n- ' + format_sig(s) for s in pending.sigs])}) # Start downloading information about the keys... fetcher = self.config.fetcher kfs = {} for sig in valid_sigs: kfs[sig] = fetcher.fetch_key_info(sig.fingerprint) # Wait up to KEY_INFO_TIMEOUT seconds for key information to arrive. Avoids having the dialog # box update while the user is looking at it, and may allow it to be skipped completely in some # cases. timeout = tasks.TimeoutBlocker(KEY_INFO_TIMEOUT, "key info timeout") while True: key_info_blockers = [sig_info.blocker for sig_info in kfs.values() if sig_info.blocker is not None] if not key_info_blockers: break info("Waiting for response from key-info server: %s", key_info_blockers) yield [timeout] + key_info_blockers if timeout.happened: info("Timeout waiting for key info response") break # If we're already confirming something else, wait for that to finish... 
while self._current_confirm is not None: info("Waiting for previous key confirmations to finish") yield self._current_confirm domain = domain_from_url(pending.url) if self.config.auto_approve_keys: existing_feed = self.config.iface_cache.get_feed(pending.url) if not existing_feed: changes = False for sig, kf in kfs.iteritems(): for key_info in kf.info: if key_info.getAttribute("vote") == "good": info(_("Automatically approving key for new feed %s based on response from key info server"), pending.url) trust_db.trust_key(sig.fingerprint, domain) changes = True if changes: trust_db.notify() # Check whether we still need to confirm. The user may have # already approved one of the keys while dealing with another # feed, or we may have just auto-approved it. for sig in kfs: is_trusted = trust_db.is_trusted(sig.fingerprint, domain) if is_trusted: return # Take the lock and confirm this feed self._current_confirm = lock = tasks.Blocker('confirm key lock') try: done = self.config.handler.confirm_import_feed(pending, kfs) if done is not None: yield done tasks.check(done) finally: self._current_confirm = None lock.trigger()
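A short sketch of the TrustDB surface defined above, using the module's own singleton; the fingerprint is an illustrative hex string, and note that trust_key persists the change to the user's on-disk trust database.

from zeroinstall.injector.trust import trust_db, domain_from_url

domain = domain_from_url('http://example.com/feed.xml')  # -> 'example.com'
fp = '0123456789ABCDEF0123456789ABCDEF01234567'          # illustrative key
trust_db.trust_key(fp, domain)
assert trust_db.is_trusted(fp, domain)
trust_db.untrust_key(fp, domain)
trust_db.notify()  # tell any registered watchers that the set changed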
lgpl-2.1
-5,275,137,996,461,932,000
32.187739
113
0.699607
false
jpurma/Kataja
plugins/TreesAreMemory/TreesAreMemoryParser.py
1
9791
from itertools import chain

try:
    # When this is imported in Kataja context
    from plugins.TreesAreMemory.Constituent import Constituent
    from plugins.TreesAreMemory.Feature import Feature
    from kataja.syntax.SyntaxState import SyntaxState
    from plugins.TreesAreMemory.utils import simple_bracket_tree_parser
except ImportError:
    # this is run as a standalone, from command line
    from Constituent import Constituent
    from Feature import Feature
    from utils import simple_bracket_tree_parser
    SyntaxState = None


def load_lexicon(lines):
    d = {}
    for line in lines:
        line = line.strip()
        if line.startswith('#') or not line:
            continue
        key, definition = line.split('::')
        key = key.strip()
        definitions = definition.split()
        features = [Feature.from_string(df.strip()) for df in definitions]
        d[key] = Constituent(label=key, features=features, morphology=key)
    return d


class TreesAreMemoryParser:
    def __init__(self, lexicon, forest=None):
        if isinstance(lexicon, dict):
            self.lexicon = lexicon
        else:
            self.lexicon = load_lexicon(lexicon)
        self.forest = forest

    def read_lexicon(self, entry_list, lexicon=None):
        self.lexicon = load_lexicon(entry_list)

    def parse(self, sentence):
        word_list = self.normalize_input(sentence.strip().split())
        if not word_list:
            return None
        next_const = self.get_lexeme(word_list.pop(0))
        return self._parse(next_const, None, word_list)

    def _parse(self, next_const, tree, word_list):
        # Check if new node has immediate justification for merging with tree.
        if tree:
            next_will_match = self.find_matching_features_for(next_const, tree, deep=False)
        else:
            next_will_match = False
        if next_will_match and False:
            # ...if match is found a pile of nodes in wrong order is put at the top of the tree
            msg = f'Merge because "{next_const}" fits into ongoing phrase. "{tree.label}", {next_will_match[0]}' \
                  f' & {next_will_match[2]}'
            tree = self.merge(next_const, tree)
            self.export_to_kataja(tree, msg)
        else:
            # otherwise we attempt to fix the structure before merging a new node into it.
            # fixing is done by raising nodes as long as we can.
            msg = f'Next constituent "{next_const}" doesn\'t fit into ongoing phrase. "{tree and tree.label}".\n'
            match = self.find_matching_features_for(tree, tree)
            if match:
                # match is a tuple that has match_feat, match_node, feat
                msg += f'Internal Merge {match[1].label} to repair {tree.label}'
                tree = self.raise_matching_features(match, tree)
                self.export_to_kataja(tree, msg)
                return self._parse(next_const, tree, word_list)
            elif next_const:
                if tree:
                    msg = f'Merge after raising "{next_const}" to "{tree.label}"'
                    tree = self.merge(next_const, tree)
                else:
                    msg = f'Starting new tree from "{next_const}"'
                    tree = next_const
                self.export_to_kataja(tree, msg)
        if not (word_list or next_const):
            print('stop parsing because no words left and nothing to do')
            return tree
        next_const = self.get_lexeme(word_list.pop(0)) if word_list else None
        return self._parse(next_const, tree, word_list)

    @staticmethod
    def merge(left, right):
        merged = Constituent(label=left.label, left=left, right=right)
        merged.inherited_features = list(left.features)
        merged.head = left.head
        # print('made a naive merge: ', merged, merged.inherited_features)
        return merged

    @staticmethod
    def raise_matching_features(matching_features, tree):
        """ Feature match that is justifying this merge is also used to configure the merge.
        """
        match_feat, const, feat = matching_features
        # print('matching features: ', match_feat, match_feat.leads, feat, feat.leads)
        # Label
        label = '-'
        if match_feat.leads and not feat.leads_other:
            inherited_from_const = const.inherited_features
            label = const.label
        else:
            inherited_from_const = [match_feat]
        if feat.leads and not match_feat.leads_other:
            inherited_from_tree = tree.inherited_features
            label = tree.label
        else:
            inherited_from_tree = [feat]
        if match_feat.leads and feat.leads:
            label = f'{const.label}+{tree.label}'
        # Checking
        if match_feat.positive:
            checked_features = [feat, match_feat]
            match_feat.checks = feat
            feat.checked_by = match_feat
        else:
            checked_features = [match_feat, feat]
            feat.checks = match_feat
            match_feat.checked_by = feat
        # Features are satisfied or not
        if match_feat.expires_in_use and feat.expires_other:
            match_feat.used = True
        if feat.expires_in_use and match_feat.expires_other:
            feat.used = True
        # Directionality, e.g. which goes left
        if match_feat.goes_left and feat.goes_left:
            const_is_left = not feat.positive
        elif not (match_feat.goes_left or feat.goes_left):
            const_is_left = not match_feat.positive
        else:
            const_is_left = match_feat.goes_left
        if const_is_left:
            left = const
            right = tree
            inherited_features = inherited_from_const + inherited_from_tree
        else:
            left = tree
            right = const
            inherited_features = inherited_from_tree + inherited_from_const
        merged = Constituent(label=label, left=left, right=right)
        # Is this phase? - should we stop search here
        if match_feat.phase_barrier or feat.phase_barrier:
            merged.phase_barrier = True
            # print('set up phase barrier at ', merged)
            # print(merged.word_edge)
        if match_feat.leads:
            merged.head = const.head
        else:
            merged.head = tree.head
        merged.checked_features = checked_features
        merged.inherited_features = inherited_features
        return merged

    def find_matching_features_for(self, const, tree, deep=True):
        if not (const and tree and tree.parts):
            return
        for feat in const.inherited_features:
            # print(f'looking for match w. {feat} (has_initiative: {feat.has_initiative})')
            if not feat.has_initiative:
                if feat.blocks and not feat.is_satisfied():
                    break
                continue
            if feat.is_satisfied():
                continue
            passed = {const}
            match = self.find_match_for(feat, tree, passed, deep)
            if match:
                return match
            if feat.blocks and not feat.is_satisfied():
                break

    def find_match_for(self, feat, const, passed, deep=True):
        passed.add(const)
        # print('at ', const.label, ' looking for match for ', feat)
        # print('inherited features here: ', const.inherited_features)
        for match_feat in const.inherited_features:
            if (match_feat.name == feat.name and not match_feat.is_satisfied()
                    and feat.host is not match_feat.host
                    and match_feat.positive != feat.positive):
                return match_feat, const, feat
            if match_feat.blocks and feat.host is not match_feat.host and not match_feat.is_satisfied():
                print('breaking feature search because block: ', match_feat)
                return
        if deep and not const.phase_barrier:
            if const.left and const.left not in passed:
                match = self.find_match_for(feat, const.left, passed)
                if match:
                    return match
            if const.right and const.right not in passed:
                return self.find_match_for(feat, const.right, passed)
        elif deep and const.phase_barrier:
            if const.left and const.left not in passed and const.left.right and const.left.right not in passed:
                return self.find_match_for(feat, const.left.right, passed, deep=False)

    def normalize_input(self, lst):
        return list(chain.from_iterable([reversed(self.get_morphology(w).split("#")) for w in lst]))

    def get_lexeme(self, key):
        if key in self.lexicon:
            return self.lexicon[key].copy()
        return Constituent(label=key)

    def get_morphology(self, key):
        return self.lexicon[key].morphology if key in self.lexicon else key

    @staticmethod
    def spellout(tree):
        def _spellout(tree, used):
            if tree in used:
                # don't visit any branch twice
                return []
            used.add(tree)
            if tree.left:
                return _spellout(tree.left, used) + _spellout(tree.right, used)
            else:
                return [str(tree)]

        used = set()
        res = _spellout(tree, used)
        return ' '.join(res)

    def export_to_kataja(self, tree, message):
        if self.forest:
            print(message)
            syn_state = SyntaxState(tree_roots=[tree], msg=message)
            self.forest.add_step(syn_state)
gpl-3.0
-4,325,917,991,685,196,300
39.659574
114
0.575077
false
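A hypothetical driver for the TreesAreMemoryParser record above, showing the lexicon format load_lexicon() expects: one "key :: features" entry per line, with '#' comments and blank lines skipped. The feature strings and the Finnish example sentence are placeholders; the syntax Feature.from_string() actually accepts is defined in Feature.py, which is outside this record.

# Assumes Constituent.py, Feature.py and this parser are importable.
lexicon_lines = [
    "# comment lines and blank lines are skipped",
    "Pekka :: N",
    "ihailee :: V",
    "Merjaa :: N",
]

parser = TreesAreMemoryParser(lexicon_lines)      # a ready dict is also accepted
tree = parser.parse("Pekka ihailee Merjaa")
if tree:
    print(TreesAreMemoryParser.spellout(tree))    # linearize the resulting tree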
agoravoting/agora-tools
import_election_csv.py
1
18454
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is part of agora-tools. # Copyright (C) 2014-2016 Agora Voting SL <agora@agoravoting.com> # agora-tools is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License. # agora-tools is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with agora-tools. If not, see <http://www.gnu.org/licenses/>. import json import csv import os import copy import operator import argparse from datetime import datetime, timedelta from utils.csvblocks import csv_to_blocks from utils.json_serialize import serialize def iget(d, key, default): ''' Ignore-case get This function makes a dict.get but there's no need for the key to be exactly the same as the key in the dict. Before the real **get** we look into the dict keys and find for this key ignoring the case so the key param can differ from the dict key in lower or upper cases. :param d: this is the dict to search in :param key: this is the key to search :param default: this is the default value to return if key isn't in the dict ''' real_key = key keyl = key.lower() for k in d.keys(): if k.lower() == keyl: real_key = k return d.get(real_key, default) BASE_ELECTION = { "id": -1, "title": "", "description": "", "layout": "", "presentation": { "share_text": [ { "network": "Twitter", "button_text": "", "social_message": "I have just voted in election __URL__, you can too! 
#nvotes" }, { "network": "Facebook", "button_text": "", "social_message": "__URL__" } ], "theme": 'default', "urls": [], "theme_css": "", "extra_options": {} }, "end_date": "", "start_date": "", "real": True, "questions": [] } BASE_QUESTION = { "description": "", "layout": 'simple', "max": 1, "min": 0, "num_winners": 1, "title": "", "randomize_answer_order": True, "tally_type": "plurality-at-large", "answer_total_votes_percentage": "over-total-votes", "extra_options": {}, "answers": [] } BASE_ANSWER = { "id": -1, "category": '', "details": "", "sort_order": -1, "urls": [], "text": "" } def parse_int(s): return int(s) def parse_list(s): return s.split(",") def parse_bool(s): return s == "TRUE" def parse_extra(q): val = dict( (key.replace("extra: ", ""), value) for key, value in q.items() if key.startswith("extra: ") ) if "success_screen__hide_download_ballot_ticket" in val: val["success_screen__hide_download_ballot_ticket"] = parse_bool( val["success_screen__hide_download_ballot_ticket"] ) if "shuffle_category_list" in val: val["shuffle_category_list"] = parse_list(val["shuffle_category_list"]) if "shuffle_categories" in val: val["shuffle_categories"] = parse_bool(val["shuffle_categories"]) if "shuffle_all_options" in val: val["shuffle_all_options"] = parse_bool(val["shuffle_all_options"]) if "select_all_category_clicks" in val: val["select_all_category_clicks"] = parse_int(val["select_all_category_clicks"]) return val def blocks_to_election(blocks, config, add_to_id=0): ''' Parses a list of blocks into an election ''' # convert blocks into a more convenient structure election = blocks[0]['values'] blocks.pop(0) questions = [] def get_answer_id(answer): return answer['Id'] def get_description(answer): return answer.get('Description', '').replace('\n', '<br/>') def get_url(key, value): if key in ['Gender', 'Tag', 'Support']: return "https://agoravoting.com/api/%s/%s" % (key.lower(), value) elif value.startswith('http://') or value.startswith('https://'): return value.strip() return key + value.strip() for question, options in zip(blocks[0::2], blocks[1::2]): q = question['values'] q['options'] = options['values'] data = { "description": q.get("Description", ''), "layout": q.get("Layout", 'simple'), "max": int(q["Maximum choices"]), "min": int(q["Minimum choices"]), "num_winners": int(q["Number of winners"]), "title": q["Title"], "randomize_answer_order": parse_bool(q.get("Randomize options order", False)), "tally_type": q.get("Voting system", "plurality-at-large"), "answer_total_votes_percentage": q["Totals"], "extra_options": parse_extra(q), "answers": [ { "id": int(get_answer_id(answer)), "category": answer.get("Category", ''), "details": get_description(answer), "sort_order": index, "urls": [ { 'title': url_key, 'url': get_url(url_key, url_val) } for url_key, url_val in answer.items() if url_key in ['Image URL', 'URL', 'Gender', 'Tag', 'Support'] and\ len(url_val.strip()) > 0 ], "text": answer['Text'], } for answer, index in zip(q['options'], range(len(q['options']))) if len("".join(answer.values()).strip()) > 0 ] } # check answers try: assert len(data['answers']) == len(set(list(map(operator.itemgetter('text'), data['answers'])))) except Exception as e: print("duplicated options in question '%s':" % q["Title"]) l = list(map(operator.itemgetter('text'), data['answers'])) print(set([x for x in l if l.count(x) > 1])) raise e data['max'] = min(data['max'], len(data['answers'])) data['num_winners'] = min(data['num_winners'], len(data['answers'])) for answ in data['answers']: try: assert 
answ['id'] == answ['sort_order'] except: print(answ) questions.append(data) def get_def(dictionary, key, default_value): if key not in dictionary or len(dictionary[key]) == 0: return default_value return dictionary[key] start_date = datetime.strptime("10/10/2015 10:10", "%d/%m/%Y %H:%M") if len(election["Start date time"]) > 0: try: start_date = datetime.strptime(election["Start date time"], "%d/%m/%Y %H:%M:%S") except: start_date = datetime.strptime(election["Start date time"], "%d/%m/%Y %H:%M") ret = { "id": int(election['Id']) + add_to_id, "authorities": config['authorities'], "director": config['director'], "title": election['Title'], "description": election['Description'], "layout": election.get('Layout', ''), "presentation": { "share_text": [ { "network": "Twitter", "button_text": "", "social_message": election.get('Share Text', '') }, { "network": "Facebook", "button_text": "", "social_message": "__URL__" } ], "theme": election.get('Theme', 'default'), "urls": [], "theme_css": "", "extra_options": parse_extra(election), "show_login_link_on_home": parse_bool(iget(election, 'login link on home', False)), }, "end_date": (start_date + timedelta(hours=int(get_def(election, 'Duration in hours', '24')))).isoformat() + ".001", "start_date": start_date.isoformat() + ".001", "questions": questions, "real": True } return ret def form_to_elections(path, separator, config, add_to_id): ''' Converts the google forms into election configurations ''' election_funcs = { "Título": lambda d: ["title", d], "Descripción": lambda d: ["description", d], "Comienzo": lambda d: ["start_date", datetime.strptime(d, "%m/%d/%Y %H:%M:%S").isoformat()+ ".001"], "Final": lambda d: ["end_date", datetime.strptime(d, "%m/%d/%Y %H:%M:%S").isoformat()+ ".001"], } census_key = "Censo" more_keys = { "¿Más preguntas?": lambda v: "No" not in v } auth_method = config['authapi']['event_config']['auth_method'] question_options_key = "Opciones" question_funcs = { "Título": lambda d: ["title", d], "Descripción": lambda d: ["description", d], "Número de ganadores": lambda d: ["num_winners", int(d)], "Número máximo de opciones": lambda d: ["max", int(d)], "Número mínimo de opciones": lambda d: ["min", int(d)], "Orden aleatorio": lambda d: ["randomize_answer_order", d == "Aleatorio"], "Resultados": lambda d: ["answer_total_votes_percentage", "over-total-votes" if d == "Sobre votos totales" else "over-total-valid-votes"] } elections = [] base_election = copy.deepcopy(BASE_ELECTION) base_election['director'] = config['director'] base_election['authorities'] = config['authorities'] with open(path, mode='r', encoding="utf-8", errors='strict') as f: fcsv = csv.reader(f, delimiter=',', quotechar='"') keys = fcsv.__next__() for values in fcsv: if len(values) == 0: continue question_num = -1 election = copy.deepcopy(base_election) election['id'] = add_to_id + len(elections) question = None for key, value, index in zip(keys, values, range(len(values))): if question_num == -1 and key not in more_keys.keys() and key in election_funcs.keys(): dest_key, dest_value = election_funcs[key](value) election[dest_key] = dest_value elif key == census_key: if auth_method == "sms": election['census'] = [{"tlf": item} for item in value.split("\n")] else: # email election['census'] = [{"email": item} for item in value.split("\n")] question_num += 1 question = copy.deepcopy(BASE_QUESTION) elif question_num >= 0 and key in question_funcs.keys(): dest_key, dest_value = question_funcs[key](value) question[dest_key] = dest_value elif question_num >= 0 and key == 
question_options_key: options = value.strip().split("\n") question['answers'] = [{ "id": opt_id, "category": '', "details": '', "sort_order": opt_id, "urls": [], "text": opt } for opt, opt_id in zip(options, range(len(options)))] elif question_num >= 0 and key in more_keys.keys(): question_num += 1 election['questions'].append(question) question = copy.deepcopy(BASE_QUESTION) if not more_keys[key](value): elections.append(election) break return elections if __name__ == '__main__': parser = argparse.ArgumentParser(description='Converts a CSV into the json to create an election.') parser.add_argument('-c', '--config-path', help='default config for the election') parser.add_argument('-i', '--input-path', help='input file or directory') parser.add_argument('-o', '--output-path', help='output file or directory') parser.add_argument('-A', '--admin-format', help='use create format for agora-admin instead of agora-elections', action="store_true") parser.add_argument('-a', '--add-to-id', type=int, help='add an int number to the id', default=0) parser.add_argument( '-f', '--format', choices=['csv-blocks', 'tsv-blocks', 'csv-google-forms'], default="csv-blocks", help='output file or directory') args = parser.parse_args() if not os.access(args.input_path, os.R_OK): print("can't read %s" % args.input_path) exit(2) if os.path.isdir(args.output_path) and not os.access(args.output_path, os.W_OK): print("can't write to %s" % args.output_path) exit(2) if not os.access(args.config_path, os.R_OK): print("can't read %s" % args.config_path) exit(2) config = None with open(args.config_path, mode='r', encoding="utf-8", errors='strict') as f: config = json.loads(f.read()) try: if args.format == "csv-blocks" or args.format == "tsv-blocks": separator = { "csv-blocks": ",", "tsv-blocks": "\t" }[args.format] extension = { "csv-blocks": ".csv", "tsv-blocks": ".tsv" }[args.format] if os.path.isdir(args.input_path): if not os.path.exists(args.output_path): os.makedirs(args.output_path) i = 0 files = sorted([name for name in os.listdir(args.input_path) if os.path.isfile(os.path.join(args.input_path, name)) and name.endswith(extension)]) for name in files: print("importing %s" % name) file_path = os.path.join(args.input_path, name) blocks = csv_to_blocks(path=file_path, separator=separator) election = blocks_to_election(blocks, config, args.add_to_id) if str(election['id']) + extension != name: print("WARNING: election id %i doesn't match filename %s" % (election['id'], name)) if not args.admin_format: output_path = os.path.join(args.output_path, str(election['id']) + ".config.json") else: output_path = os.path.join(args.output_path, str(i) + ".json") auth_config_path = os.path.join(args.output_path, str(i) + ".config.json") auth_config = config['authapi']['event_config'] with open(auth_config_path, mode='w', encoding="utf-8", errors='strict') as f: f.write(serialize(auth_config)) auth_census_path = os.path.join(args.output_path, str(i) + ".census.json") census_config = config['authapi'].get('census_data', []) with open(auth_census_path, mode='w', encoding="utf-8", errors='strict') as f: f.write(serialize(census_config)) with open(output_path, mode='w', encoding="utf-8", errors='strict') as f: f.write(serialize(election)) if config.get('agora_results_config', None) is not None: if not args.admin_format: results_conf_path = os.path.join(args.output_path, str(election['id']) + ".config.results.json") else: results_conf_path = os.path.join(args.output_path, str(i) + ".config.results.json") with open( results_conf_path, 
                                mode='w', encoding="utf-8", errors='strict') as f:
                                f.write(serialize(
                                    dict(
                                        version="1.0",
                                        pipes=config['agora_results_config']
                                    )
                                ))

                    i += 1
            else:
                blocks = csv_to_blocks(path=args.input_path, separator=separator)
                election = blocks_to_election(blocks, config, args.add_to_id)

                if str(election['id']) + extension != os.path.basename(args.input_path):
                    print("WARNING: election id %i doesn't match filename %s" % (
                        election['id'], os.path.basename(args.input_path)))

                with open(args.output_path, mode='w', encoding="utf-8", errors='strict') as f:
                    f.write(serialize(election))
        else:
            if not os.path.exists(args.output_path):
                os.makedirs(args.output_path)
            elif not os.path.isdir(args.output_path):
                print("output path must be a directory")
                exit(2)

            elections = form_to_elections(path=args.input_path, separator="\t",
                                          config=config, add_to_id=args.add_to_id)
            for election in elections:
                fpath = os.path.join(args.output_path, "%d.census.json" % election["id"])
                with open(fpath, mode='w', encoding="utf-8", errors='strict') as f:
                    f.write(serialize(election['census']))
                del election['census']

                fpath = os.path.join(args.output_path, "%d.json" % election["id"])
                with open(fpath, mode='w', encoding="utf-8", errors='strict') as f:
                    f.write(serialize(election))

                fpath = os.path.join(args.output_path, "%d.config.json" % election["id"])
                with open(fpath, mode='w', encoding="utf-8", errors='strict') as f:
                    f.write(serialize(config['authapi']['event_config']))
    except:
        print("malformed CSV")
        import traceback
        traceback.print_exc()
        exit(3)
agpl-3.0
-4,769,490,560,645,930,000
38.492505
146
0.51895
false
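The import_election_csv.py record above is a command-line tool; the flags below come directly from its argparse block, and the dict shows the config keys the script actually reads (director, authorities, authapi.event_config, optional census_data and agora_results_config). Paths and values are illustrative only.

# Hypothetical invocation (file names are placeholders):
#   python3 import_election_csv.py -c config.json -i elections/ -o output/ \
#       -f csv-blocks -a 1000
#
# Minimal shape of config.json, expressed as the Python dict the script loads:
config = {
    "director": "auth1",                  # used as election['director']
    "authorities": ["auth2"],             # used as election['authorities']
    "authapi": {
        "event_config": {"auth_method": "email"},  # or "sms" for a phone census
        "census_data": []
    },
    "agora_results_config": []            # optional; enables *.config.results.json
}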
hxxft/lynx-native
Core/build/jni_generator.py
1
52358
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Extracts native methods from a Java file and generates the JNI bindings. If you change this, please run and update the tests.""" import collections import errno import optparse import os import re import string from string import Template import subprocess import sys import textwrap import zipfile class ParseError(Exception): """Exception thrown when we can't parse the input file.""" def __init__(self, description, *context_lines): Exception.__init__(self) self.description = description self.context_lines = context_lines def __str__(self): context = '\n'.join(self.context_lines) return '***\nERROR: %s\n\n%s\n***' % (self.description, context) class Param(object): """Describes a param for a method, either java or native.""" def __init__(self, **kwargs): self.datatype = kwargs['datatype'] self.name = kwargs['name'] class NativeMethod(object): """Describes a C/C++ method that is called by Java code""" def __init__(self, **kwargs): self.static = kwargs['static'] self.java_class_name = kwargs['java_class_name'] self.return_type = kwargs['return_type'] self.name = kwargs['name'] self.params = kwargs['params'] if self.params: assert type(self.params) is list assert type(self.params[0]) is Param if (self.params and self.params[0].datatype == kwargs.get('ptr_type', 'int') and self.params[0].name.startswith('native')): self.type = 'method' self.p0_type = self.params[0].name[len('native'):] if kwargs.get('native_class_name'): self.p0_type = kwargs['native_class_name'] else: self.type = 'function' self.method_id_var_name = kwargs.get('method_id_var_name', None) class CalledByNative(object): """Describes a java method exported to c/c++""" def __init__(self, **kwargs): self.system_class = kwargs['system_class'] self.unchecked = kwargs['unchecked'] self.static = kwargs['static'] self.java_class_name = kwargs['java_class_name'] self.return_type = kwargs['return_type'] self.name = kwargs['name'] self.params = kwargs['params'] self.method_id_var_name = kwargs.get('method_id_var_name', None) self.signature = kwargs.get('signature') self.is_constructor = kwargs.get('is_constructor', False) self.env_call = GetEnvCall(self.is_constructor, self.static, self.return_type) self.static_cast = GetStaticCastForReturnType(self.return_type) class ConstantField(object): def __init__(self, **kwargs): self.name = kwargs['name'] self.value = kwargs['value'] def JavaDataTypeToC(java_type): """Returns a C datatype for the given java type.""" java_pod_type_map = { 'int': 'jint', 'byte': 'jbyte', 'char': 'jchar', 'short': 'jshort', 'boolean': 'jboolean', 'long': 'jlong', 'double': 'jdouble', 'float': 'jfloat', } java_type_map = { 'void': 'void', 'String': 'jstring', 'java/lang/String': 'jstring', 'java/lang/Class': 'jclass', } if java_type in java_pod_type_map: return java_pod_type_map[java_type] elif java_type in java_type_map: return java_type_map[java_type] elif java_type.endswith('[]'): if java_type[:-2] in java_pod_type_map: return java_pod_type_map[java_type[:-2]] + 'Array' return 'jobjectArray' elif java_type.startswith('Class'): # Checking just the start of the name, rather than a direct comparison, # in order to handle generics. 
return 'jclass' else: return 'jobject' def JavaDataTypeToCForCalledByNativeParam(java_type): """Returns a C datatype to be when calling from native.""" if java_type == 'int': return 'int' else: return JavaDataTypeToC(java_type) def JavaReturnValueToC(java_type): """Returns a valid C return value for the given java type.""" java_pod_type_map = { 'int': '0', 'byte': '0', 'char': '0', 'short': '0', 'boolean': 'false', 'long': '0', 'double': '0', 'float': '0', 'void': '' } return java_pod_type_map.get(java_type, 'NULL') class JniParams(object): _imports = [] _fully_qualified_class = '' _package = '' _inner_classes = [] _remappings = [] _implicit_imports = [] @staticmethod def SetFullyQualifiedClass(fully_qualified_class): JniParams._fully_qualified_class = 'L' + fully_qualified_class JniParams._package = '/'.join(fully_qualified_class.split('/')[:-1]) @staticmethod def AddAdditionalImport(class_name): assert class_name.endswith('.class') raw_class_name = class_name[:-len('.class')] if '.' in raw_class_name: raise SyntaxError('%s cannot be used in @JNIAdditionalImport. ' 'Only import unqualified outer classes.' % class_name) new_import = 'L%s/%s' % (JniParams._package, raw_class_name) if new_import in JniParams._imports: raise SyntaxError('Do not use JNIAdditionalImport on an already ' 'imported class: %s' % (new_import.replace('/', '.'))) JniParams._imports += [new_import] @staticmethod def ExtractImportsAndInnerClasses(contents): if not JniParams._package: raise RuntimeError('SetFullyQualifiedClass must be called before ' 'ExtractImportsAndInnerClasses') contents = contents.replace('\n', '') re_import = re.compile(r'import.*?(?P<class>\S*?);') for match in re.finditer(re_import, contents): JniParams._imports += ['L' + match.group('class').replace('.', '/')] re_inner = re.compile(r'(class|interface)\s+?(?P<name>\w+?)\W') for match in re.finditer(re_inner, contents): inner = match.group('name') if not JniParams._fully_qualified_class.endswith(inner): JniParams._inner_classes += [JniParams._fully_qualified_class + '$' + inner] re_additional_imports = re.compile( r'@JNIAdditionalImport\(\s*{?(?P<class_names>.*?)}?\s*\)') for match in re.finditer(re_additional_imports, contents): for class_name in match.group('class_names').split(','): JniParams.AddAdditionalImport(class_name.strip()) @staticmethod def ParseJavaPSignature(signature_line): prefix = 'Signature: ' index = signature_line.find(prefix) if index == -1: prefix = 'descriptor: ' index = signature_line.index(prefix) return '"%s"' % signature_line[index + len(prefix):] @staticmethod def JavaToJni(param): """Converts a java param into a JNI signature type.""" pod_param_map = { 'int': 'I', 'boolean': 'Z', 'char': 'C', 'short': 'S', 'long': 'J', 'double': 'D', 'float': 'F', 'byte': 'B', 'void': 'V', } object_param_list = [ 'Ljava/lang/Boolean', 'Ljava/lang/Integer', 'Ljava/lang/Long', 'Ljava/lang/Object', 'Ljava/lang/String', 'Ljava/lang/Class', ] prefix = '' # Array? while param[-2:] == '[]': prefix += '[' param = param[:-2] # Generic? if '<' in param: param = param[:param.index('<')] if param in pod_param_map: return prefix + pod_param_map[param] if '/' in param: # Coming from javap, use the fully qualified param directly. 
return prefix + 'L' + JniParams.RemapClassName(param) + ';' for qualified_name in (object_param_list + [JniParams._fully_qualified_class] + JniParams._inner_classes): if (qualified_name.endswith('/' + param) or qualified_name.endswith('$' + param.replace('.', '$')) or qualified_name == 'L' + param): return prefix + JniParams.RemapClassName(qualified_name) + ';' # Is it from an import? (e.g. referecing Class from import pkg.Class; # note that referencing an inner class Inner from import pkg.Class.Inner # is not supported). for qualified_name in JniParams._imports: if qualified_name.endswith('/' + param): # Ensure it's not an inner class. components = qualified_name.split('/') if len(components) > 2 and components[-2][0].isupper(): raise SyntaxError('Inner class (%s) can not be imported ' 'and used by JNI (%s). Please import the outer ' 'class and use Outer.Inner instead.' % (qualified_name, param)) return prefix + JniParams.RemapClassName(qualified_name) + ';' # Is it an inner class from an outer class import? (e.g. referencing # Class.Inner from import pkg.Class). if '.' in param: components = param.split('.') outer = '/'.join(components[:-1]) inner = components[-1] for qualified_name in JniParams._imports: if qualified_name.endswith('/' + outer): return (prefix + JniParams.RemapClassName(qualified_name) + '$' + inner + ';') raise SyntaxError('Inner class (%s) can not be ' 'used directly by JNI. Please import the outer ' 'class, probably:\n' 'import %s.%s;' % (param, JniParams._package.replace('/', '.'), outer.replace('/', '.'))) JniParams._CheckImplicitImports(param) # Type not found, falling back to same package as this class. return (prefix + 'L' + JniParams.RemapClassName(JniParams._package + '/' + param) + ';') @staticmethod def _CheckImplicitImports(param): # Ensure implicit imports, such as java.lang.*, are not being treated # as being in the same package. if not JniParams._implicit_imports: # This file was generated from android.jar and lists # all classes that are implicitly imported. with file(os.path.join(os.path.dirname(sys.argv[0]), 'android_jar.classes'), 'r') as f: JniParams._implicit_imports = f.readlines() for implicit_import in JniParams._implicit_imports: implicit_import = implicit_import.strip().replace('.class', '') implicit_import = implicit_import.replace('/', '.') if implicit_import.endswith('.' 
+ param): raise SyntaxError('Ambiguous class (%s) can not be used directly ' 'by JNI.\nPlease import it, probably:\n\n' 'import %s;' % (param, implicit_import)) @staticmethod def Signature(params, returns, wrap): """Returns the JNI signature for the given datatypes.""" items = ['('] items += [JniParams.JavaToJni(param.datatype) for param in params] items += [')'] items += [JniParams.JavaToJni(returns)] if wrap: return '\n' + '\n'.join(['"' + item + '"' for item in items]) else: return '"' + ''.join(items) + '"' @staticmethod def Parse(params): """Parses the params into a list of Param objects.""" if not params: return [] ret = [] for p in [p.strip() for p in params.split(',')]: items = p.split(' ') if 'final' in items: items.remove('final') param = Param( datatype=items[0], name=(items[1] if len(items) > 1 else 'p%s' % len(ret)), ) ret += [param] return ret @staticmethod def RemapClassName(class_name): """Remaps class names using the jarjar mapping table.""" for old, new in JniParams._remappings: if old.endswith('**') and old[:-2] in class_name: return class_name.replace(old[:-2], new, 1) if '*' not in old and class_name.endswith(old): return class_name.replace(old, new, 1) return class_name @staticmethod def SetJarJarMappings(mappings): """Parse jarjar mappings from a string.""" JniParams._remappings = [] for line in mappings.splitlines(): rule = line.split() if rule[0] != 'rule': continue _, src, dest = rule src = src.replace('.', '/') dest = dest.replace('.', '/') if src.endswith('**'): src_real_name = src[:-2] else: assert not '*' in src src_real_name = src if dest.endswith('@0'): JniParams._remappings.append((src, dest[:-2] + src_real_name)) elif dest.endswith('@1'): assert '**' in src JniParams._remappings.append((src, dest[:-2])) else: assert not '@' in dest JniParams._remappings.append((src, dest)) def ExtractJNINamespace(contents): re_jni_namespace = re.compile('.*?@JNINamespace\("(.*?)"\)') m = re.findall(re_jni_namespace, contents) if not m: return '' return m[0] def ExtractFullyQualifiedJavaClassName(java_file_name, contents): re_package = re.compile('.*?package (.*?);') matches = re.findall(re_package, contents) if not matches: raise SyntaxError('Unable to find "package" line in %s' % java_file_name) return (matches[0].replace('.', '/') + '/' + os.path.splitext(os.path.basename(java_file_name))[0]) def ExtractNatives(contents, ptr_type): """Returns a list of dict containing information about a native method.""" contents = contents.replace('\n', '') natives = [] re_native = re.compile(r'(@NativeClassQualifiedName' '\(\"(?P<native_class_name>.*?)\"\)\s+)?' '(@NativeCall(\(\"(?P<java_class_name>.*?)\"\))\s+)?' 
'(?P<qualifiers>\w+\s\w+|\w+|\s+)\s*native ' '(?P<return_type>\S*) ' '(?P<name>native\w+)\((?P<params>.*?)\);') for match in re.finditer(re_native, contents): native = NativeMethod( static='static' in match.group('qualifiers'), java_class_name=match.group('java_class_name'), native_class_name=match.group('native_class_name'), return_type=match.group('return_type'), name=match.group('name').replace('native', ''), params=JniParams.Parse(match.group('params')), ptr_type=ptr_type) natives += [native] return natives def GetStaticCastForReturnType(return_type): type_map = { 'String' : 'jstring', 'java/lang/String' : 'jstring', 'boolean[]': 'jbooleanArray', 'byte[]': 'jbyteArray', 'char[]': 'jcharArray', 'short[]': 'jshortArray', 'int[]': 'jintArray', 'long[]': 'jlongArray', 'float[]': 'jfloatArray', 'double[]': 'jdoubleArray' } ret = type_map.get(return_type, None) if ret: return ret if return_type.endswith('[]'): return 'jobjectArray' return None def GetEnvCall(is_constructor, is_static, return_type): """Maps the types availabe via env->Call__Method.""" if is_constructor: return 'NewObject' env_call_map = {'boolean': 'Boolean', 'byte': 'Byte', 'char': 'Char', 'short': 'Short', 'int': 'Int', 'long': 'Long', 'float': 'Float', 'void': 'Void', 'double': 'Double', 'Object': 'Object', } call = env_call_map.get(return_type, 'Object') if is_static: call = 'Static' + call return 'Call' + call + 'Method' def GetMangledParam(datatype): """Returns a mangled identifier for the datatype.""" if len(datatype) <= 2: return datatype.replace('[', 'A') ret = '' for i in range(1, len(datatype)): c = datatype[i] if c == '[': ret += 'A' elif c.isupper() or datatype[i - 1] in ['/', 'L']: ret += c.upper() return ret def GetMangledMethodName(name, params, return_type): """Returns a mangled method name for the given signature. The returned name can be used as a C identifier and will be unique for all valid overloads of the same method. Args: name: string. params: list of Param. return_type: string. Returns: A mangled name. """ mangled_items = [] for datatype in [return_type] + [x.datatype for x in params]: mangled_items += [GetMangledParam(JniParams.JavaToJni(datatype))] mangled_name = name + '_'.join(mangled_items) assert re.match(r'[0-9a-zA-Z_]+', mangled_name) return mangled_name def MangleCalledByNatives(called_by_natives): """Mangles all the overloads from the call_by_natives list.""" method_counts = collections.defaultdict( lambda: collections.defaultdict(lambda: 0)) for called_by_native in called_by_natives: java_class_name = called_by_native.java_class_name name = called_by_native.name method_counts[java_class_name][name] += 1 for called_by_native in called_by_natives: java_class_name = called_by_native.java_class_name method_name = called_by_native.name method_id_var_name = method_name if method_counts[java_class_name][method_name] > 1: method_id_var_name = GetMangledMethodName(method_name, called_by_native.params, called_by_native.return_type) called_by_native.method_id_var_name = method_id_var_name return called_by_natives # Regex to match the JNI return types that should be included in a # ScopedLocalJavaRef. RE_SCOPED_JNI_RETURN_TYPES = re.compile('jobject|jclass|jstring|.*Array') # Regex to match a string like "@CalledByNative public void foo(int bar)". RE_CALLED_BY_NATIVE = re.compile( '@CalledByNative(?P<Unchecked>(Unchecked)*?)(?:\("(?P<annotation>.*)"\))?' 
'\s+(?P<prefix>[\w ]*?)' '\s*(?P<return_type>\S+?)' '\s+(?P<name>\w+)' '\s*\((?P<params>[^\)]*)\)') def ExtractCalledByNatives(contents): """Parses all methods annotated with @CalledByNative. Args: contents: the contents of the java file. Returns: A list of dict with information about the annotated methods. TODO(bulach): return a CalledByNative object. Raises: ParseError: if unable to parse. """ called_by_natives = [] for match in re.finditer(RE_CALLED_BY_NATIVE, contents): called_by_natives += [CalledByNative( system_class=False, unchecked='Unchecked' in match.group('Unchecked'), static='static' in match.group('prefix'), java_class_name=match.group('annotation') or '', return_type=match.group('return_type'), name=match.group('name'), params=JniParams.Parse(match.group('params')))] # Check for any @CalledByNative occurrences that weren't matched. unmatched_lines = re.sub(RE_CALLED_BY_NATIVE, '', contents).split('\n') for line1, line2 in zip(unmatched_lines, unmatched_lines[1:]): if '@CalledByNative' in line1: raise ParseError('could not parse @CalledByNative method signature', line1, line2) return MangleCalledByNatives(called_by_natives) class JNIFromJavaP(object): """Uses 'javap' to parse a .class file and generate the JNI header file.""" def __init__(self, contents, options): self.contents = contents self.namespace = options.namespace for line in contents: class_name = re.match( '.*?(public).*?(class|interface) (?P<class_name>\S+?)( |\Z)', line) if class_name: self.fully_qualified_class = class_name.group('class_name') break self.fully_qualified_class = self.fully_qualified_class.replace('.', '/') # Java 7's javap includes type parameters in output, like HashSet<T>. Strip # away the <...> and use the raw class name that Java 6 would've given us. self.fully_qualified_class = self.fully_qualified_class.split('<', 1)[0] JniParams.SetFullyQualifiedClass(self.fully_qualified_class) self.java_class_name = self.fully_qualified_class.split('/')[-1] if not self.namespace: self.namespace = 'JNI_' + self.java_class_name re_method = re.compile('(?P<prefix>.*?)(?P<return_type>\S+?) 
(?P<name>\w+?)' '\((?P<params>.*?)\)') self.called_by_natives = [] for lineno, content in enumerate(contents[2:], 2): match = re.match(re_method, content) if not match: continue self.called_by_natives += [CalledByNative( system_class=True, unchecked=False, static='static' in match.group('prefix'), java_class_name='', return_type=match.group('return_type').replace('.', '/'), name=match.group('name'), params=JniParams.Parse(match.group('params').replace('.', '/')), signature=JniParams.ParseJavaPSignature(contents[lineno + 1]))] re_constructor = re.compile('(.*?)public ' + self.fully_qualified_class.replace('/', '.') + '\((?P<params>.*?)\)') for lineno, content in enumerate(contents[2:], 2): match = re.match(re_constructor, content) if not match: continue self.called_by_natives += [CalledByNative( system_class=True, unchecked=False, static=False, java_class_name='', return_type=self.fully_qualified_class, name='Constructor', params=JniParams.Parse(match.group('params').replace('.', '/')), signature=JniParams.ParseJavaPSignature(contents[lineno + 1]), is_constructor=True)] self.called_by_natives = MangleCalledByNatives(self.called_by_natives) self.constant_fields = [] re_constant_field = re.compile('.*?public static final int (?P<name>.*?);') re_constant_field_value = re.compile( '.*?Constant(Value| value): int (?P<value>(-*[0-9]+)?)') for lineno, content in enumerate(contents[2:], 2): match = re.match(re_constant_field, content) if not match: continue value = re.match(re_constant_field_value, contents[lineno + 2]) if not value: value = re.match(re_constant_field_value, contents[lineno + 3]) if value: self.constant_fields.append( ConstantField(name=match.group('name'), value=value.group('value'))) self.inl_header_file_generator = InlHeaderFileGenerator( self.namespace, self.fully_qualified_class, [], self.called_by_natives, self.constant_fields, options) def GetContent(self): return self.inl_header_file_generator.GetContent() @staticmethod def CreateFromClass(class_file, options): class_name = os.path.splitext(os.path.basename(class_file))[0] p = subprocess.Popen(args=[options.javap, '-c', '-verbose', '-s', class_name], cwd=os.path.dirname(class_file), stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, _ = p.communicate() jni_from_javap = JNIFromJavaP(stdout.split('\n'), options) return jni_from_javap class JNIFromJavaSource(object): """Uses the given java source file to generate the JNI header file.""" # Match single line comments, multiline comments, character literals, and # double-quoted strings. _comment_remover_regex = re.compile( r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"', re.DOTALL | re.MULTILINE) def __init__(self, contents, fully_qualified_class, options): contents = self._RemoveComments(contents) JniParams.SetFullyQualifiedClass(fully_qualified_class) JniParams.ExtractImportsAndInnerClasses(contents) jni_namespace = ExtractJNINamespace(contents) or options.namespace natives = ExtractNatives(contents, options.ptr_type) called_by_natives = ExtractCalledByNatives(contents) if len(natives) == 0 and len(called_by_natives) == 0: raise SyntaxError('Unable to find any JNI methods for %s.' % fully_qualified_class) inl_header_file_generator = InlHeaderFileGenerator( jni_namespace, fully_qualified_class, natives, called_by_natives, [], options) self.content = inl_header_file_generator.GetContent() @classmethod def _RemoveComments(cls, contents): # We need to support both inline and block comments, and we need to handle # strings that contain '//' or '/*'. 
# TODO(bulach): This is a bit hacky. It would be cleaner to use a real Java # parser. Maybe we could ditch JNIFromJavaSource and just always use # JNIFromJavaP; or maybe we could rewrite this script in Java and use APT. # http://code.google.com/p/chromium/issues/detail?id=138941 def replacer(match): # Replace matches that are comments with nothing; return literals/strings # unchanged. s = match.group(0) if s.startswith('/'): return '' else: return s return cls._comment_remover_regex.sub(replacer, contents) def GetContent(self): return self.content @staticmethod def CreateFromFile(java_file_name, options): contents = file(java_file_name).read() fully_qualified_class = ExtractFullyQualifiedJavaClassName(java_file_name, contents) return JNIFromJavaSource(contents, fully_qualified_class, options) class InlHeaderFileGenerator(object): """Generates an inline header file for JNI integration.""" def __init__(self, namespace, fully_qualified_class, natives, called_by_natives, constant_fields, options): self.namespace = namespace self.fully_qualified_class = fully_qualified_class self.class_name = self.fully_qualified_class.split('/')[-1] self.natives = natives self.called_by_natives = called_by_natives self.header_guard = fully_qualified_class.replace('/', '_') + '_JNI' self.constant_fields = constant_fields self.options = options self.init_native = self.ExtractInitNative(options) def ExtractInitNative(self, options): for native in self.natives: if options.jni_init_native_name == 'native' + native.name: self.natives.remove(native) return native return None def GetContent(self): """Returns the content of the JNI binding file.""" template = Template("""\ // This file is autogenerated by // ${SCRIPT_NAME} // For // ${FULLY_QUALIFIED_CLASS} #ifndef ${HEADER_GUARD} #define ${HEADER_GUARD} #include <jni.h> ${INCLUDES} #include "base/android/android_jni.h" // Step 1: forward declarations. namespace { $CLASS_PATH_DEFINITIONS $METHOD_ID_DEFINITIONS } // namespace $OPEN_NAMESPACE $FORWARD_DECLARATIONS $CONSTANT_FIELDS // Step 2: method stubs. $METHOD_STUBS // Step 3: RegisterNatives. 
$JNI_NATIVE_METHODS $REGISTER_NATIVES $CLOSE_NAMESPACE $JNI_REGISTER_NATIVES #endif // ${HEADER_GUARD} """) values = { 'SCRIPT_NAME': self.options.script_name, 'FULLY_QUALIFIED_CLASS': self.fully_qualified_class, 'CLASS_PATH_DEFINITIONS': self.GetClassPathDefinitionsString(), 'METHOD_ID_DEFINITIONS': self.GetMethodIDDefinitionsString(), 'FORWARD_DECLARATIONS': self.GetForwardDeclarationsString(), 'CONSTANT_FIELDS': self.GetConstantFieldsString(), 'METHOD_STUBS': self.GetMethodStubsString(), 'OPEN_NAMESPACE': self.GetOpenNamespaceString(), 'JNI_NATIVE_METHODS': self.GetJNINativeMethodsString(), 'REGISTER_NATIVES': self.GetRegisterNativesString(), 'CLOSE_NAMESPACE': self.GetCloseNamespaceString(), 'HEADER_GUARD': self.header_guard, 'INCLUDES': self.GetIncludesString(), 'JNI_REGISTER_NATIVES': self.GetJNIRegisterNativesString() } return WrapOutput(template.substitute(values)) def GetClassPathDefinitionsString(self): ret = [] ret += [self.GetClassPathDefinitions()] return '\n'.join(ret) def GetMethodIDDefinitionsString(self): """Returns the definition of method ids for the called by native methods.""" if not self.options.eager_called_by_natives: return '' template = Template("""\ jmethodID g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} = NULL;""") ret = [] for called_by_native in self.called_by_natives: values = { 'JAVA_CLASS': called_by_native.java_class_name or self.class_name, 'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name, } ret += [template.substitute(values)] return '\n'.join(ret) def GetForwardDeclarationsString(self): ret = [] for native in self.natives: if native.type != 'method': ret += [self.GetForwardDeclaration(native)] if self.options.native_exports and ret: return '\nextern "C" {\n' + "\n".join(ret) + '\n}; // extern "C"' return '\n'.join(ret) def GetConstantFieldsString(self): if not self.constant_fields: return '' ret = ['enum Java_%s_constant_fields {' % self.class_name] for c in self.constant_fields: ret += [' %s = %s,' % (c.name, c.value)] ret += ['};'] return '\n'.join(ret) def GetMethodStubsString(self): """Returns the code corresponding to method stubs.""" ret = [] for native in self.natives: if native.type == 'method': ret += [self.GetNativeMethodStubString(native)] if self.options.eager_called_by_natives: ret += self.GetEagerCalledByNativeMethodStubs() else: ret += self.GetLazyCalledByNativeMethodStubs() if self.options.native_exports and ret: return '\nextern "C" {\n' + "\n".join(ret) + '\n}; // extern "C"' return '\n'.join(ret) def GetLazyCalledByNativeMethodStubs(self): return [self.GetLazyCalledByNativeMethodStub(called_by_native) for called_by_native in self.called_by_natives] def GetEagerCalledByNativeMethodStubs(self): ret = [] if self.called_by_natives: ret += ['namespace {'] for called_by_native in self.called_by_natives: ret += [self.GetEagerCalledByNativeMethodStub(called_by_native)] ret += ['} // namespace'] return ret def GetIncludesString(self): if not self.options.includes: return '' includes = self.options.includes.split(',') return '\n'.join('#include "%s"' % x for x in includes) def GetKMethodsString(self, clazz): ret = [] for native in self.natives: if (native.java_class_name == clazz or (not native.java_class_name and clazz == self.class_name)): ret += [self.GetKMethodArrayEntry(native)] return '\n'.join(ret) def SubstituteNativeMethods(self, template): """Substitutes JAVA_CLASS and KMETHODS in the provided template.""" ret = [] all_classes = self.GetUniqueClasses(self.natives) all_classes[self.class_name] = self.fully_qualified_class for clazz in 
all_classes: kmethods = self.GetKMethodsString(clazz) if kmethods: values = {'JAVA_CLASS': clazz, 'KMETHODS': kmethods} ret += [template.substitute(values)] if not ret: return '' return '\n' + '\n'.join(ret) def GetJNINativeMethodsString(self): """Returns the implementation of the array of native methods.""" if self.options.native_exports and not self.options.native_exports_optional: return '' template = Template("""\ static const JNINativeMethod kMethods${JAVA_CLASS}[] = { ${KMETHODS} }; """) return self.SubstituteNativeMethods(template) def GetRegisterCalledByNativesImplString(self): """Returns the code for registering the called by native methods.""" if not self.options.eager_called_by_natives: return '' template = Template("""\ g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} = ${GET_METHOD_ID_IMPL} if (g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} == NULL) { return false; } """) ret = [] for called_by_native in self.called_by_natives: values = { 'JAVA_CLASS': called_by_native.java_class_name or self.class_name, 'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name, 'GET_METHOD_ID_IMPL': self.GetMethodIDImpl(called_by_native), } ret += [template.substitute(values)] return '\n'.join(ret) def GetRegisterNativesString(self): """Returns the code for RegisterNatives.""" template = Template("""\ ${REGISTER_NATIVES_SIGNATURE} { ${EARLY_EXIT} ${CLASSES} ${NATIVES} ${CALLED_BY_NATIVES} return true; } """) signature = 'static bool RegisterNativesImpl(JNIEnv* env' if self.init_native: signature += ', jclass clazz)' else: signature += ')' early_exit = '' if self.options.native_exports_optional: early_exit = """\ if (base::android::IsManualJniRegistrationDisabled()) return true; """ natives = self.GetRegisterNativesImplString() called_by_natives = self.GetRegisterCalledByNativesImplString() values = {'REGISTER_NATIVES_SIGNATURE': signature, 'EARLY_EXIT': early_exit, 'CLASSES': self.GetFindClasses(), 'NATIVES': natives, 'CALLED_BY_NATIVES': called_by_natives, } return template.substitute(values) def GetRegisterNativesImplString(self): """Returns the shared implementation for RegisterNatives.""" if self.options.native_exports and not self.options.native_exports_optional: return '' template = Template("""\ const int kMethods${JAVA_CLASS}Size = sizeof(kMethods${JAVA_CLASS})/sizeof(kMethods${JAVA_CLASS}[0]); if (env->RegisterNatives(${JAVA_CLASS}_clazz(env), kMethods${JAVA_CLASS}, kMethods${JAVA_CLASS}Size) < 0) { //jni_generator::HandleRegistrationError( // env, ${JAVA_CLASS}_clazz(env), __FILE__); return false; } """) return self.SubstituteNativeMethods(template) def GetJNIRegisterNativesString(self): """Returns the implementation for the JNI registration of native methods.""" if not self.init_native: return '' template = Template("""\ extern "C" JNIEXPORT bool JNICALL Java_${FULLY_QUALIFIED_CLASS}_${INIT_NATIVE_NAME}(JNIEnv* env, jclass clazz) { return ${NAMESPACE}RegisterNativesImpl(env, clazz); } """) if self.options.native_exports: java_name = JniParams.RemapClassName(self.fully_qualified_class) java_name = java_name.replace('_', '_1').replace('/', '_') else: java_name = self.fully_qualified_class.replace('/', '_') namespace = '' if self.namespace: namespace = self.namespace + '::' values = {'FULLY_QUALIFIED_CLASS': java_name, 'INIT_NATIVE_NAME': 'native' + self.init_native.name, 'NAMESPACE': namespace, 'REGISTER_NATIVES_IMPL': self.GetRegisterNativesImplString() } return template.substitute(values) def GetOpenNamespaceString(self): if self.namespace: all_namespaces = ['namespace %s {' % ns for ns in 
self.namespace.split('::')] return '\n'.join(all_namespaces) return '' def GetCloseNamespaceString(self): if self.namespace: all_namespaces = ['} // namespace %s' % ns for ns in self.namespace.split('::')] all_namespaces.reverse() return '\n'.join(all_namespaces) + '\n' return '' def GetJNIFirstParam(self, native): ret = [] if native.type == 'method': ret = ['jobject jcaller'] elif native.type == 'function': if native.static: ret = ['jclass jcaller'] else: ret = ['jobject jcaller'] return ret def GetParamsInDeclaration(self, native): """Returns the params for the stub declaration. Args: native: the native dictionary describing the method. Returns: A string containing the params. """ return ',\n '.join(self.GetJNIFirstParam(native) + [JavaDataTypeToC(param.datatype) + ' ' + param.name for param in native.params]) def GetCalledByNativeParamsInDeclaration(self, called_by_native): return ',\n '.join([ JavaDataTypeToCForCalledByNativeParam(param.datatype) + ' ' + param.name for param in called_by_native.params]) def GetStubName(self, native): """Return the name of the stub function for this native method. Args: native: the native dictionary describing the method. Returns: A string with the stub function name. For native exports mode this is the Java_* symbol name required by the JVM; otherwise it is just the name of the native method itself. """ if self.options.native_exports: template = Template("Java_${JAVA_NAME}_native${NAME}") java_name = JniParams.RemapClassName(self.fully_qualified_class) java_name = java_name.replace('_', '_1').replace('/', '_') if native.java_class_name: java_name += '_00024' + native.java_class_name values = {'NAME': native.name, 'JAVA_NAME': java_name} return template.substitute(values) else: return native.name def GetForwardDeclaration(self, native): template_str = """ static ${RETURN} ${NAME}(JNIEnv* env, ${PARAMS}); """ if self.options.native_exports: template_str += """ __attribute__((visibility("default"))) ${RETURN} ${STUB_NAME}(JNIEnv* env, ${PARAMS}) { return ${NAME}(${PARAMS_IN_CALL}); } """ template = Template(template_str) params_in_call = [] if not self.options.pure_native_methods: params_in_call = ['env', 'jcaller'] params_in_call = ', '.join(params_in_call + [p.name for p in native.params]) values = {'RETURN': JavaDataTypeToC(native.return_type), 'NAME': native.name, 'PARAMS': self.GetParamsInDeclaration(native), 'PARAMS_IN_CALL': params_in_call, 'STUB_NAME': self.GetStubName(native)} return template.substitute(values) def GetNativeMethodStubString(self, native): """Returns stubs for native methods.""" if self.options.native_exports: template_str = """\ __attribute__((visibility("default"))) ${RETURN} ${STUB_NAME}(JNIEnv* env, ${PARAMS_IN_DECLARATION}) {""" else: template_str = """\ static ${RETURN} ${STUB_NAME}(JNIEnv* env, ${PARAMS_IN_DECLARATION}) {""" template_str += """ ${P0_TYPE}* native = reinterpret_cast<${P0_TYPE}*>(${PARAM0_NAME}); CHECK_NATIVE_PTR(env, jcaller, native, "${NAME}"${OPTIONAL_ERROR_RETURN}); return native->${NAME}(${PARAMS_IN_CALL})${POST_CALL}; } """ template = Template(template_str) params = [] if not self.options.pure_native_methods: params = ['env', 'jcaller'] params_in_call = ', '.join(params + [p.name for p in native.params[1:]]) return_type = JavaDataTypeToC(native.return_type) optional_error_return = JavaReturnValueToC(native.return_type) if optional_error_return: optional_error_return = ', ' + optional_error_return post_call = '' if re.match(RE_SCOPED_JNI_RETURN_TYPES, return_type): post_call = '.Get()' values = { 
'RETURN': return_type, 'OPTIONAL_ERROR_RETURN': optional_error_return, 'NAME': native.name, 'PARAMS_IN_DECLARATION': self.GetParamsInDeclaration(native), 'PARAM0_NAME': native.params[0].name, 'P0_TYPE': native.p0_type, 'PARAMS_IN_CALL': params_in_call, 'POST_CALL': post_call, 'STUB_NAME': self.GetStubName(native), } return template.substitute(values) def GetArgument(self, param): return ('int(' + param.name + ')' if param.datatype == 'int' else param.name) def GetArgumentsInCall(self, params): """Return a string of arguments to call from native into Java""" return [self.GetArgument(p) for p in params] def GetCalledByNativeValues(self, called_by_native): """Fills in necessary values for the CalledByNative methods.""" java_class = called_by_native.java_class_name or self.class_name if called_by_native.static or called_by_native.is_constructor: first_param_in_declaration = '' first_param_in_call = ('%s_clazz(env)' % java_class) else: first_param_in_declaration = ', jobject obj' first_param_in_call = 'obj' params_in_declaration = self.GetCalledByNativeParamsInDeclaration( called_by_native) if params_in_declaration: params_in_declaration = ', ' + params_in_declaration params_in_call = ', '.join(self.GetArgumentsInCall(called_by_native.params)) if params_in_call: params_in_call = ', ' + params_in_call pre_call = '' post_call = '' if called_by_native.static_cast: pre_call = 'static_cast<%s>(' % called_by_native.static_cast post_call = ')' check_exception = '' if not called_by_native.unchecked: check_exception = 'base::android::CheckException(env);' return_type = JavaDataTypeToC(called_by_native.return_type) optional_error_return = JavaReturnValueToC(called_by_native.return_type) if optional_error_return: optional_error_return = ', ' + optional_error_return return_declaration = '' return_clause = '' if return_type != 'void': pre_call = ' ' + pre_call return_declaration = return_type + ' ret =' if re.match(RE_SCOPED_JNI_RETURN_TYPES, return_type): return_type = 'base::android::ScopedLocalJavaRef<' + return_type + '>' return_clause = 'return ' + return_type + '(env, ret);' else: return_clause = 'return ret;' return { 'JAVA_CLASS': java_class, 'RETURN_TYPE': return_type, 'OPTIONAL_ERROR_RETURN': optional_error_return, 'RETURN_DECLARATION': return_declaration, 'RETURN_CLAUSE': return_clause, 'FIRST_PARAM_IN_DECLARATION': first_param_in_declaration, 'PARAMS_IN_DECLARATION': params_in_declaration, 'PRE_CALL': pre_call, 'POST_CALL': post_call, 'ENV_CALL': called_by_native.env_call, 'FIRST_PARAM_IN_CALL': first_param_in_call, 'PARAMS_IN_CALL': params_in_call, 'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name, 'CHECK_EXCEPTION': check_exception, 'GET_METHOD_ID_IMPL': self.GetMethodIDImpl(called_by_native) } def GetEagerCalledByNativeMethodStub(self, called_by_native): """Returns the implementation of the called by native method.""" template = Template(""" static ${RETURN_TYPE} ${METHOD_ID_VAR_NAME}(\ JNIEnv* env${FIRST_PARAM_IN_DECLARATION}${PARAMS_IN_DECLARATION}) { ${RETURN_DECLARATION}${PRE_CALL}env->${ENV_CALL}(${FIRST_PARAM_IN_CALL}, g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME}${PARAMS_IN_CALL})${POST_CALL}; ${RETURN_CLAUSE} }""") values = self.GetCalledByNativeValues(called_by_native) return template.substitute(values) def GetLazyCalledByNativeMethodStub(self, called_by_native): """Returns a string.""" function_signature_template = Template("""\ static ${RETURN_TYPE} Java_${JAVA_CLASS}_${METHOD_ID_VAR_NAME}(\ JNIEnv* env${FIRST_PARAM_IN_DECLARATION}${PARAMS_IN_DECLARATION})""") 
function_header_template = Template("""\ ${FUNCTION_SIGNATURE} {""") function_header_with_unused_template = Template("""\ ${FUNCTION_SIGNATURE} __attribute__ ((unused)); ${FUNCTION_SIGNATURE} {""") template = Template(""" static intptr_t g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} = 0; ${FUNCTION_HEADER} /* Must call RegisterNativesImpl() */ //CHECK_CLAZZ(env, ${FIRST_PARAM_IN_CALL}, // ${JAVA_CLASS}_clazz(env)${OPTIONAL_ERROR_RETURN}); jmethodID method_id = ${GET_METHOD_ID_IMPL} ${RETURN_DECLARATION} ${PRE_CALL}env->${ENV_CALL}(${FIRST_PARAM_IN_CALL}, method_id${PARAMS_IN_CALL})${POST_CALL}; ${CHECK_EXCEPTION} ${RETURN_CLAUSE} }""") values = self.GetCalledByNativeValues(called_by_native) values['FUNCTION_SIGNATURE'] = ( function_signature_template.substitute(values)) if called_by_native.system_class: values['FUNCTION_HEADER'] = ( function_header_with_unused_template.substitute(values)) else: values['FUNCTION_HEADER'] = function_header_template.substitute(values) return template.substitute(values) def GetKMethodArrayEntry(self, native): template = Template(' { "native${NAME}", ${JNI_SIGNATURE}, ' + 'reinterpret_cast<void*>(${STUB_NAME}) },') values = {'NAME': native.name, 'JNI_SIGNATURE': JniParams.Signature(native.params, native.return_type, True), 'STUB_NAME': self.GetStubName(native)} return template.substitute(values) def GetUniqueClasses(self, origin): ret = {self.class_name: self.fully_qualified_class} for entry in origin: class_name = self.class_name jni_class_path = self.fully_qualified_class if entry.java_class_name: class_name = entry.java_class_name jni_class_path = self.fully_qualified_class + '$' + class_name ret[class_name] = jni_class_path return ret def GetClassPathDefinitions(self): """Returns the ClassPath constants.""" ret = [] template = Template("""\ const char k${JAVA_CLASS}ClassPath[] = "${JNI_CLASS_PATH}";""") native_classes = self.GetUniqueClasses(self.natives) called_by_native_classes = self.GetUniqueClasses(self.called_by_natives) if self.options.native_exports: all_classes = called_by_native_classes else: all_classes = native_classes all_classes.update(called_by_native_classes) for clazz in all_classes: values = { 'JAVA_CLASS': clazz, 'JNI_CLASS_PATH': JniParams.RemapClassName(all_classes[clazz]), } ret += [template.substitute(values)] ret += '' class_getter_methods = [] if self.options.native_exports: template = Template("""\ // Leaking this jclass as we cannot use LazyInstance from some threads. base::subtle::AtomicWord g_${JAVA_CLASS}_clazz __attribute__((unused)) = 0; #define ${JAVA_CLASS}_clazz(env) \ base::android::LazyGetClass(env, k${JAVA_CLASS}ClassPath, \ &g_${JAVA_CLASS}_clazz)""") else: template = Template("""\ // Leaking this jclass as we cannot use LazyInstance from some threads. 
jclass g_${JAVA_CLASS}_clazz = NULL;
#define ${JAVA_CLASS}_clazz(env) g_${JAVA_CLASS}_clazz""")
    for clazz in called_by_native_classes:
      values = {
          'JAVA_CLASS': clazz,
      }
      ret += [template.substitute(values)]
    return '\n'.join(ret)

  def GetFindClasses(self):
    """Returns the implementation of FindClass for all known classes."""
    if self.init_native:
      if self.options.native_exports:
        template = Template("""\
  base::subtle::Release_Store(&g_${JAVA_CLASS}_clazz,
      static_cast<base::subtle::AtomicWord>(env->NewWeakGlobalRef(clazz)));""")
      else:
        template = Template("""\
  g_${JAVA_CLASS}_clazz = static_cast<jclass>(env->NewWeakGlobalRef(clazz));""")
    else:
      if self.options.native_exports:
        return '\n'
      template = Template("""\
  g_${JAVA_CLASS}_clazz = reinterpret_cast<jclass>(env->NewGlobalRef(
      base::android::GetClass(env, k${JAVA_CLASS}ClassPath).Get()));""")
    ret = []
    for clazz in self.GetUniqueClasses(self.called_by_natives):
      values = {'JAVA_CLASS': clazz}
      ret += [template.substitute(values)]
    return '\n'.join(ret)

  def GetMethodIDImpl(self, called_by_native):
    """Returns the implementation of GetMethodID."""
    if self.options.eager_called_by_natives:
      template = Template("""\
env->Get${STATIC_METHOD_PART}MethodID(
    ${JAVA_CLASS}_clazz(env),
    "${JNI_NAME}", ${JNI_SIGNATURE});""")
    else:
      template = Template("""\
  base::android::GetMethod(
      env, ${JAVA_CLASS}_clazz(env),
      base::android::${STATIC}_METHOD,
      "${JNI_NAME}",
      ${JNI_SIGNATURE},
      &g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME});
""")
    jni_name = called_by_native.name
    jni_return_type = called_by_native.return_type
    if called_by_native.is_constructor:
      jni_name = '<init>'
      jni_return_type = 'void'
    if called_by_native.signature:
      signature = called_by_native.signature
    else:
      signature = JniParams.Signature(called_by_native.params,
                                      jni_return_type,
                                      True)
    values = {
        'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
        'JNI_NAME': jni_name,
        'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
        'STATIC': 'STATIC' if called_by_native.static else 'INSTANCE',
        'STATIC_METHOD_PART': 'Static' if called_by_native.static else '',
        'JNI_SIGNATURE': signature,
    }
    return template.substitute(values)


def WrapOutput(output):
  ret = []
  for line in output.splitlines():
    # Do not wrap lines under 80 characters or preprocessor directives.
    if len(line) < 80 or line.lstrip()[:1] == '#':
      stripped = line.rstrip()
      if len(ret) == 0 or len(ret[-1]) or len(stripped):
        ret.append(stripped)
    else:
      first_line_indent = ' ' * (len(line) - len(line.lstrip()))
      subsequent_indent = first_line_indent + ' ' * 4
      if line.startswith('//'):
        subsequent_indent = '//' + subsequent_indent
      wrapper = textwrap.TextWrapper(width=80,
                                     subsequent_indent=subsequent_indent,
                                     break_long_words=False)
      ret += [wrapped.rstrip() for wrapped in wrapper.wrap(line)]
  ret += ['']
  return '\n'.join(ret)


def ExtractJarInputFile(jar_file, input_file, out_dir):
  """Extracts input file from jar and returns the filename.

  The input file is extracted to the same directory that the generated jni
  headers will be placed in. This is passed as an argument to script.

  Args:
    jar_file: the jar file containing the input file to extract.
    input_file: the file to extract from the jar file.
    out_dir: the name of the directory to extract to.

  Returns:
    the name of the extracted input file.
""" jar_file = zipfile.ZipFile(jar_file) out_dir = os.path.join(out_dir, os.path.dirname(input_file)) try: os.makedirs(out_dir) except OSError as e: if e.errno != errno.EEXIST: raise extracted_file_name = os.path.join(out_dir, os.path.basename(input_file)) with open(extracted_file_name, 'w') as outfile: outfile.write(jar_file.read(input_file)) return extracted_file_name def GenerateJNIHeader(input_file, output_file, options): try: if os.path.splitext(input_file)[1] == '.class': jni_from_javap = JNIFromJavaP.CreateFromClass(input_file, options) content = jni_from_javap.GetContent() else: jni_from_java_source = JNIFromJavaSource.CreateFromFile( input_file, options) content = jni_from_java_source.GetContent() except ParseError, e: print e sys.exit(1) if output_file: if not os.path.exists(os.path.dirname(os.path.abspath(output_file))): os.makedirs(os.path.dirname(os.path.abspath(output_file))) if options.optimize_generation and os.path.exists(output_file): with file(output_file, 'r') as f: existing_content = f.read() if existing_content == content: return with file(output_file, 'w') as f: f.write(content) else: print output def GetScriptName(): script_components = os.path.abspath(sys.argv[0]).split(os.path.sep) base_index = 0 for idx, value in enumerate(script_components): if value == 'base' or value == 'third_party': base_index = idx break return os.sep.join(script_components[base_index:]) class Opitions(object): def __init__(self): self.ptr_type = 'int' self.jni_init_native_name = '' self.script_name = GetScriptName() self.native_exports = '' self.eager_called_by_natives = '' self.pure_native_methods = '' self.native_exports_optional = '' self.includes = '' self.optimize_generation = '' self.namespace = '' if __name__ == '__main__': """sys.exit(main(sys.argv))""" if(len(sys.argv) == 3): options = Opitions() GenerateJNIHeader(sys.argv[1], sys.argv[2], options)
mit
1,661,477,151,239,666,400
34.739249
103
0.6196
false
gnumdk/eolie
eolie/art.py
1
6729
# Copyright (c) 2017 Cedric Bellegarde <cedric.bellegarde@adishatz.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from gi.repository import Gdk, GdkPixbuf, Gio, GLib

from hashlib import sha256
from time import time
from urllib.parse import urlparse

from eolie.define import EOLIE_CACHE_PATH
from eolie.utils import remove_www


class Art:
    """
        Base art manager
    """

    __CACHE_DELTA = 43200

    def __init__(self):
        """
            Init base art
        """
        self.__use_cache = True
        self.__create_cache()

    def disable_cache(self):
        """
            Disable cache
        """
        self.__use_cache = False

    def save_artwork(self, uri, surface, suffix):
        """
            Save artwork for uri with suffix
            @param uri as str
            @param surface as cairo.surface
            @param suffix as str
        """
        try:
            filepath = self.get_path(uri, suffix)
            pixbuf = Gdk.pixbuf_get_from_surface(surface, 0, 0,
                                                 surface.get_width(),
                                                 surface.get_height())
            pixbuf.savev(filepath, "png", [None], [None])
        except Exception as e:
            print("Art::save_artwork():", e)

    def get_artwork(self, uri, suffix, scale_factor, width, height):
        """
            @param uri as str
            @param suffix as str
            @param scale_factor as int
            @param width as int
            @param height as int
            @return cairo.surface
        """
        if uri is None:
            return None
        filepath = self.get_path(uri, suffix)
        try:
            if GLib.file_test(filepath, GLib.FileTest.IS_REGULAR):
                pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_scale(filepath,
                                                                 width,
                                                                 height,
                                                                 True)
                surface = Gdk.cairo_surface_create_from_pixbuf(pixbuf,
                                                               scale_factor,
                                                               None)
                return surface
        except:
            pass
        return None

    def get_icon_theme_artwork(self, uri, ephemeral):
        """
            Get artwork from icon theme
            @param uri as str
            @param ephemeral as bool
            @return artwork as str/None
        """
        if ephemeral:
            return "user-not-tracked-symbolic"
        elif uri == "populars://":
            return "emote-love-symbolic"
        elif uri == "about://":
            return "applications-internet"
        else:
            return None

    def get_favicon_path(self, uri):
        """
            Return favicon cache path for uri
            @param uri as str/None
            @return str/None
        """
        if uri is None:
            return None
        for favicon_type in ["favicon", "favicon_alt"]:
            favicon_path = self.get_path(uri, favicon_type)
            if GLib.file_test(favicon_path, GLib.FileTest.IS_REGULAR):
                return favicon_path
        return None

    def get_path(self, uri, suffix):
        """
            Return cache image path
            @param uri as str
            @param suffix as str
            @return str/None
        """
        if uri is None:
            return None
        parsed = urlparse(uri)
        cached_uri = remove_www(parsed.netloc)
        cached_path = parsed.path.rstrip("/")
        if cached_path:
            cached_uri += cached_path
        encoded = sha256(cached_uri.encode("utf-8")).hexdigest()
        filepath = "%s/%s_%s.png" % (EOLIE_CACHE_PATH, encoded, suffix)
        return filepath

    def exists(self, uri, suffix):
        """
            Check if file exists and is cached
            @param uri as str
            @param suffix as str
            @return (exists as bool, cached as bool)
        """
        f = Gio.File.new_for_path(self.get_path(uri, suffix))
        exists = f.query_exists()
        if exists and self.__use_cache:
            info = f.query_info('time::modified',
                                Gio.FileQueryInfoFlags.NONE,
                                None)
            mtime = int(info.get_attribute_as_string('time::modified'))
            return (True, time() - mtime < self.__CACHE_DELTA)
        else:
            return (False, False)

    def vacuum(self):
        """
            Remove artwork older than 1 month
        """
        current_time = time()
        try:
            d = Gio.File.new_for_path(EOLIE_CACHE_PATH)
            children = d.enumerate_children("standard::name",
                                            Gio.FileQueryInfoFlags.NONE,
                                            None)
            for child in children:
                f = children.get_child(child)
                if child.get_file_type() == Gio.FileType.REGULAR:
                    info = f.query_info("time::modified",
                                        Gio.FileQueryInfoFlags.NONE,
                                        None)
                    mtime = info.get_attribute_uint64("time::modified")
                    if current_time - mtime > 2592000:
                        f.delete()
        except Exception as e:
            print("Art::vacuum():", e)

    @property
    def base_uri(self):
        """
            Get cache base uri
            @return str
        """
        return GLib.filename_to_uri(EOLIE_CACHE_PATH)

    #######################
    # PROTECTED           #
    #######################

    #######################
    # PRIVATE             #
    #######################
    def __create_cache(self):
        """
            Create cache dir
        """
        if not GLib.file_test(EOLIE_CACHE_PATH, GLib.FileTest.IS_DIR):
            try:
                GLib.mkdir_with_parents(EOLIE_CACHE_PATH, 0o0750)
            except Exception as e:
                print("Art::__create_cache():", e)
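
# Hedged usage sketch (added for illustration, not part of the original
# module). The URI below is hypothetical; get_path() is deterministic, so
# the same URI always maps to the same cache file under EOLIE_CACHE_PATH.
def _art_cache_demo():
    art = Art()
    # Compute where the favicon for this URI would be cached
    favicon = art.get_path("https://www.example.org/news/", "favicon")
    print("favicon cache file:", favicon)
    # Check whether it is on disk and still within the cache delta
    exists, fresh = art.exists("https://www.example.org/news/", "favicon")
    print("cached:", exists, "still fresh:", fresh)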
gpl-3.0
-2,079,920,480,432,212,500
33.331633
76
0.49413
false
djs55/planex
planex/debiancontrol.py
1
3069
""" Utility functions for generating Debian control files from RPM spec files. """ from planex.tree import Tree from planex import mappkgname import textwrap STANDARDS_VERSION = "3.9.3" def control_from_spec(spec): """ Create the contents of the debian/control file from spec. """ res = Tree() source_deb_from_spec(spec, res) for pkg in spec.packages: binary_deb_from_spec(pkg, res) return res def source_deb_from_spec(spec, tree): """ Create the source package stanza of the debian/source file from spec. """ res = "" res += "Source: %s\n" % \ mappkgname.map_package(spec.sourceHeader['name'])[0] res += "Priority: %s\n" % "optional" res += "Maintainer: %s\n" % "Euan Harris <euan.harris@citrix.com>" res += "Section: %s\n" % mappkgname.map_section(spec.sourceHeader['group']) res += "Standards-Version: %s\n" % STANDARDS_VERSION res += "Build-Depends:\n" build_depends = ["debhelper (>= 8)", "dh-ocaml (>= 0.9)", "ocaml-nox", "python"] for pkg, version in zip(spec.sourceHeader['requires'], spec.sourceHeader['requireVersion']): deps = mappkgname.map_package(pkg) for dep in deps: if version: dep += " (>= %s)" % version build_depends.append(dep) res += ",\n".join(set([" %s" % d for d in build_depends])) res += "\n\n" tree.append('debian/control', res) def binary_deb_from_spec(spec, tree): """ Create the binary package stanza of the debian/source file from spec. """ res = "" res += "Package: %s\n" % mappkgname.map_package_name(spec.header) if spec.header['arch'] in ["x86_64", "i686", "armhf", "armv7l"]: res += "Architecture: any\n" else: res += "Architecture: all\n" res += "Depends:\n" depends = ["${ocaml:Depends}", "${shlibs:Depends}", "${misc:Depends}"] for pkg, version in zip(spec.header['requires'], spec.header['requireVersion']): deps = mappkgname.map_package(pkg) for dep in deps: if version: dep += " (>= %s)" % version depends.append(dep) res += ",\n".join([" %s" % d for d in depends]) res += "\n" # XXX These lines should only be added for ocaml packages res += "Provides: ${ocaml:Provides}\n" res += "Recommends: ocaml-findlib\n" res += "Description: %s\n" % spec.header['summary'] res += format_description(spec.header['description']) res += "\n\n" tree.append('debian/control', res) def format_description(description): """ Format the package description to suit Debian constraints: correct line length; initial one space indent; blank lines must be replaced by dots """ paragraphs = "".join(description).split("\n\n") wrapped = ["\n".join(textwrap.wrap(p, initial_indent=" ", subsequent_indent=" ")) for p in paragraphs] return "\n .\n".join(wrapped)
lgpl-2.1
-5,583,763,980,021,387,000
30
79
0.576735
false
uvemas/ViTables
vitables/csv/export_csv.py
1
11064
# Copyright (C) 2008-2019 Vicent Mas. All rights reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Vicent Mas - vmas@vitables.org

"""Plugin that provides export of `tables.Leaf` nodes into `CSV` files.

When exporting tables, a header with the field names can be inserted.

In general, tables/arrays with Ndimensional fields are not exported because
they are not written by np.savetxt() in a way compliant with the CSV format,
in which each line of the file is a data record. Nor are numpy scalar arrays
exported.
"""

import logging
import os

import numpy
import tables
from qtpy import QtCore
from qtpy import QtGui
from qtpy import QtWidgets

import vitables.csv.csvutils as csvutils
import vitables.utils

__docformat__ = 'restructuredtext'

translate = QtWidgets.QApplication.translate

log = logging.getLogger(__name__)


class ExportToCSV(QtCore.QObject):
    """Provides `CSV` export capabilities for arrays.
    """

    def __init__(self):
        """The class constructor.
        """

        super(ExportToCSV, self).__init__()

        # Get a reference to the application instance
        self.vtapp = vitables.utils.getVTApp()
        if self.vtapp is None:
            return

        self.vtgui = vitables.utils.getGui()

        # Add an entry under the Dataset menu
        self.icons_dictionary = vitables.utils.getIcons()
        self.addEntry()

        # Connect signals to slots
        self.vtgui.dataset_menu.aboutToShow.connect(self.updateDatasetMenu)
        self.vtgui.leaf_node_cm.aboutToShow.connect(self.updateDatasetMenu)

    def addEntry(self):
        """Add the `Export to CSV...` entry to the `Dataset` menu.
        """

        self.export_csv_action = QtWidgets.QAction(
            translate('ExportToCSV', "E&xport to CSV...",
                      "Save dataset as CSV"),
            self,
            shortcut=QtGui.QKeySequence.UnknownKey,
            triggered=self.export,
            icon=vitables.utils.getIcons()['document-export'],
            statusTip=translate(
                'ExportToCSV',
                "Save the dataset as a plain text with CSV format",
                "Status bar text for the Dataset -> Export to CSV... action"))
        self.export_csv_action.setObjectName('export_csv')

        # Add the action to the Dataset menu
        vitables.utils.addToMenu(self.vtgui.dataset_menu,
                                 self.export_csv_action)

        # Add the action to the leaf context menu
        vitables.utils.addToLeafContextMenu(self.export_csv_action)

    def updateDatasetMenu(self):
        """Update the `export` QAction when the Dataset menu is pulled down.

        This method is a slot. See class ctor for details.
        """

        enabled = True
        current = self.vtgui.dbs_tree_view.currentIndex()
        if current:
            leaf = self.vtgui.dbs_tree_model.nodeFromIndex(current)
            if leaf.node_kind in ('group', 'root group'):
                enabled = False
        self.export_csv_action.setEnabled(enabled)

    def getExportInfo(self, is_table):
        """Get info about the file where dataset will be stored.

        The info is retrieved from the FileSelector dialog. The returned info
        is the filepath and whether or not a header must be added.
        :Parameter is_table: True if the exported dataset is a tables.Table
            instance
        """

        # Call the file selector (and, if needed, customise it)
        file_selector = vitables.utils.getFileSelector(
            self.vtgui,
            translate('ExportToCSV', 'Exporting dataset to CSV format',
                      'Caption of the Export to CSV dialog'),
            dfilter=translate('ExportToCSV', """CSV Files (*.csv);;"""
                              """All Files (*)""",
                              'Filter for the Export to CSV dialog'),
            settings={'accept_mode': QtWidgets.QFileDialog.AcceptSave,
                      'file_mode': QtWidgets.QFileDialog.AnyFile,
                      'history': self.vtapp.file_selector_history,
                      'label': translate('ExportToCSV', 'Export',
                                         'Accept button text for QFileDialog')}
        )

        # Customise the file selector dialog for exporting to CSV files
        if is_table:
            # We can get the layout of Qt dialogs but not of native dialogs
            file_selector.setOption(QtWidgets.QFileDialog.DontUseNativeDialog,
                                    True)
            fs_layout = file_selector.layout()
            header_label = QtWidgets.QLabel('Add header:', file_selector)
            header_cb = QtWidgets.QCheckBox(file_selector)
            header_cb.setChecked(True)
            fs_layout.addWidget(header_label, 4, 0)
            fs_layout.addWidget(header_cb, 4, 1)

        # Execute the dialog
        try:
            if file_selector.exec_():  # OK clicked
                filepath = file_selector.selectedFiles()[0]
                # Make sure filepath contains no backslashes
                filepath = QtCore.QDir.fromNativeSeparators(filepath)
                filepath = csvutils.checkFilenameExtension(filepath)
                # Update the working directory
                working_dir = file_selector.directory().canonicalPath()
            else:  # Cancel clicked
                filepath = working_dir = ''
        finally:
            add_header = False
            if is_table:
                add_header = header_cb.isChecked()
            del file_selector

        # Process the returned values
        if not filepath:
            # The user has canceled the dialog
            return

        # Update the history of the file selector widget
        self.vtapp.updateFSHistory(working_dir)

        # Check the returned path
        if os.path.exists(filepath):
            log.error(translate(
                'ExportToCSV',
                'Export failed because destination file already exists.',
                'A file creation error'))
            return
        if os.path.isdir(filepath):
            log.error(translate(
                'ExportToCSV',
                'Export failed because destination container is a directory.',
                'A file creation error'))
            return

        return filepath, add_header

    # def _try_exporting_dataframe(self, leaf):
    #     ## FIXME: Hack to export to csv.
    #
    #     from ...vttables import df_model
    #
    #     leaf_model = df_model.try_opening_as_dataframe(leaf)
    #     if not leaf_model:
    #         return
    #
    #     export_info = self.getExportInfo(is_table=True)
    #     if export_info is None:
    #         return
    #
    #     leaf_model.to_csv(*export_info)
    #     return True

    def export(self):
        """Export a given dataset to a `CSV` file.

        This method is a slot connected to the `export` QAction. See the
        :meth:`addEntry` method for details.
        """

        # The PyTables node tied to the current leaf of the databases tree
        current = self.vtgui.dbs_tree_view.currentIndex()
        leaf = self.vtgui.dbs_tree_model.nodeFromIndex(current).node

        # Empty datasets aren't saved as CSV files
        if leaf.nrows == 0:
            log.info(translate(
                'ExportToCSV', 'Empty dataset. Nothing to export.'))
            return

        # Scalar arrays aren't saved as CSV files
        if leaf.shape == ():
            log.info(translate(
                'ExportToCSV', 'Scalar array. Nothing to export.'))
            return

        # Datasets with more than 3 dimensions aren't saved as CSV files
        # (see module's docstring)
        if len(leaf.shape) > 3:
            log.info(translate(
                'ExportToCSV', 'The selected node has more than '
                '3 dimensions. I can\'t export it to CSV format.'))
            return

        # Variable length arrays aren't saved as CSV files
        if isinstance(leaf, tables.VLArray):
            log.info(translate(
                'ExportToCSV', 'The selected node is a VLArray. '
                'I can\'t export it to CSV format.'))
            return

        # Tables with Ndimensional fields aren't saved as CSV files
        is_table = isinstance(leaf, tables.Table)
        if is_table:
            # The _try_exporting_dataframe helper is commented out above, so
            # guard the call to keep plain table exports working
            if (hasattr(self, '_try_exporting_dataframe')
                    and self._try_exporting_dataframe(leaf)):
                return
            first_row = leaf[0]
            for item in first_row:
                if item.shape != ():
                    log.info(translate(
                        'ExportToCSV', 'Some fields aren\'t scalars. '
                        'I can\'t export the table to CSV format.'))
                    return

        # Get the required info for exporting the dataset
        export_info = self.getExportInfo(is_table)
        if export_info is None:
            return
        else:
            filepath, add_header = export_info

        try:
            QtWidgets.qApp.setOverrideCursor(QtCore.Qt.WaitCursor)
            with open(filepath, 'ab') as out_handler:
                if add_header:
                    from functools import reduce
                    header = reduce(lambda x, y: '{0}, {1}'.format(x, y),
                                    leaf.colnames)
                    # To be consistent with numpy.savetxt use \n line breaks
                    out_handler.write(bytearray(header + '\n', 'UTF-8'))
                chunk_size = 10000
                nrows = leaf.nrows
                if chunk_size > nrows:
                    chunk_size = nrows
                # Behavior of np.divide in Python 2 and Python 3 is different
                # so we must explicitly ensure we get an integer
                nchunks = numpy.floor_divide(nrows, chunk_size)
                for i in numpy.arange(0, nchunks + 1):
                    QtWidgets.qApp.processEvents()
                    cstart = chunk_size * i
                    if cstart >= nrows:
                        break
                    cstop = cstart + chunk_size
                    if cstop > nrows:
                        cstop = nrows
                    numpy.savetxt(out_handler,
                                  leaf.read(cstart, cstop, 1),
                                  fmt='%s', delimiter=',')
        except OSError:
            vitables.utils.formatExceptionInfo()
        finally:
            QtWidgets.qApp.restoreOverrideCursor()
gpl-3.0
3,280,593,357,724,348,400
36.505085
81
0.574657
false
flockchat/pyflock
flockos/models/image.py
1
1421
# coding: utf-8

from pprint import pformat

from ..utils import to_dict


class Image(object):
    def __init__(self, src=None, width=None, height=None):
        self._src = src
        self._width = width
        self._height = height

    @property
    def src(self):
        return self._src

    @src.setter
    def src(self, src):
        if src is None:
            raise ValueError("Invalid value for `src`, must not be `None`")
        self._src = src

    @property
    def width(self):
        return self._width

    @width.setter
    def width(self, width):
        self._width = width

    @property
    def height(self):
        return self._height

    @height.setter
    def height(self, height):
        self._height = height

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        return to_dict(self.__dict__)

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
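
# Hedged usage sketch (added for illustration, not part of the generated
# client). The URL is hypothetical; to_dict() serialises the private
# attributes via the shared helper, and __eq__ compares attribute dicts.
def _image_demo():
    img = Image(src="https://example.com/logo.png", width=64, height=64)
    print(img.to_dict())
    same = Image(src="https://example.com/logo.png", width=64, height=64)
    print(img == same)  # True: equality is attribute-wise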
apache-2.0
-9,079,618,273,298,583,000
18.202703
75
0.525686
false
Denon/syncPlaylist
WYtoQQ.py
1
5106
import re
from time import sleep

from base import BaseSpider
from settings import *
from urllib2 import quote

from api.wy import get_playlist_detail
from utils import _print, retry, RetryException


class WYtoQQ(BaseSpider):

    @retry(retry_times=3, notice_message="login failed and retry")
    def prepare(self):
        self.browser.get("https://y.qq.com")
        self.browser.set_window_size(1920, 1080)
        self.wait.until(lambda browser: browser.find_element_by_xpath("/html/body/div[1]/div/div[2]/span/a[2]"))
        self.browser.find_element_by_xpath("/html/body/div[1]/div/div[2]/span/a[2]").click()
        self.wait.until(lambda browser: browser.find_element_by_id("frame_tips"))
        self.browser.switch_to.frame("frame_tips")
        self.wait.until(lambda browser: browser.find_element_by_id("switcher_plogin"))
        sleep(0.5)
        self.browser.find_element_by_id("switcher_plogin").click()
        user_input = self.browser.find_element_by_id("u")
        user_input.send_keys(self.config.account)
        pwd_input = self.browser.find_element_by_id("p")
        pwd_input.send_keys(self.config.password)
        submit = self.browser.find_element_by_id("login_button")
        submit.click()
        sleep(1)
        self.browser.switch_to.default_content()
        self.browser.refresh()
        self.wait.until(lambda browser: browser.find_element_by_class_name("popup_user"))
        user_info = self.browser.find_element_by_class_name("popup_user")
        user_info.find_element_by_css_selector("*")
        print("login success")

    def get_source_playlist(self):
        pattern = re.compile(r'^.*id=(\d*)')
        url = self.config.source_playlist_url
        match = pattern.search(url)
        if match:
            playlist_id = match.groups()[0]
        else:
            raise Exception("can not find id, please check wy url!!!")
        detail = get_playlist_detail(playlist_id)
        song_list = detail['playlist']['tracks']
        song_details = list()
        for song in song_list:
            ar_name = list()
            song_name = song['name']
            for ar in song['ar']:
                ar_name.append(ar['name'])
            album = ''
            song_details.append((song_name, ' '.join(ar_name), album))

        # response = requests.get(self.config.source_playlist_url.replace('#', 'm'), headers=headers)
        # html = response.content
        # soup = BeautifulSoup(html, "html.parser")
        # details = soup.select("span[class='detail']")
        # song_details = list()
        # for detail in details:
        #     song_text = detail.text
        #     song_detail = song_text.strip('\n').split('\n\n')
        #
        #     song = song_detail[0]
        #     singer = song_detail[1].split('- ', 1)[0]
        #     # don't use album yet
        #     album = ''
        #     song_details.append((song, singer.strip('\n'), album))
        print("get 163 playlist success")
        self.source_playlist = song_details

    def get_target_playlist(self):
        # self.browser.get(self.config.target_playlist_url)
        # self.wait.until(lambda browser: browser.find_element_by_class_name("playlist__list"))
        # playlist = self.browser.find_element_by_class_name("playlist__list")
        # playlist_items = playlist.find_elements_by_class_name('playlist__item')
        #
        # for item in playlist_items:
        #     title = item.find_element_by_class_name('playlist__title').text
        #     item_id = item.get_attribute('data-dirid')
        #     if title == self.config.qq_playlist_name:
        #         self.target_playlist_tag = item_id
        #         return
        # else:
        #     raise Exception("can not find qq playlist:{}, please check".format(self.config.qq_playlist_name))
        self.target_playlist_tag = self.config.target_playlist_url.split('dirid=')[-1]
        return

    def sync_song(self):
        for song_detail in self.source_playlist:
            song = song_detail[0]
            singer = song_detail[1]
            search_word = u"{} {}".format(song, singer)
            url_sw = quote(search_word.encode('utf8'))
            self.browser.get(qq_search_url.format(url_sw))
            self.wait.until(lambda browser:
browser.find_element_by_class_name("songlist__list")) sleep(0.5) @retry(retry_times=3) def _add(browser): browser.execute_script("document.getElementsByClassName('songlist__list')[0].firstElementChild.getElementsByClassName('list_menu__add')[0].click()") sleep(0.5) browser.find_element_by_css_selector("a[data-dirid='{}']".format(self.target_playlist_tag)).click() _print(u"song:{} success".format(song)) try: _add(self.browser) except RetryException: _print(u"song:{}, sync error".format(song)) self.failed_list.append(search_word) else: self.success_list.append(search_word) if __name__ == '__main__': WYtoQQ().run()
mit
7,395,140,910,663,368,000
43.017241
164
0.591069
false
tanium/pytan
EXAMPLES/PYTAN_API/invalid_export_basetype_csv_bad_sort_type.py
1
3820
#!/usr/bin/env python
"""
Export a BaseType from getting objects using a bad header_sort
"""
# import the basic python packages we need
import os
import sys
import tempfile
import pprint
import traceback

# disable python from generating a .pyc file
sys.dont_write_bytecode = True

# change me to the path of pytan if this script is not running from EXAMPLES/PYTAN_API
pytan_loc = "~/gh/pytan"
pytan_static_path = os.path.join(os.path.expanduser(pytan_loc), 'lib')

# Determine our script name, script dir
my_file = os.path.abspath(sys.argv[0])
my_dir = os.path.dirname(my_file)

# try to automatically determine the pytan lib directory by assuming it is in '../../lib/'
parent_dir = os.path.dirname(my_dir)
pytan_root_dir = os.path.dirname(parent_dir)
lib_dir = os.path.join(pytan_root_dir, 'lib')

# add pytan_loc and lib_dir to the PYTHONPATH variable
path_adds = [lib_dir, pytan_static_path]
[sys.path.append(aa) for aa in path_adds if aa not in sys.path]

# import pytan
import pytan

# create a dictionary of arguments for the pytan handler
handler_args = {}

# establish our connection info for the Tanium Server
handler_args['username'] = "Administrator"
handler_args['password'] = "Tanium2015!"
handler_args['host'] = "10.0.1.240"
handler_args['port'] = "443"  # optional

handler_args['trusted_certs'] = "certs"

# optional, level 0 is no output except warnings/errors
# level 1 through 12 are more and more verbose
handler_args['loglevel'] = 1

# optional, use a debug format for the logging output (uses two lines per log entry)
handler_args['debugformat'] = False

# optional, this saves all response objects to handler.session.ALL_REQUESTS_RESPONSES
# very useful for capturing the full exchange of XML requests and responses
handler_args['record_all_requests'] = True

# instantiate a handler using all of the arguments in the handler_args dictionary
print "...CALLING: pytan.handler() with args: {}".format(handler_args)
handler = pytan.Handler(**handler_args)

# print out the handler string
print "...OUTPUT: handler string: {}".format(handler)

# setup the arguments for the handler() class
kwargs = {}
kwargs["export_format"] = u'csv'
kwargs["header_sort"] = u'bad'

# setup the arguments for handler.get()
get_kwargs = {
    'name': [
        "Computer Name", "IP Route Details", "IP Address",
        'Folder Contents',
    ],
    'objtype': 'sensor',
}

# get the objects that will provide the basetype that we want to use
print "...CALLING: handler.get() with args: {}".format(get_kwargs)
response = handler.get(**get_kwargs)

# store the basetype object as the obj we want to export
kwargs['obj'] = response

# export the object to a string
print "...CALLING: handler.export_obj() with args {}".format(kwargs)
try:
    handler.export_obj(**kwargs)
except Exception as e:
    print "...EXCEPTION: {}".format(e)

# this should throw an exception of type: pytan.exceptions.HandlerError
# uncomment to see full exception
# traceback.print_exc(file=sys.stdout)

'''STDOUT from running this:
...CALLING: pytan.handler() with args: {'username': 'Administrator', 'record_all_requests': True, 'loglevel': 1, 'debugformat': False, 'host': '10.0.1.240', 'password': 'Tanium2015!', 'port': '443'}
...OUTPUT: handler string: PyTan v2.1.4 Handler for Session to 10.0.1.240:443, Authenticated: True, Platform Version: 6.5.314.4301
...CALLING: handler.get() with args: {'objtype': 'sensor', 'name': ['Computer Name', 'IP Route Details', 'IP Address', 'Folder Contents']}
...CALLING: handler.export_obj() with args {'export_format': u'csv', 'obj': <taniumpy.object_types.sensor_list.SensorList object at
0x11b1ec750>, 'header_sort': u'bad'} ...EXCEPTION: 'header_sort' must be one of [<type 'bool'>, <type 'list'>, <type 'tuple'>], you supplied <type 'unicode'>! ''' '''STDERR from running this: '''
mit
-7,229,140,398,507,273,000
35.730769
198
0.712565
false
indautgrp/frappe
frappe/api.py
1
4534
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import json import frappe import frappe.handler import frappe.client from frappe.utils.response import build_response from frappe import _ from urlparse import urlparse from urllib import urlencode def handle(): """ Handler for `/api` methods ### Examples: `/api/method/{methodname}` will call a whitelisted method `/api/resource/{doctype}` will query a table examples: - `?fields=["name", "owner"]` - `?filters=[["Task", "name", "like", "%005"]]` - `?limit_start=0` - `?limit_page_length=20` `/api/resource/{doctype}/{name}` will point to a resource `GET` will return doclist `POST` will insert `PUT` will update `DELETE` will delete `/api/resource/{doctype}/{name}?run_method={method}` will run a whitelisted controller method """ validate_oauth() parts = frappe.request.path[1:].split("/",3) call = doctype = name = None if len(parts) > 1: call = parts[1] if len(parts) > 2: doctype = parts[2] if len(parts) > 3: name = parts[3] if call=="method": frappe.local.form_dict.cmd = doctype return frappe.handler.handle() elif call=="resource": if "run_method" in frappe.local.form_dict: method = frappe.local.form_dict.pop("run_method") doc = frappe.get_doc(doctype, name) doc.is_whitelisted(method) if frappe.local.request.method=="GET": if not doc.has_permission("read"): frappe.throw(_("Not permitted"), frappe.PermissionError) frappe.local.response.update({"data": doc.run_method(method, **frappe.local.form_dict)}) if frappe.local.request.method=="POST": if not doc.has_permission("write"): frappe.throw(_("Not permitted"), frappe.PermissionError) frappe.local.response.update({"data": doc.run_method(method, **frappe.local.form_dict)}) frappe.db.commit() else: if name: if frappe.local.request.method=="GET": doc = frappe.get_doc(doctype, name) if not doc.has_permission("read"): raise frappe.PermissionError frappe.local.response.update({"data": doc}) if frappe.local.request.method=="PUT": data = json.loads(frappe.local.form_dict.data) doc = frappe.get_doc(doctype, name) if "flags" in data: del data["flags"] # Not checking permissions here because it's checked in doc.save doc.update(data) frappe.local.response.update({ "data": doc.save().as_dict() }) frappe.db.commit() if frappe.local.request.method=="DELETE": # Not checking permissions here because it's checked in delete_doc frappe.delete_doc(doctype, name) frappe.local.response.http_status_code = 202 frappe.local.response.message = "ok" frappe.db.commit() elif doctype: if frappe.local.request.method=="GET": if frappe.local.form_dict.get('fields'): frappe.local.form_dict['fields'] = json.loads(frappe.local.form_dict['fields']) frappe.local.form_dict.setdefault('limit_page_length', 20) frappe.local.response.update({ "data": frappe.call(frappe.client.get_list, doctype, **frappe.local.form_dict)}) if frappe.local.request.method=="POST": data = json.loads(frappe.local.form_dict.data) data.update({ "doctype": doctype }) frappe.local.response.update({ "data": frappe.get_doc(data).insert().as_dict() }) frappe.db.commit() else: raise frappe.DoesNotExistError else: raise frappe.DoesNotExistError return build_response("json") def validate_oauth(): form_dict = frappe.local.form_dict authorization_header = frappe.get_request_header("Authorization").split(" ") if frappe.get_request_header("Authorization") else None if authorization_header and authorization_header[0].lower() == "bearer": from frappe.integration_broker.oauth2 import 
get_oauth_server
		token = authorization_header[1]
		r = frappe.request
		parsed_url = urlparse(r.url)
		access_token = {"access_token": token}
		uri = parsed_url.scheme + "://" + parsed_url.netloc + parsed_url.path + "?" + urlencode(access_token)
		http_method = r.method
		body = r.get_data()
		headers = r.headers

		required_scopes = frappe.db.get_value("OAuth Bearer Token", token, "scopes").split(";")

		valid, oauthlib_request = get_oauth_server().verify_request(uri, http_method, body, headers, required_scopes)

		if valid:
			frappe.set_user(frappe.db.get_value("OAuth Bearer Token", token, "user"))
			frappe.local.form_dict = form_dict
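
# Hedged usage examples (added for illustration; the doctype and names below
# are hypothetical, the URL shapes follow the handle() docstring above):
#
#   GET    /api/resource/Task?filters=[["Task","status","=","Open"]]
#   POST   /api/resource/Task             body: {"subject": "New task"}
#   PUT    /api/resource/Task/TASK00001   body: {"status": "Closed"}
#   DELETE /api/resource/Task/TASK00001
#   GET    /api/method/frappe.auth.get_logged_user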
mit
2,075,036,741,239,225,300
29.02649
133
0.679091
false
sgmap/openfisca-france
openfisca_france/model/prestations/minima_sociaux/aefa.py
1
2983
# -*- coding: utf-8 -*-

from openfisca_france.model.base import *
from openfisca_france.model.prestations.prestations_familiales.base_ressource import nb_enf


class aefa(Variable):
    '''
    Exceptional end-of-year aid (Christmas bonus, "prime de Noël").
    Instituted in 1998. Appears under the name "complément de RMI" in the
    ERF surveys.

    The amount of the aid mentioned in article 1, paid to recipients of the
    increased-rate specific solidarity allowance (ASS) granted to claimants
    aged fifty-five or over with twenty years of salaried employment, to
    claimants aged fifty-seven and a half or over with ten years of salaried
    employment, as well as to claimants with at least 160 validated quarters
    in the old-age insurance schemes or recognised equivalent periods, is
    equal to

    To receive the 2011 Christmas bonus, you must be eligible, for the month
    of November 2011 or at the latest December 2011, for one of: the specific
    solidarity allowance (ASS), the monthly flat-rate back-to-work bonus, the
    equivalent retirement allowance (AER), the active solidarity income (RSA),
    the single parent allowance (API), the minimum integration income (RMI),
    the allowance for business creation or takeover (ACCRE-ASS), or
    unemployment benefit.
    '''
    value_type = float
    entity = Famille
    label = u"Aide exceptionelle de fin d'année (prime de Noël)"
    reference = u"https://www.service-public.fr/particuliers/vosdroits/F1325"
    definition_period = YEAR

    def formula_2002_01_01(famille, period, parameters):
        rsa = famille('rsa', period, options = [ADD])
        ass_i = famille.members('ass', period, options = [ADD])
        ass = famille.sum(ass_i)
        api = famille('api', period, options = [ADD])
        aer_i = famille.members('aer', period, options = [ADD])
        aer = famille.sum(aer_i)

        condition = (ass > 0) + (aer > 0) + (api > 0) + (rsa > 0)
        condition_majoration = rsa > 0

        af = parameters(period).prestations.prestations_familiales.af
        janvier = period.first_month
        af_nbenf = famille('af_nbenf', janvier)
        nb_parents = famille('nb_parents', janvier)

        if hasattr(af, "age3"):
            nbPAC = nb_enf(famille, janvier, af.age1, af.age3)
        else:
            nbPAC = af_nbenf

        aefa = parameters(period).prestations.minima_sociaux.aefa
        # TODO: check the number of dependent children (PAC) counted per family
        majoration = 1 + (condition_majoration * (
            (nb_parents == 2) * aefa.tx_2p
            + nbPAC * aefa.tx_supp * (nb_parents <= 2)
            + nbPAC * aefa.tx_3pac * max_(nbPAC - 2, 0)
            ))

        montant_aefa = aefa.mon_seul * majoration
        montant_aefa += aefa.prime_exceptionnelle

        return condition * montant_aefa
agpl-3.0
-1,951,170,180,763,779,300
47.583333
120
0.672384
false
olyhaa/OlyEats
olyeats/recipe/forms.py
1
3642
from django.forms import ModelForm
from django.template.loader import render_to_string
from models import Recipe
import django.forms as forms
from django.forms.models import BaseInlineFormSet
from django.conf import settings
from django.contrib.sites.models import Site
from django.template import loader, RequestContext
from django.http import HttpResponse


# Adds a link to a select box that pops up a form, allowing you to add new
# items to the select box via a form. You need to include the
# RelatedObjectLookups JS on the main form
class SelectWithPop(forms.Select):
    def render(self, name, * args, ** kwargs):
        html = super(SelectWithPop, self).render(name, * args, ** kwargs)
        popupplus = render_to_string("recipe_groups/popupplus.html",
                                     {'field': name})
        return html + popupplus


# Used to create new recipes. The course and cuisine fields are created with
# a special widget that appends a link and graphic to the end of the select
# field to allow users to add new items via a popup form
class RecipeForm(ModelForm):
    # course = forms.ModelChoiceField(Course.objects, widget=SelectWithPop)
    # cuisine = forms.ModelChoiceField(Cuisine.objects, widget=SelectWithPop)

    class Meta:
        model = Recipe
        exclude = ('slug', 'ingredient')


# Require at least two ingredients in the formset to be completed.
class IngItemFormSet(BaseInlineFormSet):
    def clean(self):
        super(IngItemFormSet, self).clean()

        for error in self.errors:
            if error:
                return

        completed = 0
        for cleaned_data in self.cleaned_data:
            if cleaned_data and not cleaned_data.get('DELETE', False):
                completed += 1

        if completed < 2:
            raise forms.ValidationError("At least two %s are required." %
                                        self.model._meta.object_name.lower())


"""
# Recipe form to send a recipe via email
class RecipeSendMail(forms.Form):

    def __init__(self, data=None, files=None, request=None, *args, **kwargs):
        if request is None:
            raise TypeError("Keyword argument 'request must be supplies'")
        super(RecipeSendMail, self).__init__(data=data, files=files, *args, **kwargs)
        self.request = request

    to_email = forms.EmailField(widget=forms.TextInput(), label='email address')
    id = forms.CharField(widget=forms.HiddenInput())
    from_site = Site.objects.get_current()

    def _get_recipe(self):
        if self.is_valid():
            recipe = Recipe.objects.get(pk=self.cleaned_data['id'])
            self.recipe = recipe
            return self.recipe
        else:
            raise ValueError('Can not get the recipe id from invalid form data')

    # get the recipe and return the message body for the email
    def get_body(self):
        template_name = 'recipe/recipe_mail_body.html'
        # template that contains the email body and also shared by the
        # grocery print view
        message = loader.render_to_string(template_name,
                                          {'recipe': self._get_recipe()},
                                          context_instance=RequestContext(self.request))
        return message

    # gets the email to send the list to from the form
    def get_toMail(self):
        if self.is_valid():
            return self.cleaned_data['to_email']
        else:
            raise ValueError('Can not get to_email from invalid form data')

    # sends the email message
    def save(self, fail_silently=False):
        self.subject = str(self.from_site) + ' recipe: ' + self._get_recipe().title
        self.from_email = self.request.user.email
        if self.subject and self.get_body() and self.from_email:
            try:
                msg = EmailMessage(self.subject, self.get_body(),
                                   self.from_email, [self.get_toMail()])
                msg.content_subtype = 'html'
                msg.send()
            except BadHeaderError:
                return HttpResponse('Invalid header found.')
            return HttpResponse('Email Sent')
        else:
            return HttpResponse('Make sure all fields are entered and valid.')
"""
gpl-2.0
7,721,903,612,825,831,000
35.42
129
0.731192
false
leonardoarroyo/django-react-cms
react_cms/widgets.py
1
1852
import json from django.forms.widgets import Widget from django.conf import settings from django.utils.safestring import mark_safe from django.template.loader import render_to_string from collections import OrderedDict from react_cms.finders import ComponentFinder class ResourceEditorWidget(Widget): template_name = 'react_cms/widgets/resource_editor.html' def render(self, name, value, attrs=None, renderer=None): context = {'value': mark_safe(value), 'available_languages': mark_safe(self.get_available_languages()), 'strip_parameters': self.get_strip_parameters(), 'components_json': mark_safe(json.dumps(self.build_available_components()))} return mark_safe(render_to_string(self.template_name, context)) def build_available_components(self): components = [] templates = ComponentFinder().find() for template in templates: try: t = json.loads(render_to_string('react_cms/react_components/{}'.format(template)), object_pairs_hook=OrderedDict) except ValueError as e: raise ValueError("Failed decoding JSON on react_cms/react_components/{}. {}".format(template, e)) components.append(self.prepare_component(t["info"])) return components def prepare_component(self, component): if 'props' not in component: component['props'] = {} for editable_prop in component['editableProps']: component['props'][editable_prop] = '' return component def get_available_languages(self): languages = list(settings.LANGUAGES) languages.pop(0) # First language is default return [x[0] for x in languages] def get_strip_parameters(self): s = getattr(settings, 'REACT_CMS', {}) strip = s.get('STRIP_PARAMETERS_FROM_FILE_URL', False) return 'true' if strip else 'false'
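
# Hedged example (added for illustration, not part of the original app):
# prepare_component() seeds an empty string for every editable prop, so a
# component description like the hypothetical one below gains a 'props'
# dict ready for the editor.
def _prepare_component_demo():
    widget = ResourceEditorWidget()
    component = {'name': 'Banner', 'editableProps': ['title', 'href']}
    print(widget.prepare_component(component))
    # -> {'name': 'Banner', 'editableProps': ['title', 'href'],
    #     'props': {'title': '', 'href': ''}}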
mit
8,283,340,780,235,129,000
36.04
121
0.689525
false
autogestion/sh_ctracker
ctracker/sql.py
1
7452
from django.db import connection claim_to_polygon_join = """ LEFT OUTER JOIN ctracker_polygon_organizations ON (houses.polygon_id = ctracker_polygon_organizations.polygon_id) LEFT OUTER JOIN ctracker_organization ON (ctracker_polygon_organizations.organization_id = ctracker_organization.id) LEFT OUTER JOIN ctracker_claim ON (ctracker_organization.id = ctracker_claim.organization_id) """ def get_claims_for_poly(polygon_id): cursor = connection.cursor() cursor.execute(""" SELECT COUNT(*) AS "__count" FROM "ctracker_organization" INNER JOIN "ctracker_polygon_organizations" ON ("ctracker_organization"."id" = "ctracker_polygon_organizations"."organization_id") INNER JOIN "ctracker_claim" ON ("ctracker_organization"."id" = "ctracker_claim"."organization_id") WHERE ("ctracker_polygon_organizations"."polygon_id" = '%s') """ % polygon_id) return cursor.fetchone()[0] def get_sum_for_layers(layers_ids, level): cursor = connection.cursor() if level==4: cursor.execute(""" SELECT ctracker_organization.id, COUNT(ctracker_claim.content_ptr_id) AS claims FROM ctracker_organization LEFT OUTER JOIN ctracker_claim ON (ctracker_organization.id = ctracker_claim.organization_id) WHERE (ctracker_organization.id IN (%s) ) GROUP BY ctracker_organization.id """ % ','.join([str(x) for x in layers_ids]) ) elif level==3: cursor.execute(""" SELECT district_id, SUM(claimz) as sum_claims FROM (SELECT houses.layer_id as district_id, COUNT(ctracker_claim.content_ptr_id) AS claimz FROM ctracker_polygon houses %s WHERE (houses.layer_id IN (%s) ) GROUP BY houses.polygon_id ) x GROUP BY district_id """ % (claim_to_polygon_join, ','.join(["'" + str(x) + "'" for x in layers_ids])) ) elif level==2: cursor.execute(""" SELECT area_id, SUM(claimz) as sum_claims FROM (SELECT districts.layer_id as area_id, COUNT(ctracker_claim.content_ptr_id) AS claimz FROM ctracker_polygon districts LEFT OUTER JOIN ctracker_polygon houses ON (houses.layer_id = districts.polygon_id) %s WHERE (districts.layer_id IN (%s) ) GROUP BY districts.polygon_id ) x GROUP BY area_id """ % (claim_to_polygon_join, ','.join(["'" + str(x) + "'" for x in layers_ids])) ) elif level==1: cursor.execute(""" SELECT region_id, SUM(claimz) as sum_claims FROM (SELECT areas.layer_id as region_id, COUNT(ctracker_claim.content_ptr_id) AS claimz FROM ctracker_polygon areas LEFT OUTER JOIN ctracker_polygon districts ON (districts.layer_id = areas.polygon_id) LEFT OUTER JOIN ctracker_polygon houses ON (houses.layer_id = districts.polygon_id) %s WHERE (areas.layer_id IN (%s) ) GROUP BY areas.polygon_id ) x GROUP BY region_id """ % (claim_to_polygon_join, ','.join(["'" + str(x) + "'" for x in layers_ids])) ) elif level==0: cursor.execute(""" SELECT root_id, SUM(claimz) as sum_claims FROM (SELECT regions.layer_id as root_id, COUNT(ctracker_claim.content_ptr_id) AS claimz FROM ctracker_polygon regions LEFT OUTER JOIN ctracker_polygon areas ON (areas.layer_id = regions.polygon_id) LEFT OUTER JOIN ctracker_polygon districts ON (districts.layer_id = areas.polygon_id) LEFT OUTER JOIN ctracker_polygon houses ON (houses.layer_id = districts.polygon_id) %s WHERE (regions.layer_id IN (%s) ) GROUP BY regions.polygon_id ) x GROUP BY root_id """ % (claim_to_polygon_join, ','.join(["'" + str(x) + "'" for x in layers_ids])) ) return dict(cursor.fetchall()) def get_max_for_layers(layer_id, level): layers_dict = {} cursor = connection.cursor() if level==4: # x = Polygon.objects.filter(layer_id=layer_id).annotate(claimz=Count('organizations__claim')) cursor.execute(""" SELECT layer_id, MAX(claimz) FROM 
(SELECT houses.layer_id as layer_id, COUNT(ctracker_claim.content_ptr_id) AS claimz FROM ctracker_polygon houses %s WHERE (houses.layer_id IN (%s) ) GROUP BY houses.polygon_id ) x GROUP BY layer_id """ % (claim_to_polygon_join, ','.join(["'" + str(x) + "'" for x in layer_id])) ) elif level==3: cursor = connection.cursor() cursor.execute(""" SELECT district_id, MAX(claimz) as sum_claims FROM ( SELECT districts.layer_id as district_id, COUNT(ctracker_claim.content_ptr_id) AS claimz FROM ctracker_polygon districts LEFT OUTER JOIN ctracker_polygon houses ON (houses.layer_id = districts.polygon_id) %s WHERE (districts.layer_id IN (%s) ) GROUP BY districts.polygon_id) x GROUP BY district_id """ % (claim_to_polygon_join, ','.join(["'" + str(x) + "'" for x in layer_id])) ) elif level==2: cursor.execute(""" SELECT district_id, MAX(claimz) as sum_claims FROM ( SELECT areas.layer_id as district_id, COUNT(ctracker_claim.content_ptr_id) AS claimz FROM ctracker_polygon areas LEFT OUTER JOIN ctracker_polygon districts ON (districts.layer_id = areas.polygon_id) LEFT OUTER JOIN ctracker_polygon houses ON (houses.layer_id = districts.polygon_id) %s WHERE (areas.layer_id IN (%s) ) GROUP BY areas.polygon_id) x GROUP BY district_id """ % (claim_to_polygon_join, ','.join(["'" + str(x) + "'" for x in layer_id])) ) elif level==1: cursor.execute(""" SELECT district_id, MAX(claimz) as sum_claims FROM ( SELECT regions.layer_id as district_id, COUNT(ctracker_claim.content_ptr_id) AS claimz FROM ctracker_polygon regions LEFT OUTER JOIN ctracker_polygon areas ON (areas.layer_id = regions.polygon_id) LEFT OUTER JOIN ctracker_polygon districts ON (districts.layer_id = areas.polygon_id) LEFT OUTER JOIN ctracker_polygon houses ON (houses.layer_id = districts.polygon_id) %s WHERE (regions.layer_id IN (%s) ) GROUP BY regions.polygon_id) x GROUP BY district_id """ % (claim_to_polygon_join, ','.join(["'" + str(x) + "'" for x in layer_id])) ) return dict(cursor.fetchall())
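
# Hedged usage sketch (added for illustration; both helpers need a configured
# Django database connection, so this is shown as comments only and the ids
# are hypothetical):
#
#   totals = get_sum_for_layers([11, 12, 13], level=4)   # {org_id: claim count}
#   peaks = get_max_for_layers(['district-1'], level=3)  # {district_id: max claims}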
bsd-3-clause
-1,031,291,130,743,859,500
50.4
151
0.543076
false
zynga/jasy
jasy/core/Config.py
1
10980
#
# Jasy - Web Tooling Framework
# Copyright 2010-2012 Zynga Inc.
#

import sys, os, yaml, json

import jasy.core.Console as Console
import jasy.core.File as File

from jasy import UserError
from jasy.core.Util import getKey

__all__ = [ "Config", "findConfig", "loadConfig", "writeConfig" ]


def findConfig(fileName):
    """
    Returns the name of a config file based on the given base file name
    (without extension). Returns either a file name ending with .json or
    .yaml, or None when no matching config file exists.
    """

    fileExt = os.path.splitext(fileName)[1]

    # Auto discovery
    if not fileExt:
        for tryExt in (".json", ".yaml"):
            if os.path.exists(fileName + tryExt):
                return fileName + tryExt
        return None

    if os.path.exists(fileName) and fileExt in (".json", ".yaml"):
        return fileName
    else:
        return None


def loadConfig(fileName, encoding="utf-8"):
    """
    Loads the given configuration file (filename without extension) and
    returns the parsed object structure
    """

    configName = findConfig(fileName)
    if configName is None:
        raise UserError("Unsupported config file: %s" % fileName)

    fileHandle = open(configName, mode="r", encoding=encoding)

    fileExt = os.path.splitext(configName)[1]
    if fileExt == ".json":
        result = json.load(fileHandle)
    elif fileExt == ".yaml":
        result = yaml.load(fileHandle)

    fileHandle.close()
    return result


def writeConfig(data, fileName, indent=2, encoding="utf-8"):
    """
    Writes the given data structure to the given file name. Based on the
    given extension a different file format is chosen. Currently either
    .yaml or .json is supported.
    """

    fileHandle = open(fileName, mode="w", encoding=encoding)

    fileExt = os.path.splitext(fileName)[1]
    if fileExt == ".json":
        json.dump(data, fileHandle, indent=indent, ensure_ascii=False)
        fileHandle.close()
    elif fileExt == ".yaml":
        yaml.dump(data, fileHandle, default_flow_style=False, indent=indent, allow_unicode=True)
        fileHandle.close()
    else:
        fileHandle.close()
        raise UserError("Unsupported config type: %s" % fileExt)


def matchesType(value, expected):
    """
    Returns boolean for whether the given value matches the given type.
    Supports all basic JSON supported value types: primitive, integer/int,
    float, number/num, string/str, boolean/bool, dict/map, array/list, ...
    """

    result = type(value)
    expected = expected.lower()

    if result is int:
        return expected in ("integer", "number", "int", "num", "primitive")
    elif result is float:
        return expected in ("float", "number", "num", "primitive")
    elif result is str:
        return expected in ("string", "str", "primitive")
    elif result is bool:
        return expected in ("boolean", "bool", "primitive")
    elif result is dict:
        return expected in ("dict", "map")
    elif result is list:
        return expected in ("array", "list")

    return False


class Config:
    """
    Wrapper around JSON/YAML with easy to use import tools for using
    question files, command line arguments, etc.
    """

    def __init__(self, data=None):
        """
        Initializes the configuration object with the given data structure.
""" self.__data = data or {} def debug(self): """ Prints data to the console """ print(self.__data) def export(self): """ Returns a flat data structure of the internal data """ result = {} def recurse(data, prefix): for key in data: value = data[key] if type(value) is dict: if prefix: recurse(value, prefix + key + ".") else: recurse(value, key + ".") else: result[prefix + key] = value recurse(self.__data, "") return result def injectValues(self, parse=True, **argv): """ Injects a list of arguments into the configuration file, typically used for injecting command line arguments """ for key in argv: self.set(key, argv[key], parse=parse) def loadValues(self, fileName, optional=False, encoding="utf-8"): """ Imports the values of the given config file Returns True when the file was found and processed. Note: Supports dotted names to store into sub trees Note: This method overrides keys when they are already defined! """ configFile = findConfig(fileName) if configFile is None: if optional: return False else: raise UserError("Could not find configuration file (values): %s" % configFile) data = loadConfig(configFile, encoding=encoding) for key in data: self.set(key, data[key]) return True def readQuestions(self, fileName, force=False, autoDelete=True, optional=False, encoding="utf-8"): """ Reads the given configuration file with questions and deletes the file afterwards (by default). Returns True when the file was found and processed. """ configFile = findConfig(fileName) if configFile is None: if optional: return False else: raise UserError("Could not find configuration file (questions): %s" % configFile) data = loadConfig(configFile, encoding=encoding) for entry in data: question = entry["question"] name = entry["name"] accept = getKey(entry, "accept", None) required = getKey(entry, "required", True) default = getKey(entry, "default", None) force = getKey(entry, "force", False) self.ask(question, name, accept=accept, required=required, default=default, force=force) if autoDelete: File.rm(configFile) return True def executeScript(self, fileName, autoDelete=True, optional=False, encoding="utf-8"): """ Executes the given script for configuration proposes and deletes the file afterwards (by default). Returns True when the file was found and processed. """ if not os.path.exists(fileName): if optional: return False else: raise UserError("Could not find configuration script: %s" % configFile) env = { "config" : self, "file" : File } code = open(fileName, "r", encoding=encoding).read() exec(compile(code, os.path.abspath(fileName), "exec"), globals(), env) if autoDelete: File.rm("jasycreate.py") return True def has(self, name): """ Returns whether there is a value for the given field name. """ if not "." in name: return name in self.__data splits = name.split(".") current = self.__data for split in splits: if split in current: current = current[split] else: return False return True def get(self, name, default=None): """ Returns the value of the given field or None when field is not set """ if not "." 
        if not "." in name:
            return getKey(self.__data, name, default)

        splits = name.split(".")
        current = self.__data
        for split in splits[:-1]:
            if split in current:
                current = current[split]
            else:
                return None

        return getKey(current, splits[-1], default)

    def ask(self, question, name, accept=None, required=True, default=None, force=False, parse=True):
        """
        Asks the user for value for the given configuration field:

        :param question: Question to ask the user
        :type question: string
        :param name: Name of field to store value in
        :type name: string
        :param accept: Any of the supported types to validate for (see matchesType)
        :type accept: string
        :param required: Whether the field is required
        :type required: boolean
        :param default: Default value used when the user gives no value
        """

        while True:
            msg = "- %s?" % question
            if accept is not None:
                msg += Console.colorize(" [%s]" % accept, "grey")

            if default is None:
                msg += Console.colorize(" (%s)" % name, "magenta")
            else:
                msg += Console.colorize(" (%s=%s)" % (name, default), "magenta")

            msg += ": "
            sys.stdout.write(msg)

            # Do not ask user for solved items
            if not force and self.has(name):
                print("%s %s" % (self.get(name), Console.colorize("(pre-filled)", "cyan")))
                return

            # Read user input, but ignore any leading/trailing white space
            value = input().strip()

            # Fallback to default if no value is given and field is not required
            if not required and value == "":
                value = default

            # Don't accept empty values
            if value == "":
                continue

            # Try setting the current value
            if self.set(name, value, accept=accept, parse=parse):
                break

    def set(self, name, value, accept=None, parse=False):
        """
        Saves the given value under the given field
        """

        # Don't accept None value
        if value is None:
            return False

        # Parse value for easy type checks
        if parse:
            try:
                parsedValue = eval(value)
            except:
                pass
            else:
                value = parsedValue

        # Convert tuples/sets into JSON compatible array
        if type(value) in (tuple, set):
            value = list(value)

        # Check for given type
        if accept is not None and not matchesType(value, accept):
            print(Console.colorize("  - Invalid value: %s" % str(value), "red"))
            return False

        if "." in name:
            splits = name.split(".")
            current = self.__data
            for split in splits[:-1]:
                if not split in current:
                    current[split] = {}
                current = current[split]

            current[splits[-1]] = value

        else:
            self.__data[name] = value

        return True

    def write(self, fileName, indent=2, encoding="utf-8"):
        """
        Uses config writer to write the configuration file to the application
        """

        writeConfig(self.__data, fileName, indent=indent, encoding=encoding)
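
# Hedged usage sketch (added for illustration, not part of the original
# module). The field names are hypothetical; set() accepts dotted names and
# stores them as nested dictionaries, which export() flattens again.
def _config_demo():
    config = Config()
    config.set("deploy.target", "build/release")
    print(config.get("deploy.target"))  # => build/release
    print(config.export())              # => {'deploy.target': 'build/release'}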
mit
6,042,181,563,522,621,000
27.59375
116
0.563661
false
ceph/autotest
scheduler/monitor_db_functional_test.py
1
42015
#!/usr/bin/python

import logging, os, unittest
import common
from autotest_lib.client.common_lib import enum, global_config, host_protections
from autotest_lib.database import database_connection
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import frontend_test_utils, models
from autotest_lib.frontend.afe import model_attributes
from autotest_lib.scheduler import drone_manager, email_manager, monitor_db
from autotest_lib.scheduler import scheduler_models

# translations necessary for scheduler queries to work with SQLite
_re_translator = database_connection.TranslatingDatabase.make_regexp_translator
_DB_TRANSLATORS = (
        _re_translator(r'NOW\(\)', 'time("now")'),
        _re_translator(r'LAST_INSERT_ID\(\)', 'LAST_INSERT_ROWID()'),
        # older SQLite doesn't support group_concat, so just don't bother until
        # it arises in an important query
        _re_translator(r'GROUP_CONCAT\((.*?)\)', r'\1'),
)

HqeStatus = models.HostQueueEntry.Status
HostStatus = models.Host.Status


class NullMethodObject(object):
    _NULL_METHODS = ()

    def __init__(self):
        def null_method(*args, **kwargs):
            pass
        for method_name in self._NULL_METHODS:
            setattr(self, method_name, null_method)


class MockGlobalConfig(object):
    def __init__(self):
        self._config_info = {}

    def set_config_value(self, section, key, value):
        self._config_info[(section, key)] = value

    def get_config_value(self, section, key, type=str,
                         default=None, allow_blank=False):
        identifier = (section, key)
        if identifier not in self._config_info:
            return default
        return self._config_info[identifier]


# the SpecialTask names here must match the suffixes used on the SpecialTask
# results directories
_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
                         'parse', 'archive')

_PIDFILE_TO_PIDFILE_TYPE = {
        drone_manager.AUTOSERV_PID_FILE: _PidfileType.JOB,
        drone_manager.CRASHINFO_PID_FILE: _PidfileType.GATHER,
        drone_manager.PARSER_PID_FILE: _PidfileType.PARSE,
        drone_manager.ARCHIVER_PID_FILE: _PidfileType.ARCHIVE,
}

_PIDFILE_TYPE_TO_PIDFILE = dict((value, key) for key, value
                                in _PIDFILE_TO_PIDFILE_TYPE.iteritems())


class MockDroneManager(NullMethodObject):
    """
    Public attributes:
    max_runnable_processes_value: value returned by max_runnable_processes().
            tests can change this to activate throttling.
    """
    _NULL_METHODS = ('reinitialize_drones', 'copy_to_results_repository',
                     'copy_results_on_drone')

    class _DummyPidfileId(object):
        """
        Object to represent pidfile IDs that is opaque to the scheduler code
        but still debugging-friendly for us.
        """
        def __init__(self, working_directory, pidfile_name, num_processes=None):
            self._working_directory = working_directory
            self._pidfile_name = pidfile_name
            self._num_processes = num_processes
            self._paired_with_pidfile = None

        def key(self):
            """Key for MockDroneManager._pidfile_index"""
            return (self._working_directory, self._pidfile_name)

        def __str__(self):
            return os.path.join(self._working_directory, self._pidfile_name)

        def __repr__(self):
            return '<_DummyPidfileId: %s>' % str(self)

    def __init__(self):
        super(MockDroneManager, self).__init__()
        self.process_capacity = 100

        # maps result_dir to set of tuples (file_path, file_contents)
        self._attached_files = {}
        # maps pidfile IDs to PidfileContents
        self._pidfiles = {}
        # pidfile IDs that haven't been created yet
        self._future_pidfiles = []
        # maps _PidfileType to the most recently created pidfile ID of that type
        self._last_pidfile_id = {}
        # maps (working_directory, pidfile_name) to pidfile IDs
        self._pidfile_index = {}
        # maps process to pidfile IDs
        self._process_index = {}
        # tracks pidfiles of processes that have been killed
        self._killed_pidfiles = set()
        # pidfile IDs that have just been unregistered (so will disappear on the
        # next cycle)
        self._unregistered_pidfiles = set()

    # utility APIs for use by the test

    def finish_process(self, pidfile_type, exit_status=0):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        self._set_pidfile_exit_status(pidfile_id, exit_status)

    def finish_specific_process(self, working_directory, pidfile_name):
        pidfile_id = self.pidfile_from_path(working_directory, pidfile_name)
        self._set_pidfile_exit_status(pidfile_id, 0)

    def _set_pidfile_exit_status(self, pidfile_id, exit_status):
        assert pidfile_id is not None
        contents = self._pidfiles[pidfile_id]
        contents.exit_status = exit_status
        contents.num_tests_failed = 0

    def was_last_process_killed(self, pidfile_type):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        return pidfile_id in self._killed_pidfiles

    def nonfinished_pidfile_ids(self):
        return [pidfile_id for pidfile_id, pidfile_contents
                in self._pidfiles.iteritems()
                if pidfile_contents.exit_status is None]

    def running_pidfile_ids(self):
        return [pidfile_id for pidfile_id in self.nonfinished_pidfile_ids()
                if self._pidfiles[pidfile_id].process is not None]

    def pidfile_from_path(self, working_directory, pidfile_name):
        return self._pidfile_index[(working_directory, pidfile_name)]

    def attached_files(self, working_directory):
        """
        Return dict mapping path to contents for attached files with specified
        paths.
        """
        return dict((path, contents) for path, contents
                    in self._attached_files.get(working_directory, [])
                    if path is not None)

    # DroneManager emulation APIs for use by monitor_db

    def get_orphaned_autoserv_processes(self):
        return set()

    def total_running_processes(self):
        return sum(pidfile_id._num_processes
                   for pidfile_id in self.nonfinished_pidfile_ids())

    def max_runnable_processes(self, username, drone_hostnames_allowed):
        return self.process_capacity - self.total_running_processes()

    def refresh(self):
        for pidfile_id in self._unregistered_pidfiles:
            # intentionally handle non-registered pidfiles silently
            self._pidfiles.pop(pidfile_id, None)
        self._unregistered_pidfiles = set()

    def execute_actions(self):
        # executing an "execute_command" causes a pidfile to be created
        for pidfile_id in self._future_pidfiles:
            # Process objects are opaque to monitor_db
            process = object()
            self._pidfiles[pidfile_id].process = process
            self._process_index[process] = pidfile_id
        self._future_pidfiles = []

    def attach_file_to_execution(self, result_dir, file_contents,
                                 file_path=None):
        self._attached_files.setdefault(result_dir, set()).add(
            (file_path, file_contents))
        return 'attach_path'

    def _initialize_pidfile(self, pidfile_id):
        if pidfile_id not in self._pidfiles:
            assert pidfile_id.key() not in self._pidfile_index
            self._pidfiles[pidfile_id] = drone_manager.PidfileContents()
            self._pidfile_index[pidfile_id.key()] = pidfile_id

    def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):
        if working_directory.startswith('hosts/'):
            # such paths look like hosts/host1/1-verify, we'll grab the end
            type_string = working_directory.rsplit('-', 1)[1]
            pidfile_type = _PidfileType.get_value(type_string)
        else:
            pidfile_type = _PIDFILE_TO_PIDFILE_TYPE[pidfile_name]
        self._last_pidfile_id[pidfile_type] = pidfile_id

    def execute_command(self, command, working_directory, pidfile_name,
                        num_processes, log_file=None, paired_with_pidfile=None,
                        username=None, drone_hostnames_allowed=None):
        logging.debug('Executing %s in %s', command, working_directory)
        pidfile_id = self._DummyPidfileId(working_directory, pidfile_name)
        if pidfile_id.key() in self._pidfile_index:
            pidfile_id = self._pidfile_index[pidfile_id.key()]
        pidfile_id._num_processes = num_processes
        pidfile_id._paired_with_pidfile = paired_with_pidfile

        self._future_pidfiles.append(pidfile_id)
        self._initialize_pidfile(pidfile_id)
        self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id
        self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)
        return pidfile_id

    def get_pidfile_contents(self, pidfile_id, use_second_read=False):
        if pidfile_id not in self._pidfiles:
            logging.debug('Request for nonexistent pidfile %s' % pidfile_id)
        return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())

    def is_process_running(self, process):
        return True

    def register_pidfile(self, pidfile_id):
        self._initialize_pidfile(pidfile_id)

    def unregister_pidfile(self, pidfile_id):
        self._unregistered_pidfiles.add(pidfile_id)

    def declare_process_count(self, pidfile_id, num_processes):
        pidfile_id.num_processes = num_processes

    def absolute_path(self, path):
        return 'absolute/' + path

    def write_lines_to_file(self, file_path, lines, paired_with_process=None):
        # TODO: record this
        pass

    def get_pidfile_id_from(self, execution_tag, pidfile_name):
        default_pidfile = self._DummyPidfileId(execution_tag, pidfile_name,
                                               num_processes=0)
        return self._pidfile_index.get((execution_tag, pidfile_name),
                                       default_pidfile)

    def kill_process(self, process):
        pidfile_id = self._process_index[process]
        self._killed_pidfiles.add(pidfile_id)
        self._set_pidfile_exit_status(pidfile_id, 271)


class MockEmailManager(NullMethodObject):
    _NULL_METHODS = ('send_queued_emails', 'send_email')

    def enqueue_notify_email(self, subject, message):
        logging.warn('enqueue_notify_email: %s', subject)
        logging.warn(message)


class SchedulerFunctionalTest(unittest.TestCase,
                              frontend_test_utils.FrontendTestMixin):
    # some number of ticks after which the scheduler is presumed to have
    # stabilized, given no external changes
    _A_LOT_OF_TICKS = 10

    def setUp(self):
        self._frontend_common_setup()
        self._set_stubs()
        self._set_global_config_values()
        self._create_dispatcher()

        logging.basicConfig(level=logging.DEBUG)

    def _create_dispatcher(self):
        self.dispatcher = monitor_db.Dispatcher()

    def tearDown(self):
        self._database.disconnect()
        self._frontend_common_teardown()

    def _set_stubs(self):
        self.mock_config = MockGlobalConfig()
        self.god.stub_with(global_config, 'global_config', self.mock_config)

        self.mock_drone_manager = MockDroneManager()
        drone_manager._set_instance(self.mock_drone_manager)

        self.mock_email_manager = MockEmailManager()
        self.god.stub_with(email_manager, 'manager', self.mock_email_manager)

        self._database = (
            database_connection.TranslatingDatabase.get_test_database(
                translators=_DB_TRANSLATORS))
        self._database.connect(db_type='django')
        self.god.stub_with(monitor_db, '_db', self._database)
        self.god.stub_with(scheduler_models, '_db', self._database)

        monitor_db.initialize_globals()
        scheduler_models.initialize_globals()

    def _set_global_config_values(self):
        self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',
                                          1)
        self.mock_config.set_config_value('SCHEDULER',
                                          'gc_stats_interval_mins', 999999)

    def _initialize_test(self):
        self.dispatcher.initialize()

    def _run_dispatcher(self):
        for _ in xrange(self._A_LOT_OF_TICKS):
            self.dispatcher.tick()

    def test_idle(self):
        self._initialize_test()
        self._run_dispatcher()

    def _assert_process_executed(self, working_directory, pidfile_name):
        process_was_executed = self.mock_drone_manager.was_process_executed(
            'hosts/host1/1-verify', drone_manager.AUTOSERV_PID_FILE)
        self.assert_(process_was_executed,
                     '%s/%s not executed' % (working_directory, pidfile_name))

    def _update_instance(self, model_instance):
        return type(model_instance).objects.get(pk=model_instance.pk)

    def _check_statuses(self, queue_entry, queue_entry_status,
                        host_status=None):
        self._check_entry_status(queue_entry, queue_entry_status)
        if host_status:
            self._check_host_status(queue_entry.host, host_status)

    def _check_entry_status(self, queue_entry, status):
        # update from DB
        queue_entry = self._update_instance(queue_entry)
        self.assertEquals(queue_entry.status, status)

    def _check_host_status(self, host, status):
        # update from DB
        host = self._update_instance(host)
        self.assertEquals(host.status, status)

    def _run_pre_job_verify(self, queue_entry):
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)

    def test_simple_job(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # launches job
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._assert_nothing_is_running()

    def _setup_for_pre_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_before = model_attributes.RebootBefore.ALWAYS
        job.save()
        return queue_entry

    def _run_pre_job_cleanup_job(self, queue_entry):
        self._run_dispatcher() # cleanup
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.CLEANING)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)

    def test_pre_job_cleanup(self):
        queue_entry = self._setup_for_pre_job_cleanup()
        self._run_pre_job_cleanup_job(queue_entry)

    def _run_pre_job_cleanup_one_failure(self):
        queue_entry = self._setup_for_pre_job_cleanup()
        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_statuses(queue_entry, HqeStatus.QUEUED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        return queue_entry

    def test_pre_job_cleanup_failure(self):
        queue_entry = self._run_pre_job_cleanup_one_failure()
        # from here the job should run as normal
        self._run_pre_job_cleanup_job(queue_entry)

    def test_pre_job_cleanup_double_failure(self):
        # TODO (showard): this test isn't perfect.  in reality, when the second
        # cleanup fails, it copies its results over to the job directory using
        # copy_results_on_drone() and then parses them.  since we don't handle
        # that, there appear to be no results at the job directory.  the
        # scheduler handles this gracefully, parsing gets effectively skipped,
        # and this test passes as is.  but we ought to properly test that
        # behavior.
        queue_entry = self._run_pre_job_cleanup_one_failure()
        self._run_dispatcher() # second cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.REPAIR_FAILED)
        # nothing else should run
        self._assert_nothing_is_running()

    def _assert_nothing_is_running(self):
        self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])

    def _setup_for_post_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_after = model_attributes.RebootAfter.ALWAYS
        job.save()
        return queue_entry

    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
        self._run_dispatcher()
        return queue_entry

    def test_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.READY)

    def test_post_job_cleanup_failure_repair_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIR_FAILED)

    def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):
        pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]
        queue_entry = self._update_instance(queue_entry)
        pidfile_id = self.mock_drone_manager.pidfile_from_path(
            queue_entry.execution_path(), pidfile_name)
        self.assert_(pidfile_id._paired_with_pidfile)

    def _finish_job(self, queue_entry):
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing + cleanup
        self._check_statuses(queue_entry, HqeStatus.PARSING,
                             HostStatus.CLEANING)
        self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)
        self._finish_parsing_and_cleanup(queue_entry)

    def _finish_parsing_and_cleanup(self, queue_entry):
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(queue_entry, HqeStatus.ARCHIVING)
        self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
        self._run_dispatcher()

    def _create_reverify_request(self):
        host = self.hosts[0]
        models.SpecialTask.schedule_special_task(
            host=host, task=models.SpecialTask.Task.VERIFY)
        return host

    def test_requested_reverify(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)

    def test_requested_reverify_failure(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_host_status(host, HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)

    def _setup_for_do_not_verify(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
        queue_entry.host.save()
        return queue_entry

    def test_do_not_verify_job(self):
        queue_entry = self._setup_for_do_not_verify()
        self._run_dispatcher() # runs job directly
        self._finish_job(queue_entry)

    def test_do_not_verify_job_with_cleanup(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)

    def test_do_not_verify_pre_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # failure ignored; job runs
        self._finish_job(queue_entry)

    def test_do_not_verify_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                        include_verify=False)
        # failure ignored, host still set to Ready
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.READY)
        self._run_dispatcher() # nothing else runs
        self._assert_nothing_is_running()

    def test_do_not_verify_requested_reverify_failure(self):
        host = self._create_reverify_request()
        host.protection = host_protections.Protection.DO_NOT_VERIFY
        host.save()

        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY) # ignore failure
        self._assert_nothing_is_running()

    def test_job_abort_in_verify(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        self._run_dispatcher() # launches verify
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills verify, launches cleanup
        self.assert_(self.mock_drone_manager.was_last_process_killed(
            _PidfileType.VERIFY))
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()

    def test_job_abort(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        job.run_verify = False
        job.save()

        self._run_dispatcher() # launches job
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills job, launches gathering
        self.assert_(self.mock_drone_manager.was_last_process_killed(
            _PidfileType.JOB))
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # launches parsing + cleanup
        queue_entry = job.hostqueueentry_set.all()[0]
        self._finish_parsing_and_cleanup(queue_entry)

    def test_job_abort_queued_synchronous(self):
        self._initialize_test()
        job = self._create_job(hosts=[1,2])
        job.synch_count = 2
        job.save()

        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher()
        for host_queue_entry in job.hostqueueentry_set.all():
            self.assertEqual(host_queue_entry.status, HqeStatus.ABORTED)

    def test_no_pidfile_leaking(self):
        self._initialize_test()

        self.test_simple_job()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort_in_verify()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

    def _make_job_and_queue_entry(self):
        job = self._create_job(hosts=[1])
        queue_entry = job.hostqueueentry_set.all()[0]
        return job, queue_entry

    def test_recover_running_no_process(self):
        # recovery should re-execute a Running HQE if no process is found
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RUNNING
        queue_entry.execution_subdir = '1-myuser/host1'
        queue_entry.save()
        queue_entry.host.status = HostStatus.RUNNING
        queue_entry.host.save()

        self._initialize_test()
        self._run_dispatcher()
        self._finish_job(queue_entry)

    def test_recover_verifying_hqe_no_special_task(self):
        # recovery should fail on a Verifying HQE with no corresponding
        # Verify or Cleanup SpecialTask
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        # make some dummy SpecialTasks that shouldn't count
        models.SpecialTask.objects.create(
            host=queue_entry.host,
            task=models.SpecialTask.Task.VERIFY,
            requested_by=models.User.current_user())
        models.SpecialTask.objects.create(
            host=queue_entry.host,
            task=models.SpecialTask.Task.CLEANUP,
            queue_entry=queue_entry,
            is_complete=True,
            requested_by=models.User.current_user())

        self.assertRaises(monitor_db.SchedulerError, self._initialize_test)

    def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        special_task = models.SpecialTask.objects.create(
            host=queue_entry.host, task=task, queue_entry=queue_entry)

        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(pidfile_type)
        self._run_dispatcher()
        # don't bother checking the rest of the job execution, as long as the
        # SpecialTask ran

    def test_recover_verifying_hqe_with_cleanup(self):
        # recover an HQE that was in pre-job cleanup
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
                                                _PidfileType.CLEANUP)

    def test_recover_verifying_hqe_with_verify(self):
        # recover an HQE that was in pre-job verify
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
                                                _PidfileType.VERIFY)

    def test_recover_pending_hqes_with_group(self):
        # recover a group of HQEs that are in Pending, in the same group (e.g.,
        # in a job with atomic hosts)
        job = self._create_job(hosts=[1,2], atomic_group=1)
        job.save()
        job.hostqueueentry_set.all().update(status=HqeStatus.PENDING)

        self._initialize_test()

        for queue_entry in job.hostqueueentry_set.all():
            self.assertEquals(queue_entry.status, HqeStatus.STARTING)

    def test_recover_parsing(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.run_verify = False
        job.reboot_after = model_attributes.RebootAfter.NEVER
        job.save()

        self._run_dispatcher() # launches job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing

        # now "restart" the scheduler
        self._create_dispatcher()
        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()

    def test_recover_parsing__no_process_already_aborted(self):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.execution_subdir = 'host1'
        queue_entry.status = HqeStatus.PARSING
        queue_entry.aborted = True
        queue_entry.save()

        self._initialize_test()
        self._run_dispatcher()

    def test_job_scheduled_just_after_abort(self):
        # test a pretty obscure corner case where a job is aborted while queued,
        # another job is ready to run, and throttling is active. the post-abort
        # cleanup must not be pre-empted by the second job.
        job1, queue_entry1 = self._make_job_and_queue_entry()
        job2, queue_entry2 = self._make_job_and_queue_entry()

        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # schedule job1, but won't start verify
        job1.hostqueueentry_set.update(aborted=True)
        self.mock_drone_manager.process_capacity = 100
        self._run_dispatcher() # cleanup must run here, not verify for job2
        self._check_statuses(queue_entry1, HqeStatus.ABORTED,
                             HostStatus.CLEANING)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # now verify starts for job2
        self._check_statuses(queue_entry2, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)

    def test_reverify_interrupting_pre_job(self):
        # ensure things behave sanely if a reverify is scheduled in the middle
        # of pre-job actions
        _, queue_entry = self._make_job_and_queue_entry()

        self._run_dispatcher() # pre-job verify
        self._create_reverify_request()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher() # reverify runs now
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # pre-job verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # and job runs...
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry) # reverify has been deleted
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.READY)
        self._assert_nothing_is_running()

    def test_reverify_while_job_running(self):
        # once a job is running, a reverify must not be allowed to preempt
        # Gathering
        _, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job runs

        self._create_reverify_request()

        # make job end with a signal, so gathering will run
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering must start
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing and cleanup
        self._finish_parsing_and_cleanup(queue_entry)
        self._run_dispatcher() # now reverify runs
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)

    def test_reverify_while_host_pending(self):
        # ensure that if a reverify is scheduled while a host is in Pending, it
        # won't run until the host is actually free
        job = self._create_job(hosts=[1,2])
        queue_entry = job.hostqueueentry_set.get(host__hostname='host1')
        job.synch_count = 2
        job.save()

        host2 = self.hosts[1]
        host2.locked = True
        host2.save()

        self._run_dispatcher() # verify host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # host1 Pending
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
        self._create_reverify_request()
        self._run_dispatcher() # nothing should happen here
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)

        # now let the job run
        host2.locked = False
        host2.save()
        self._run_dispatcher() # verify host2
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # run job
        self._finish_job(queue_entry)
        # need to explicitly finish host1's post-job cleanup
        self.mock_drone_manager.finish_specific_process(
            'hosts/host1/4-cleanup', drone_manager.AUTOSERV_PID_FILE)
        self._run_dispatcher()
        # the reverify should now be running
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)

    def test_throttling(self):
        job = self._create_job(hosts=[1,2,3])
        job.synch_count = 3
        job.save()

        queue_entries = list(job.hostqueueentry_set.all())

        def _check_hqe_statuses(*statuses):
            for queue_entry, status in zip(queue_entries, statuses):
                self._check_statuses(queue_entry, status)

        self.mock_drone_manager.process_capacity = 2
        self._run_dispatcher() # verify runs on 1 and 2
        _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.VERIFYING,
                            HqeStatus.VERIFYING)
        self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)

        self.mock_drone_manager.finish_specific_process(
            'hosts/host1/1-verify', drone_manager.AUTOSERV_PID_FILE)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # verify runs on 3
        _check_hqe_statuses(HqeStatus.PENDING, HqeStatus.PENDING,
                            HqeStatus.VERIFYING)

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job won't run due to throttling
        _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,
                            HqeStatus.STARTING)
        self._assert_nothing_is_running()

        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now job runs
        _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,
                            HqeStatus.RUNNING)

        self.mock_drone_manager.process_capacity = 2
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering won't run due to throttling
        _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING,
                            HqeStatus.GATHERING)
        self._assert_nothing_is_running()

        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now gathering runs

        self.mock_drone_manager.process_capacity = 0
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing runs despite throttling
        _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING,
                            HqeStatus.PARSING)

    def test_abort_starting_while_throttling(self):
        self._initialize_test()
        job = self._create_job(hosts=[1,2], synchronous=True)
        queue_entry = job.hostqueueentry_set.all()[0]
        job.run_verify = False
        job.reboot_after = model_attributes.RebootAfter.NEVER
        job.save()

        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # go to starting, but don't start job
        self._check_statuses(queue_entry, HqeStatus.STARTING,
                             HostStatus.PENDING)

        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.GATHERING,
                             HostStatus.RUNNING)

        self.mock_drone_manager.process_capacity = 5
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.ABORTED,
                             HostStatus.CLEANING)

    def test_simple_atomic_group_job(self):
        job = self._create_job(atomic_group=1)
        self._run_dispatcher() # expand + verify
        queue_entries = job.hostqueueentry_set.all()
        self.assertEquals(len(queue_entries), 2)
        self.assertEquals(queue_entries[0].host.hostname, 'host5')
        self.assertEquals(queue_entries[1].host.hostname, 'host6')

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # delay task started waiting

        self.mock_drone_manager.finish_specific_process(
            'hosts/host6/1-verify', drone_manager.AUTOSERV_PID_FILE)
        self._run_dispatcher() # job starts now
        for entry in queue_entries:
            self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)

        # rest of job proceeds normally

    def test_simple_metahost_assignment(self):
        job = self._create_job(metahosts=[1])
        self._run_dispatcher()
        entry = job.hostqueueentry_set.all()[0]
        self.assertEquals(entry.host.hostname, 'host1')
        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        # rest of job proceeds normally

    def test_metahost_fail_verify(self):
        self.hosts[1].labels.add(self.labels[0]) # put label1 also on host2
        job = self._create_job(metahosts=[1])
        self._run_dispatcher() # assigned to host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # host1 failed, gets reassigned to host2
        entry = job.hostqueueentry_set.all()[0]
        self.assertEquals(entry.host.hostname, 'host2')
        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
        self._check_host_status(self.hosts[0], HostStatus.REPAIRING)

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)

    def test_hostless_job(self):
        job = self._create_job(hostless=True)
        entry = job.hostqueueentry_set.all()[0]

        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.RUNNING)

        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.PARSING)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.ARCHIVING)
        self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.COMPLETED)

    def test_pre_job_keyvals(self):
        job = self._create_job(hosts=[1])
        job.run_verify = False
        job.reboot_before = model_attributes.RebootBefore.NEVER
        job.save()
        models.JobKeyval.objects.create(job=job, key='mykey', value='myvalue')

        self._run_dispatcher()
        self._finish_job(job.hostqueueentry_set.all()[0])

        attached_files = self.mock_drone_manager.attached_files(
            '1-autotest_system/host1')
        job_keyval_path = '1-autotest_system/host1/keyval'
        self.assert_(job_keyval_path in attached_files, attached_files)
        keyval_contents = attached_files[job_keyval_path]
        keyval_dict = dict(line.strip().split('=', 1)
                           for line in keyval_contents.splitlines())
        self.assert_('job_queued' in keyval_dict, keyval_dict)
        self.assertEquals(keyval_dict['mykey'], 'myvalue')


if __name__ == '__main__':
    unittest.main()
gpl-2.0
-8,264,390,622,760,610,000
38.011142
80
0.629632
false
password123456/lotto
get_random_generate_number_sendmessage.py
1
5900
#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
__author__ = 'https://github.com/password123456/'

import random
import numpy as np
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import requests
import urllib
import urllib2
import json
import datetime
import time
import dateutil.relativedelta as REL

class bcolors:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'

def computer_random():
    """let the computer create a list of 6 unique random integers from 1 to 45"""
    ok = False
    # np.arange(1, 46) covers the full 1..45 lotto range; the original
    # np.arange(1, 45) stopped at 44 and could never draw the number 45
    lotto_num_list = np.arange(1, 46)
    while not ok:
        ci = np.random.choice(lotto_num_list, 6, replace=False)
        tmp = np.where(ci == 0)
        (m, ) = tmp[0].shape
        if (m == 0):
            ok = True
    return ci

def user_random():
    time_now = time.strftime('%Y-%m-%d %H:%M:%S')
    print "============================="
    print "    Lotto number generator   "
    print "============================="
    print "[+] Start time: %s" % time_now
    print "[+] Number generation: picking 6 random numbers between 1 and 45."
    ok = False
    lotto_num_list = np.arange(1, 46)
    while not ok:
        ui = np.random.choice(lotto_num_list, 6, replace=False)
        tmp = np.where(ui == 0)
        (m, ) = tmp[0].shape
        if (m == 0):
            ok = True
    return ui

def match_lists(list1, list2):
    """to find the number of matching items in each list use sets"""
    set1 = set(list1)
    set2 = set(list2)
    set3 = set1.intersection(set2)
    #print 'computer numbers-> %s | my numbers-> %s | matching count: %d' % (set1, set2, len(set3))
    return len(set3)

def calculate():
    global user_list
    # The user draws 6 numbers.
    user_list = user_random()
    print "[+] Generated numbers: %s" % user_list

    global match3
    global match4
    global match5
    global match6
    match3 = 0
    match4 = 0
    match5 = 0
    match6 = 0

    # The computer re-draws numbers as many times as the count below.
    tickets_sold = 8145060
    print "[+] Number analysis: generating 1/%d random draws and computing how often they match the generated numbers" % tickets_sold
    for k in range(tickets_sold):
        comp_list = computer_random()
        # Compare the two sets of drawn numbers
        matches = match_lists(comp_list, user_list)
        if matches == 3:
            match3 += 1
        elif matches == 4:
            match4 += 1
        elif matches == 5:
            match5 += 1
        elif matches == 6:
            match6 += 1

def get_next_saturday():
    today = datetime.date.today()
    rd = REL.relativedelta(days=1, weekday=REL.SA)
    next_saturday = today + rd
    return next_saturday

def read_file(saved_lotto_file):
    f = open(saved_lotto_file, 'r')
    lines = f.readlines()
    data = ''
    line_count = 0
    for line in lines:
        line_count += 1
        data = data + '%s' % line
    f.close()
    return data, line_count

def delete_file(saved_lotto_file):
    import os
    if os.path.isfile(saved_lotto_file):
        os.remove(saved_lotto_file)
    else:
        print("Error: %s file not found" % saved_lotto_file)

def main():
    saved_lotto_file = './lotto_numbers.txt'
    count = 2
    games = 0
    while True:
        calculate()
        print "[+] Number selection: the generated numbers are selected when one or more of them match the computer-generated numbers"
        print "[+] Analysis results"
        print "----------------------------"
        print " 1. 5th prize / 3 numbers matched: %d times" % match3
        print " 2. 4th prize / 4 numbers matched: %d times" % match4
        print " 3. 3rd prize / 5 numbers matched: %d times" % match5
        print " 4. 1st prize / all matched: %d times" % match6
        print "----------------------------"
        print ">>>>>>>>>>"
        if (match6 >= count):
            games += 1
            print "[+] Generated numbers: %s" % user_list
            print "[+] All 6 numbers matched %d times / saving these numbers." % (match6)
            print "[+] 5 games are played in total; this is game %d." % games
            f = open(saved_lotto_file, 'a')
            f.write('Auto %s\n' % (user_list))
            f.close()
        else:
            print " [+] No matching condition, drawing numbers again from the start."
            print
            continue

        if games == 5:
            print "[+] %d games are finished, ending the draw." % games
            print "[+] Sending the drawn numbers to $YOUR API."
            next_saturday = get_next_saturday()
            read_lotto_data, read_lotto_data_line_count = read_file(saved_lotto_file)
            game_price = 1000
            total_price = game_price * read_lotto_data_line_count
            contents = '\
** Always Good Lotto **\n\n\
「Half luck, half donation\n\
\t\tNanum Lotto 6/45\n\
Draw date : %s (Sat)\n\
-----------------------------------\n\
%s\
-----------------------------------\n\
Total %d games\n\
Price %d KRW\n\
>> If it hits, we split it 50/50, right?' % (next_saturday, read_lotto_data, read_lotto_data_line_count, total_price)
            print
            print ">>>>>> Message start <<<<<<<"
            print
            print contents
            print
            print ">>>>>> Message end <<<<<<<"
            try:
                # send contents to your SMS API here
                # (a bare try block is a syntax error, so pass keeps it valid)
                pass
            except Exception, e:
                print '%s[-] Exception::%s%s' % (bcolors.WARNING, e, bcolors.ENDC)
                pass
            else:
                delete_file(saved_lotto_file)
            break

if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        sys.exit(0)
    except Exception, e:
        print '%s[-] Exception::%s%s' % (bcolors.WARNING, e, bcolors.ENDC)
apache-2.0
-2,032,991,578,046,145,300
25.928571
86
0.512505
false
selvasingh/azure-sdk-for-java
eng/pipelines/scripts/generate_overview_from_readme.py
1
3812
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

# Use case: Given a README.md file, generate a readme_overview.html file and place it next
# to the README.md. This will allow the javadocs jar step to append the contents of the
# readme onto the landing page.
#
# This script is necessary, instead of just invoking python markdown2 directly from the
# command line, because the generated overview.html file needs to be enclosed inside of <body>
# tags. When the attach-javadocs step runs with the overview option it will append the contents
# to the overview-summary.html, which is the landing page. If the <body> tags aren't in place
# the page won't be formatted correctly.
#
# Regardless of whether or not there's a README.md file, the doc version and return-to-index
# link will be written to the overview file. If there is a readme, its contents will be added
# after that.
import argparse
from bs4 import BeautifulSoup
import markdown2
import os.path
from io import open
import re
import sys

def generate_overview(readme_file, version):

    readme_exists = False
    if os.path.exists(readme_file) and os.path.isfile(readme_file):
        readme_exists = True
    else:
        # Not every artifact has a README.md file. If the file doesn't exist then
        # just output a message which will end up in the build logs. This will
        # allow processing to continue without failing the build the way a raise would.
        print('{} does not exist'.format(readme_file))

    html_overview_file = str(readme_file).lower().replace('readme.md', 'readme_overview.html')

    if (readme_exists):
        with open(readme_file, 'r', encoding='utf-8') as f:
            readme_content = f.read()

        # markdown2.markdown will create html from the readme.md file. The fenced-code-blocks
        # extras being passed into the markdown call is necessary to deal with the embedded
        # code blocks within the readme so they'll display correctly in the html
        html_readme_content = markdown2.markdown(
            re.sub(pattern='@', repl='{@literal @}', string=readme_content, flags=re.MULTILINE),
            extras=["fenced-code-blocks"])

        # Due to javadoc's iFrames the links need to target new tabs otherwise hilarity ensues
        soup = BeautifulSoup(html_readme_content, "html.parser")
        for a in soup.findAll('a'):
            a['target'] = '_blank'

    # The html_readme_content needs to be encapsulated inside of <body> tags in order
    # for the content to correctly be added to the landing page
    with open(html_overview_file, 'w', encoding='utf-8') as f:
        # The literal strings have to be unicode otherwise the write will fail.
        # This will allow this code to work for python 2 and 3
        f.write('<body>')
        f.write('Current version is {}, click <a href="https://azure.github.io/azure-sdk-for-java" target="new">here</a> for the index'.format(version))
        f.write('<br/>')
        if (readme_exists):
            f.write(str(soup))
        f.write('</body>')

def main():
    parser = argparse.ArgumentParser(description='Generate a readme_overview.html from a README.md.')
    parser.add_argument('--readme-file', '--rf',
                        help='path to the README.md file to generate the readme_overview.html file from.',
                        required=True)
    parser.add_argument('--version', '--v',
                        help='Version, used on the landing page to identify the version.',
                        required=True)
    args = parser.parse_args()
    # verify the argument is a readme.md file
    if str(args.readme_file).lower().endswith('readme.md'):
        generate_overview(args.readme_file, args.version)
    else:
        raise ValueError('{} is not a readme file. The --readme-file argument must be a readme.md file.'.format(args.readme_file))

if __name__ == '__main__':
    main()
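# --- Editorial addition: example invocation (illustration only; the README
# path below is hypothetical). Running this next to an artifact's README.md
# writes readme_overview.html alongside it:
#
#   python generate_overview_from_readme.py \
#       --readme-file sdk/core/azure-core/README.md \
#       --version 1.0.0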
mit
3,548,446,964,432,801,300
49.157895
164
0.695435
false
crustymonkey/pylibgal3
libg3/Errors.py
1
1431
#
# Author: Jay Deiman
# Email: admin@splitstreams.com
#
# This file is part of pylibgal3.
#
# pylibgal3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pylibgal3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pylibgal3.  If not, see <http://www.gnu.org/licenses/>.
#

__all__ = ['G3Error', 'G3RequestError', 'G3InvalidRespError',
           'G3UnknownTypeError', 'G3AuthError', 'G3UnknownError']

class G3Error(Exception):
    pass

class G3RequestError(G3Error):
    def __init__(self, errDict):
        self.errors = errDict
        self._message = self._getMessage()

    def _getMessage(self):
        ret = ''
        for e in self.errors.items():
            ret += '%s: %r\n' % e
        return ret

    def __str__(self):
        return self._message

class G3InvalidRespError(G3Error):
    pass

class G3UnknownTypeError(G3InvalidRespError):
    pass

class G3AuthError(G3Error):
    pass

class G3UnknownError(G3Error):
    pass
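# --- Editorial addition: a small illustration (not part of the original file)
# of how G3RequestError formats the error dict it is given. The keys below
# are hypothetical:
#
#   try:
#       raise G3RequestError({'status': 403, 'message': 'forbidden'})
#   except G3RequestError, e:       # Python 2 syntax, matching this module
#       print e                     # prints one "key: value" pair per line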
gpl-3.0
4,734,498,918,985,512,000
27.058824
73
0.671558
false
mwilliamson/toodlepip
toodlepip/build.py
1
3620
from . import config, files
from .consoles import Console, Command
from .platforms import builders
from .temp import create_temp_dir

def create_builder(shell, stdout):
    return Builder(builders, Console(shell, stdout))

class Builder(object):
    def __init__(self, builders, console):
        self._console = console
        self._builders = builders

    def build(self, path):
        with create_temp_dir() as temp_dir:
            project_dir = temp_dir.path
            self._console.run_all(
                "Copying project",
                [],
                quiet=True,
            )
            files.copy(path, project_dir)
            project_config = config.read(path)
            language_builder = self._builders[project_config.language](self._console)
            for entry in language_builder.matrix(project_config):
                result = self._build_entry(language_builder, project_dir,
                                           project_config, entry)
                if result.return_code != 0:
                    return BuildResult(result.return_code)
            return BuildResult(0)

    def _build_entry(self, language_builder, project_dir, project_config, entry):
        with language_builder.create_runtime(project_dir, entry) as runtime:
            step_runner = StepRunner(CommandsRunner(self._console, runtime, project_dir))
            return step_runner.run_steps(project_config)

class StepRunner(object):
    def __init__(self, commands_runner):
        self._commands_runner = commands_runner

    def run_steps(self, project_config):
        for step_name in ["before_install", "install", "before_script"]:
            result = self._run_step(project_config, step_name)
            if result.return_code != 0:
                return result

        result = self._run_step(project_config, "script")

        if result.return_code == 0:
            after_step = "after_success"
        else:
            after_step = "after_failure"
        self._run_step(project_config, after_step)

        self._run_step(project_config, "after_script")

        return result

    def _run_step(self, project_config, step_name):
        step = self._step(project_config, step_name)
        return self._commands_runner.run_commands(step)

    def _step(self, project_config, name):
        commands = project_config.get_list(name, [])
        return Step(name, commands)

class CommandsRunner(object):
    def __init__(self, console, runtime, project_dir):
        self._console = console
        self._runtime = runtime
        self._project_dir = project_dir

    def run_commands(self, step):
        def _runtime_run_all(description, commands):
            commands = map(_runtime_command, commands)
            return self._console.run_all(
                description,
                commands,
                cwd=self._project_dir
            )

        def _runtime_command(command):
            before_command = self._runtime.before_step(step)
            if before_command:
                return Command.hidden_prefix(command, prefix=before_command)
            else:
                return Command.shell(command)

        return _runtime_run_all("Running {0} commands".format(step.name), step.commands)

class Step(object):
    def __init__(self, name, commands):
        self.name = name
        self.commands = commands

class BuildResult(object):
    def __init__(self, return_code):
        self.return_code = return_code
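# --- Editorial addition: a hedged usage sketch (not part of the original
# file). `shell` stands for whatever shell object the surrounding project
# normally passes to create_builder; sys.stdout serves as the console stream:
#
#   import sys
#   builder = create_builder(shell, sys.stdout)
#   result = builder.build("path/to/project")   # runs the Travis-style steps
#   sys.exit(result.return_code)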
bsd-2-clause
-7,364,044,656,382,404,000
32.831776
96
0.572652
false
opensvn/python
pygame/bpwpapcode/Chapter06/joystickdemo.py
1
3510
import pygame
from pygame.locals import *
from sys import exit

pygame.init()

screen = pygame.display.set_mode((640, 480), 0, 32)

# Get a list of joystick objects
joysticks = []
for joystick_no in xrange(pygame.joystick.get_count()):
    stick = pygame.joystick.Joystick(joystick_no)
    stick.init()
    joysticks.append(stick)

if not joysticks:
    print "Sorry! No joystick(s) to test."
    exit()

active_joystick = 0

pygame.display.set_caption(joysticks[0].get_name())

def draw_axis(surface, x, y, axis_x, axis_y, size):
    line_col = (128, 128, 128)
    num_lines = 40
    step = size / float(num_lines)

    for n in xrange(num_lines):
        line_col = [(192, 192, 192), (220, 220, 220)][n & 1]
        pygame.draw.line(surface, line_col, (x + n * step, y), (x + n * step, y + size))
        pygame.draw.line(surface, line_col, (x, y + n * step), (x + size, y + n * step))

    pygame.draw.line(surface, (0, 0, 0), (x, y + size / 2), (x + size, y + size / 2))
    pygame.draw.line(surface, (0, 0, 0), (x + size / 2, y), (x + size / 2, y + size))

    draw_x = int(x + (axis_x * size + size) / 2.)
    draw_y = int(y + (axis_y * size + size) / 2.)
    draw_pos = (draw_x, draw_y)
    center_pos = (x + size / 2, y + size / 2)
    pygame.draw.line(surface, (0, 0, 0), center_pos, draw_pos, 5)
    pygame.draw.circle(surface, (0, 0, 255), draw_pos, 10)

def draw_dpad(surface, x, y, axis_x, axis_y):
    col = (255, 0, 0)
    if axis_x == -1:
        pygame.draw.circle(surface, col, (x - 20, y), 10)
    elif axis_x == +1:
        pygame.draw.circle(surface, col, (x + 20, y), 10)

    if axis_y == -1:
        pygame.draw.circle(surface, col, (x, y + 20), 10)
    elif axis_y == +1:
        pygame.draw.circle(surface, col, (x, y - 20), 10)

while True:
    joystick = joysticks[active_joystick]

    for event in pygame.event.get():
        if event.type == QUIT:
            exit()
        if event.type == KEYDOWN:
            if event.key >= K_0 and event.key <= K_1:
                num = event.key - K_0
                if num < len(joysticks):
                    active_joystick = num
                    name = joysticks[active_joystick].get_name()
                    pygame.display.set_caption(name)

    # Get a list of all the axes
    axes = []
    for axis_no in xrange(joystick.get_numaxes()):
        axes.append(joystick.get_axis(axis_no))

    axis_size = min(256, 640 / (joystick.get_numaxes() / 2))

    pygame.draw.rect(screen, (255, 255, 255), (0, 0, 640, 480))

    # Draw all the axes (analogue sticks)
    x = 0
    for axis_no in xrange(0, len(axes), 2):
        axis_x = axes[axis_no]
        if axis_no + 1 < len(axes):
            axis_y = axes[axis_no + 1]
        else:
            axis_y = 0.
        draw_axis(screen, x, 0, axis_x, axis_y, axis_size)
        x += axis_size

    # Draw all the hats (d-pads)
    x, y = 50, 300
    for hat_no in xrange(joystick.get_numhats()):
        axis_x, axis_y = joystick.get_hat(hat_no)
        draw_dpad(screen, x, y, axis_x, axis_y)
        x += 100

    # Draw all the buttons
    x, y = 0.0, 390.0
    button_width = 640 / joystick.get_numbuttons()
    for button_no in xrange(joystick.get_numbuttons()):
        if joystick.get_button(button_no):
            pygame.draw.circle(screen, (0, 255, 0), (int(x), int(y)), 20)
        x += button_width

    pygame.display.update()
gpl-2.0
2,817,038,411,529,330,000
29.909091
78
0.533903
false
cgwalters/imagefactory
imagefactory_plugins/OVA/OVA.py
1
3390
# encoding: utf-8

#   Copyright 2013 Red Hat, Inc.
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.

import logging
import uuid
import zope.interface  # import the subpackage explicitly for implements() below
import inspect
from imgfac.CloudDelegate import CloudDelegate
from imgfac.PersistentImageManager import PersistentImageManager
from imgfac.TargetImage import TargetImage
from imagefactory_plugins.ovfcommon.ovfcommon import RHEVOVFPackage, VsphereOVFPackage
from imgfac.ImageFactoryException import ImageFactoryException
from oz.ozutil import copyfile_sparse

class OVA(object):
    zope.interface.implements(CloudDelegate)

    def __init__(self):
        self.log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))

    def builder_should_create_target_image(self, builder, target, image_id, template, parameters):
        retval = False
        if isinstance(builder.base_image, TargetImage):
            if builder.base_image.target in ('vsphere', 'rhevm'):
                retval = True
        self.log.info('builder_should_create_target_image() called on OVA plugin - returning %s' % retval)
        return retval

    def builder_did_create_target_image(self, builder, target, image_id, template, parameters):
        self.log.info('builder_did_create_target_image() called in OVA plugin')
        self.status = "BUILDING"

        self.target_image = builder.base_image
        self.base_image = PersistentImageManager.default_manager().image_with_id(self.target_image.base_image_id)
        self.image = builder.target_image
        self.parameters = parameters

        # This lets our logging helper know what image is being operated on
        self.active_image = self.image

        self.generate_ova()

        self.percent_complete = 100
        self.status = "COMPLETED"

    def generate_ova(self):
        if self.target_image.target == 'rhevm':
            klass = RHEVOVFPackage
        elif self.target_image.target == 'vsphere':
            klass = VsphereOVFPackage
        else:
            raise ImageFactoryException("OVA plugin only supports rhevm and vsphere images")

        klass_parameters = dict()

        if self.parameters:
            params = ['ovf_cpu_count', 'ovf_memory_mb',
                      'rhevm_default_display_type', 'rhevm_description', 'rhevm_os_descriptor',
                      'vsphere_product_name', 'vsphere_product_vendor_name', 'vsphere_product_version',
                      'vsphere_virtual_system_type']
            for param in params:
                if (self.parameters.get(param) and
                        klass.__init__.func_code.co_varnames.__contains__(param)):
                    klass_parameters[param] = self.parameters.get(param)

        pkg = klass(disk=self.image.data, base_image=self.base_image.data,
                    **klass_parameters)
        ova = pkg.make_ova_package()
        copyfile_sparse(ova, self.image.data)
        pkg.delete()
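# --- Editorial addition: an illustration of the optional parameters this
# plugin forwards to the OVF package classes. The key names are taken from
# the params list above; the values are hypothetical:
#
#   parameters = {
#       'ovf_cpu_count': '2',
#       'ovf_memory_mb': '4096',
#       'rhevm_description': 'Imported by imagefactory',
#   }
#   # Only keys accepted by the chosen package class's __init__ are passed
#   # through, per the func_code.co_varnames check in generate_ova().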
apache-2.0
5,084,509,486,704,579,000
37.965517
113
0.666372
false
philgyford/django-ditto
setup.py
1
4093
import codecs
import os
import re
import sys

from setuptools import setup

with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
    README = readme.read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

def read(filepath):
    return codecs.open(filepath, "r", "utf-8").read()

def get_entity(package, entity):
    """
    eg, get_entity('ditto', 'version') returns `__version__` value in
    `__init__.py`.
    """
    init_py = open(os.path.join(package, "__init__.py")).read()
    find = "__%s__ = ['\"]([^'\"]+)['\"]" % entity
    return re.search(find, init_py).group(1)

def get_version():
    return get_entity("ditto", "version")

def get_license():
    return get_entity("ditto", "license")

def get_author():
    return get_entity("ditto", "author")

def get_author_email():
    return get_entity("ditto", "author_email")

# Do `python setup.py tag` to tag with the current version number.
if sys.argv[-1] == "tag":
    os.system("git tag -a %s -m 'version %s'" % (get_version(), get_version()))
    os.system("git push --tags")
    sys.exit()

# Do `python setup.py publish` to send current version to PyPI.
if sys.argv[-1] == "publish":
    os.system("python setup.py sdist")
    os.system(
        "twine upload --config-file=.pypirc dist/django-ditto-%s.tar.gz"
        % (get_version())
    )
    sys.exit()

# Do `python setup.py testpublish` to send current version to Test PyPI.
# OUT OF DATE
if sys.argv[-1] == "testpublish":
    os.system("python setup.py sdist")
    os.system(
        "twine upload --config-file=.pypirc --repository-url https://test.pypi.org/legacy/ dist/django-ditto-%s.tar.gz"  # noqa: E501
        % (get_version())
    )
    # os.system("python setup.py bdist_wheel upload")
    sys.exit()

dev_require = ["django-debug-toolbar>=2.0,<4.0", "flake8>=3.8,<3.9", "black==20.8b1"]

tests_require = dev_require + [
    "factory-boy>=2.12.0,<4.0",
    "freezegun>=0.3.12,<2.0",
    "responses>=0.10.7,<1.0",
    "coverage",
]

setup(
    name="django-ditto",
    version=get_version(),
    packages=["ditto"],
    install_requires=[
        "django-imagekit>=4.0,<4.1",
        "django-sortedm2m>=3.0.0,<3.1",
        "django-taggit>=1.2.0,<1.4",
        "flickrapi>=2.4,<2.5",
        "pillow>=7.0.0,<9.0",
        "pytz",
        "twitter-text-python>=1.1.1,<1.2",
        "twython>=3.7.0,<3.9",
    ],
    dependency_links=[],
    tests_require=tests_require,
    extras_require={"dev": dev_require + ["Django>=3.1,<3.3"], "test": tests_require},
    include_package_data=True,
    license=get_license(),
    description=(
        "A Django app to copy stuff from your accounts on "
        "Flickr, Last.fm, Pinboard and Twitter."
    ),
    long_description=read(os.path.join(os.path.dirname(__file__), "README.rst")),
    url="https://github.com/philgyford/django-ditto",
    author=get_author(),
    author_email=get_author_email(),
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Web Environment",
        "Framework :: Django",
        "Framework :: Django :: 2.2",
        "Framework :: Django :: 3.1",
        "Framework :: Django :: 3.2",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
    ],
    keywords="ditto twitter flickr pinboard last.fm",
    project_urls={
        "Blog posts": "https://www.gyford.com/phil/writing/tags/django-ditto/",
        "Bug Reports": "https://github.com/philgyford/django-ditto/issues",
        "Documentation": "https://django-ditto.readthedocs.io/",
        "Source": "https://github.com/philgyford/django-ditto",
    },
)
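# --- Editorial addition (illustration only): with the extras_require defined
# in this setup.py, a local development install with the dev extras would be:
#
#   pip install -e ".[dev]"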
mit
-8,697,545,742,880,782,000
30.484615
133
0.596384
false
armanpazouki/chrono
src/demos/python/demo_crank_plot.py
1
5655
#------------------------------------------------------------------------------
# Name:         pychrono example
# Purpose:
#
# Author:       Alessandro Tasora
#
# Created:      1/01/2019
# Copyright:    (c) ProjectChrono 2019
#------------------------------------------------------------------------------

import pychrono.core as chrono
import pychrono.irrlicht as chronoirr
import matplotlib.pyplot as plt
import numpy as np

print ("Example: create a slider crank and plot results");

# Change this path to asset path, if running from other working dir.
# It must point to the data folder, containing GUI assets (textures, fonts, meshes, etc.)
chrono.SetChronoDataPath("../../../data/")

# ---------------------------------------------------------------------
#
#  Create the simulation system and add items
#

mysystem = chrono.ChSystemNSC()

# Some data shared in the following
crank_center = chrono.ChVectorD(-1, 0.5, 0)
crank_rad = 0.4
crank_thick = 0.1
rod_length = 1.5

# Create four rigid bodies: the truss, the crank, the rod, the piston.

# Create the floor truss
mfloor = chrono.ChBodyEasyBox(3, 1, 3, 1000)
mfloor.SetPos(chrono.ChVectorD(0, -0.5, 0))
mfloor.SetBodyFixed(True)
mysystem.Add(mfloor)

# Create the flywheel crank
mcrank = chrono.ChBodyEasyCylinder(crank_rad, crank_thick, 1000)
mcrank.SetPos(crank_center + chrono.ChVectorD(0, 0, -0.1))
# Since ChBodyEasyCylinder creates a vertical (y up) cylinder, here rotate it:
mcrank.SetRot(chrono.Q_ROTATE_Y_TO_Z)
mysystem.Add(mcrank)

# Create a stylized rod
mrod = chrono.ChBodyEasyBox(rod_length, 0.1, 0.1, 1000)
mrod.SetPos(crank_center + chrono.ChVectorD(crank_rad + rod_length / 2, 0, 0))
mysystem.Add(mrod)

# Create a stylized piston
mpiston = chrono.ChBodyEasyCylinder(0.2, 0.3, 1000)
mpiston.SetPos(crank_center + chrono.ChVectorD(crank_rad + rod_length, 0, 0))
mpiston.SetRot(chrono.Q_ROTATE_Y_TO_X)
mysystem.Add(mpiston)

# Now create constraints and motors between the bodies.

# Create crank-truss joint: a motor that spins the crank flywheel
my_motor = chrono.ChLinkMotorRotationSpeed()
my_motor.Initialize(mcrank,   # the first connected body
                    mfloor,   # the second connected body
                    chrono.ChFrameD(crank_center))  # where to create the motor in abs.space
my_angularspeed = chrono.ChFunction_Const(chrono.CH_C_PI)  # ang.speed: 180°/s
my_motor.SetMotorFunction(my_angularspeed)
mysystem.Add(my_motor)

# Create crank-rod joint
mjointA = chrono.ChLinkLockRevolute()
mjointA.Initialize(mrod, mcrank,
                   chrono.ChCoordsysD(crank_center + chrono.ChVectorD(crank_rad, 0, 0)))
mysystem.Add(mjointA)

# Create rod-piston joint
mjointB = chrono.ChLinkLockRevolute()
mjointB.Initialize(mpiston, mrod,
                   chrono.ChCoordsysD(crank_center + chrono.ChVectorD(crank_rad + rod_length, 0, 0)))
mysystem.Add(mjointB)

# Create piston-truss joint
mjointC = chrono.ChLinkLockPrismatic()
mjointC.Initialize(mpiston, mfloor,
                   chrono.ChCoordsysD(
                       crank_center + chrono.ChVectorD(crank_rad + rod_length, 0, 0),
                       chrono.Q_ROTATE_Z_TO_X))
mysystem.Add(mjointC)

# ---------------------------------------------------------------------
#
#  Create an Irrlicht application to visualize the system
#

myapplication = chronoirr.ChIrrApp(mysystem, 'PyChrono example', chronoirr.dimension2du(1024, 768))

myapplication.AddTypicalSky()
myapplication.AddTypicalLogo(chrono.GetChronoDataPath() + 'logo_pychrono_alpha.png')
myapplication.AddTypicalCamera(chronoirr.vector3df(1, 1, 3), chronoirr.vector3df(0, 1, 0))
myapplication.AddTypicalLights()

# ==IMPORTANT!== Use this function for adding a ChIrrNodeAsset to all items
# in the system. These ChIrrNodeAsset assets are 'proxies' to the Irrlicht meshes.
# If you need a finer control on which item really needs a visualization proxy in
# Irrlicht, just use application.AssetBind(myitem); on a per-item basis.

myapplication.AssetBindAll();

# ==IMPORTANT!== Use this function for 'converting' into Irrlicht meshes the assets
# that you added to the bodies into 3D shapes, so they can be visualized by Irrlicht!

myapplication.AssetUpdateAll();

# ---------------------------------------------------------------------
#
#  Run the simulation
#

# Initialize these lists to store values to plot.
array_time = []
array_angle = []
array_pos = []
array_speed = []

myapplication.SetTimestep(0.005)

# Run the interactive simulation loop
while(myapplication.GetDevice().run()):

    # for plotting, append instantaneous values:
    array_time.append(mysystem.GetChTime())
    array_angle.append(my_motor.GetMotorRot())
    array_pos.append(mpiston.GetPos().x)
    array_speed.append(mpiston.GetPos_dt().x)

    # here happens the visualization and step time integration
    myapplication.BeginScene()
    myapplication.DrawAll()
    myapplication.DoStep()
    myapplication.EndScene()

    # stop simulation after 2 seconds
    if mysystem.GetChTime() > 2:
        myapplication.GetDevice().closeDevice()

# Use matplotlib to make two plots when simulation ended:
fig, (ax1, ax2) = plt.subplots(2, sharex=True)

ax1.plot(array_angle, array_pos)
ax1.set(ylabel='position [m]')
ax1.grid()

ax2.plot(array_angle, array_speed, 'r--')
ax2.set(ylabel='speed [m/s]', xlabel='angle [rad]')
ax2.grid()

# trick to plot \pi on x axis of plots instead of 1 2 3 4 etc.
plt.xticks(np.linspace(0, 2 * np.pi, 5), ['0', '$\pi/2$', '$\pi$', '$3\pi/2$', '$2\pi$'])

# display the two plots (without this call the figures never appear)
plt.show()
bsd-3-clause
-6,782,592,407,969,920,000
31.125
99
0.649098
false
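For reference, the position and speed curves plotted by demo_crank_plot.py above can be cross-checked against the closed-form slider-crank kinematics x(theta) = r*cos(theta) + sqrt(L^2 - r^2*sin(theta)^2). The following is a minimal sketch (not part of the original demo) using the same r = 0.4 m, L = 1.5 m and omega = pi rad/s values; here x is measured from the crank center, so it differs from the piston coordinate logged by the demo only by the constant crank_center.x offset.

import numpy as np

r, L, omega = 0.4, 1.5, np.pi          # same values as in the demo above
theta = np.linspace(0, 2*np.pi, 200)   # one crank revolution, as simulated in 2 s
# piston pin position along the sliding axis, relative to the crank center
x = r*np.cos(theta) + np.sqrt(L**2 - (r*np.sin(theta))**2)
# piston speed via the chain rule: dx/dt = (dx/dtheta) * omega
dx = (-r*np.sin(theta)
      - (r**2*np.sin(theta)*np.cos(theta)) / np.sqrt(L**2 - (r*np.sin(theta))**2)) * omega

print("stroke [m]:", x.max() - x.min())  # should equal 2*r = 0.8 m

The printed stroke should match the 0.8 m swing visible in the demo's first subplot.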
iakinsey/moneybot
moneybot/commands/fedspeak.py
1
1981
from moneybot.command import Command
from moneybot.exc import InvalidCommand
from moneybot.ledger import transfer_balance
from random import choice


class FedSpeak(Command):
    prefix = "fedspeak"
    description = "Speak like the federal reserve!"
    options = [
        "The members of the Board of Governors and the Reserve Bank presidents foresee an implicit strengthening of activity after the current rebalancing is over, although the central tendency of their individual forecasts for real GDP still shows a substantial slowdown, on balance, for the year as a whole.",
        "I would generally expect that today in Washington DC. the probability of changes in the weather is highly uncertain, but we are monitoring the data in such a way that we will be able to update people on changes that are important.",
        "Clearly, sustained low inflation implies less uncertainty about the future, and lower risk premiums imply higher prices of stocks and other earning assets. We can see that in the inverse relationship exhibited by price/earnings ratios and the rate of inflation in the past. But how do we know when irrational exuberance has unduly escalated asset values, which then become subject to unexpected and prolonged contractions as they have in Japan over the past decade?",
        "Risk takers have been encouraged by a perceived increase in economic stability to reach out to more distant time horizons. But long periods of relative stability often engender unrealistic expectations of it[s] permanence and, at times, may lead to financial excess and economic stress.",
        "Modest preemptive action can obviate the need of more drastic actions at a later date and that could destabilize the economy.",
        "Blah blah blah blah. My words have no consequence.",
        "Something something bond markets. Something something interest rates. Something something print money!"
    ]

    async def default(self):
        return choice(self.options)
mit
1,819,040,583,107,992,300
89.045455
476
0.775871
false
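FedSpeak.default above is a coroutine, so outside the bot's own event loop it has to be driven with asyncio. A minimal smoke-test sketch, assuming the Command base class can be constructed without arguments (which may not hold in moneybot itself):

import asyncio

from moneybot.commands.fedspeak import FedSpeak

async def main():
    cmd = FedSpeak()            # assumption: no constructor arguments needed
    print(await cmd.default())  # prints one randomly chosen quote

asyncio.run(main())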
Amber819/chatbot_AM
data_utils.py
1
9930
from __future__ import absolute_import

import os
import re
import numpy as np
import tensorflow as tf
from six.moves import range, reduce

stop_words = set(["a", "an", "the"])


def load_candidates(data_dir, task_id):
    assert task_id > 0 and task_id < 7
    candidates = []
    candidates_f = None
    candid_dic = {}
    if task_id == 6:
        candidates_f = 'dialog-babi-task6-dstc2-candidates.txt'
    else:
        candidates_f = 'dialog-babi-candidates.txt'
    with open(os.path.join(data_dir, candidates_f)) as f:
        for i, line in enumerate(f):
            candid_dic[line.strip().split(' ', 1)[1]] = i
            line = tokenize(line.strip())[1:]
            candidates.append(line)
    # return candidates, dict((' '.join(cand), i) for i, cand in enumerate(candidates))
    return candidates, candid_dic


def load_dialog_task(data_dir, task_id, candid_dic, isOOV):
    '''Load the nth task. There are 6 dialog bAbI tasks in total
    (the assert below enforces 1 <= task_id <= 6).

    Returns a tuple containing the training, testing and validation data
    for the task.
    '''
    assert task_id > 0 and task_id < 7
    files = os.listdir(data_dir)
    files = [os.path.join(data_dir, f) for f in files]
    s = 'dialog-babi-task{}-'.format(task_id)
    train_file = [f for f in files if s in f and 'trn' in f][0]
    if isOOV:
        test_file = [f for f in files if s in f and 'tst-OOV' in f][0]
    else:
        test_file = [f for f in files if s in f and 'tst.' in f][0]
    val_file = [f for f in files if s in f and 'dev' in f][0]
    train_data = get_dialogs(train_file, candid_dic)
    test_data = get_dialogs(test_file, candid_dic)
    val_data = get_dialogs(val_file, candid_dic)
    return train_data, test_data, val_data


def tokenize(sent):
    '''Return the tokens of a sentence including punctuation, lowercased,
    with stop words ("a", "an", "the") and a trailing ".", "?" or "!" removed.

    >>> tokenize('Bob dropped the apple. Where is the apple?')
    ['bob', 'dropped', 'apple', '.', 'where', 'is', 'apple']
    '''
    sent = sent.lower()
    if sent == '<silence>':
        return [sent]
    # the original pattern r'(\W+)?' can match the empty string, which raises
    # an error on Python 3.5/3.6 and splits between every character on 3.7+;
    # r'(\W+)' keeps the intended behaviour on all versions
    result = [x.strip() for x in re.split(r'(\W+)', sent)
              if x.strip() and x.strip() not in stop_words]
    if not result:
        result = ['<silence>']
    if result[-1] == '.' or result[-1] == '?' or result[-1] == '!':
        result = result[:-1]
    return result


def parse_dialogs_per_response(lines, candid_dic):
    '''Parse dialogs provided in the babi tasks format.'''
    data = []
    context = []
    u = None
    r = None
    for line in lines:
        line = line.strip()
        if line:
            nid, line = line.split(' ', 1)
            nid = int(nid)
            if '\t' in line:
                u, r = line.split('\t')
                # a = candid_dic[r]
                u = tokenize(u)
                r = tokenize(r)
                a = r
                # temporal encoding, and utterance/response encoding
                # data.append((context[:], u[:], candid_dic[' '.join(r)]))
                data.append((context[:], u[:], a))
                # data.append((u[:], u[:], a))
                context.append(u)
                context.append(r)
            else:
                r = tokenize(line)
                context.append(r)
        else:
            # clear context
            context = []
    return data


def get_dialogs(f, candid_dic):
    '''Given a file name, read the file, retrieve the dialogs, and then
    convert the sentences into a single dialog.

    If max_length is supplied, any stories longer than max_length tokens
    will be discarded.
    '''
    with open(f) as f:
        return parse_dialogs_per_response(f.readlines(), candid_dic)


def vectorize_candidates_sparse(candidates, word_idx):
    shape = (len(candidates), len(word_idx) + 1)
    indices = []
    values = []
    for i, candidate in enumerate(candidates):
        for w in candidate:
            indices.append([i, word_idx[w]])
            values.append(1.0)
    return tf.SparseTensor(indices, values, shape)


def vectorize_candidates(candidates, word_idx, sentence_size):
    shape = (len(candidates), sentence_size)
    C = []
    for i, candidate in enumerate(candidates):
        lc = max(0, sentence_size - len(candidate))
        C.append([word_idx[w] if w in word_idx else 1 for w in candidate] + [0] * lc)
    return C


def vectorize_data(data, word_idx, sentence_size, batch_size, candidates_size, max_memory_size):
    """Vectorize stories and queries.

    If a sentence length < sentence_size, the sentence will be padded with 0's.
    If a story length < memory_size, the story will be padded with empty memories.
    Empty memories are 1-D arrays of length sentence_size filled with 0's.
    The answer array is returned as a one-hot encoding.
    """
    S = []
    Q = []
    A = []
    data.sort(key=lambda x: len(x[0]), reverse=True)
    for i, (story, query, answer) in enumerate(data):
        if i % batch_size == 0:
            memory_size = max(1, min(max_memory_size, len(story)))
        ss = []
        # the original iterated with ``for i, sentence in enumerate(story, 1)``;
        # the index was unused and shadowed the outer loop variable, so drop it
        for sentence in story:
            ls = max(0, sentence_size - len(sentence))
            ss.append([word_idx[w] if w in word_idx else 0 for w in sentence] + [0] * ls)

        # take only the most recent sentences that fit in memory
        ss = ss[::-1][:memory_size][::-1]

        # pad to memory_size
        lm = max(0, memory_size - len(ss))
        for _ in range(lm):
            ss.append([0] * sentence_size)

        lq = max(0, sentence_size - len(query))
        q = [word_idx[w] if w in word_idx else 0 for w in query] + [0] * lq

        S.append(np.array(ss))
        Q.append(np.array(q))
        A.append(np.array(answer))
    return S, Q, A


def vectorize_attnNew(data, word_idx, sentence_size, candidates_size, max_memory_size):
    """Vectorize stories and queries.

    If a sentence length < sentence_size, the sentence will be padded with 0's.
    If a story length < memory_size, the story will be padded with empty memories.
    Empty memories are 1-D arrays of length sentence_size filled with 0's.
    The answer array is returned as a one-hot encoding.
    """
    S = []
    Q = []
    A = []
    data.sort(key=lambda x: len(x[0]), reverse=True)
    for i, (story, query, answer) in enumerate(data):
        ss = []
        for sentence in story[::-1]:
            if len(sentence) + len(ss) <= max_memory_size:
                ss = [word_idx[w] if w in word_idx else 1 for w in sentence] + ss
            else:
                break
        ss.append(3)
        ls = max(0, max_memory_size - len(ss))
        ss = ss + [0] * ls

        query.append('</S>')
        lq = max(0, sentence_size - len(query))
        q = [word_idx[w] if w in word_idx else 0 for w in query] + [0] * lq

        answer.append('</S>')
        la = max(0, candidates_size - len(answer))
        # bug fixes: the original sliced with the undefined name
        # ``candidate_sentence_size`` (the parameter here is ``candidates_size``)
        # and appended the raw ``answer`` tokens instead of the vectorized ``a``
        a = [word_idx[w] if w in word_idx else 1 for w in answer[-candidates_size:]] + [0] * la

        S.append(np.array(ss))
        Q.append(np.array(q))
        A.append(np.array(a))
    return S, Q, A


def vectorize_seq2seq(data, word_idx, sentence_size, batch_size, candidate_sentence_size):
    """Vectorize stories and queries.

    If a sentence length < sentence_size, the sentence will be padded with 0's.
    If a story length < memory_size, the story will be padded with empty memories.
    Empty memories are 1-D arrays of length sentence_size filled with 0's.
    The answer array is returned as a one-hot encoding.
    """
    S = []
    A = []
    data.sort(key=lambda x: len(x[0]), reverse=True)
    newdata = []
    for i, (story, query, answer) in enumerate(data):
        story.append(query)
        ss = []
        for sentence in story[::-1]:
            if len(sentence) + len(ss) < sentence_size:
                ss = [word_idx[w] if w in word_idx else 1 for w in sentence] + ss
            else:
                break
        newdata.append((ss, query, answer))
    newdata.sort(key=lambda x: len(x[0]), reverse=True)
    for i, (story, query, answer) in enumerate(newdata):
        if i % batch_size == 0:
            memory_size = max(1, min(sentence_size, len(story)))
        # take only the most recent sentences that fit in memory
        ls = max(0, sentence_size - len(story))
        story.append(3)
        story = story + [0] * ls
        ss = story[::-1][:memory_size][::-1]
        S.append(ss)

        answer.append('</S>')
        la = max(0, candidate_sentence_size - len(answer))
        a = [word_idx[w] if w in word_idx else 1 for w in answer[-candidate_sentence_size:]] + [0] * la
        A.append(a)
    return S, A


def vectorize_seq2seq_fix(data, word_idx, sentence_size, batch_size, candidate_sentence_size):
    """Vectorize stories and queries.

    If a sentence length < sentence_size, the sentence will be padded with 0's.
    If a story length < memory_size, the story will be padded with empty memories.
    Empty memories are 1-D arrays of length sentence_size filled with 0's.
    The answer array is returned as a one-hot encoding.
    """
    S = []
    A = []
    data.sort(key=lambda x: len(x[0]), reverse=True)
    newdata = []
    for i, (story, query, answer) in enumerate(data):
        story.append(query)
        ss = []
        for sentence in story[::-1]:
            if len(sentence) + len(ss) < sentence_size:
                ss = [word_idx[w] if w in word_idx else 1 for w in sentence] + ss
            else:
                break
        newdata.append((ss, query, answer))
    newdata.sort(key=lambda x: len(x[0]), reverse=True)
    memory_size = sentence_size
    for i, (story, query, answer) in enumerate(newdata):
        # take only the most recent sentences that fit in memory
        ls = max(0, sentence_size - len(story))
        story.append(3)
        story = story + [0] * ls
        S.append(story)

        answer.append('</S>')
        la = max(0, candidate_sentence_size - len(answer))
        a = [word_idx[w] if w in word_idx else 1 for w in answer[-candidate_sentence_size:]] + [0] * la
        A.append(a)
    return S, A
mit
-5,978,191,130,541,190,000
33.010274
115
0.582779
false
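A quick way to see what the helpers in data_utils.py produce is to run tokenize and vectorize_candidates on a toy vocabulary. A minimal sketch, assuming data_utils.py and its dependencies (TensorFlow, six) are importable from the working directory; the word ids below are made up, with 0 and 1 conventionally used for padding and unknown words:

from data_utils import tokenize, vectorize_candidates

print(tokenize('Where is the apple?'))
# -> ['where', 'is', 'apple']   (lowercased; 'the' is a stop word, trailing '?' is dropped)

word_idx = {'where': 2, 'is': 3, 'apple': 4}
cands = [['where', 'is', 'apple'], ['apple']]
print(vectorize_candidates(cands, word_idx, sentence_size=5))
# -> [[2, 3, 4, 0, 0], [4, 0, 0, 0, 0]]   (right-padded with 0 up to sentence_size)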
vitek/wwwatch
wwwatch/storage.py
1
3245
import socket
import json
from collections import defaultdict

import redis


class BasicStorage(object):
    def __init__(self):
        self.counters = {}

    def register_counter(self, name=''):
        if name not in self.counters:
            self.counters[name] = defaultdict(int)
        return self.counters[name]

    def flush(self, path, position):
        # note: subclasses implement flush(path, position); the original
        # declared an extra ``counter`` argument here that nothing used
        raise NotImplementedError

    def get_last_position(self):
        raise NotImplementedError


class RedisStorage(BasicStorage):
    def __init__(self, hostname, port, prefix):
        super(RedisStorage, self).__init__()
        self.redis = redis.StrictRedis(hostname, port)
        self.prefix = prefix
        self.hostname = socket.gethostname()
        self.key_path = '{}@{}:path'.format(self.prefix, self.hostname)
        self.key_position = '{}@{}:position'.format(self.prefix, self.hostname)

    def flush_counter(self, counter, pipe, name):
        for key, value in counter.iteritems():
            if type(value) is int:
                pipe.hincrby(name, key, value)
            elif type(value) is float:
                pipe.hincrbyfloat(name, key, value)
            else:
                raise ValueError("Unsupported counter type for {}".format(key))

    def flush(self, path, position):
        with self.redis.pipeline() as pipe:
            for name, counter in self.counters.iteritems():
                if name:
                    key_name = '{}:{}'.format(self.prefix, name)
                else:
                    key_name = self.prefix
                self.flush_counter(counter, pipe, key_name)
                self.flush_counter(counter, pipe,
                                   '{}@{}'.format(key_name, self.hostname))
            pipe.set(self.key_position, position)
            pipe.set(self.key_path, path)
            pipe.execute()
        for counter in self.counters.itervalues():
            counter.clear()

    def get_last_position(self):
        path, position = self.redis.mget(self.key_path, self.key_position)
        if position is not None:
            position = int(position)
        return path, position


class JSONFileStorage(BasicStorage):
    def __init__(self, path):
        super(JSONFileStorage, self).__init__()
        self.path = path

    def read(self):
        try:
            with open(self.path) as fp:
                data = json.load(fp)
        except IOError:
            data = {}
        return data

    def write(self, data):
        data = json.dumps(data)
        with open(self.path, 'w') as fp:
            fp.write(data)

    def get_last_position(self):
        data = self.read()
        return data.get('path', None), data.get('position', 0)

    def flush(self, path, position):
        data = self.read()
        data['path'] = path
        data['position'] = position
        for name, counter in self.counters.iteritems():
            if not name:
                name = 'counters'
            json_counters = data.setdefault(name, {})
            for key, value in counter.iteritems():
                json_counters[key] = json_counters.get(key, 0) + value
        self.write(data)
        for counter in self.counters.itervalues():
            counter.clear()
mit
-7,546,837,987,552,991,000
31.45
79
0.561479
false
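A minimal usage sketch for the JSON backend above (the state file and log path are made up; the module uses iteritems/itervalues, so it runs under Python 2): counters are registered once, incremented in-process, and persisted together with the current log path and offset on flush:

from wwwatch.storage import JSONFileStorage

storage = JSONFileStorage('/tmp/wwwatch-state.json')   # hypothetical state file
hits = storage.register_counter('status')              # a defaultdict(int)
hits['200'] += 3
hits['404'] += 1
storage.flush('/var/log/nginx/access.log', 1024)       # persists counters + offset, then clears them
print(storage.get_last_position())                     # ('/var/log/nginx/access.log', 1024)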
ysrc/xunfeng
vulscan/kunpeng.py
1
4090
# coding:utf-8
from ctypes import *
import _ctypes
import json
import platform
import os
import urllib2
import sys
from urllib import urlretrieve
import zipfile


class kunpeng:
    def __init__(self):
        self.kunpeng = None
        self.system = platform.system().lower()
        self.pwd = os.path.split(os.path.realpath(__file__))[0]
        self.suf_map = {
            'windows': '.dll',
            'darwin': '.dylib',
            'linux': '.so'
        }
        self._load_kunpeng()

    def _get_lib_path(self):
        file_list = os.listdir(self.pwd)
        for v in file_list:
            if 'kunpeng' in v and os.path.splitext(v)[1] == self.suf_map[self.system]:
                return v

    def check_version(self):
        print 'check version'
        release = self._get_release_latest()
        # print(release)
        if release['tag_name'] != self.get_version():
            print 'new version', release['tag_name']
            self._down_release(release['tag_name'])
        return release

    def update_version(self, version):
        self.close()
        os.remove(self.pwd + '/' + self._get_lib_path())
        save_path = self.pwd + \
            '/kunpeng_{}_v{}.zip'.format(self.system, version)
        z_file = zipfile.ZipFile(save_path, 'r')
        dat = z_file.read('kunpeng_c' + self.suf_map[self.system])
        print len(dat)
        new_lib = self.pwd + '/kunpeng_v' + version + self.suf_map[self.system]
        lib_f = open(new_lib, 'wb')
        lib_f.write(dat)
        lib_f.close()
        z_file.close()
        print 'update success', version
        self._load_kunpeng()

    def close(self):
        if self.system == 'windows':
            _ctypes.FreeLibrary(self.kunpeng._handle)
        else:
            handle = self.kunpeng._handle
            del self.kunpeng
            _ctypes.dlclose(handle)

    def _down_release(self, version):
        print 'kunpeng update ', version
        save_path = self.pwd + \
            '/kunpeng_{}_v{}.zip'.format(self.system, version)
        down_url = 'https://github.com/opensec-cn/kunpeng/releases/download/{}/kunpeng_{}_v{}.zip'.format(
            version, self.system.lower(), version)
        print 'url', down_url
        urlretrieve(down_url, save_path, self._callbackinfo)

    def _callbackinfo(self, down, block, size):
        per = 100.0 * (down * block) / size
        if per > 100:
            per = 100
        print '%.2f%%' % per

    def _get_release_latest(self):
        body = urllib2.urlopen(
            'https://api.github.com/repos/opensec-cn/kunpeng/releases/latest').read()
        release = json.loads(body)
        return release

    def get_version(self):
        return self.kunpeng.GetVersion()

    def _load_kunpeng(self):
        lib_path = self._get_lib_path()
        # load the shared library
        self.kunpeng = cdll.LoadLibrary(
            self.pwd + '/' + lib_path)
        # declare argument and return value types for the exported functions
        self.kunpeng.GetPlugins.restype = c_char_p
        self.kunpeng.Check.argtypes = [c_char_p]
        self.kunpeng.Check.restype = c_char_p
        self.kunpeng.SetConfig.argtypes = [c_char_p]
        self.kunpeng.GetVersion.restype = c_char_p
        print self.get_version()

    def get_plugin_list(self):
        result = self.kunpeng.GetPlugins()
        return json.loads(result)

    def set_config(self, timeout, pass_list):
        config = {
            'timeout': timeout,
            'pass_list': pass_list
        }
        self.kunpeng.SetConfig(json.dumps(config))

    def check(self, t, netloc, kpid):
        task_dic = {
            'type': t,
            'netloc': netloc,
            'target': kpid
        }
        r = json.loads(self.kunpeng.Check(json.dumps(task_dic)))
        result = ''
        if not r:
            return ''
        for v in r:
            result += v['remarks'] + ','
        return result


if __name__ == '__main__':
    kp = kunpeng()
    print(kp.pwd)
    print(kp._get_lib_path())
    # new_release = kp.check_version()
    # if new_release:
    kp.update_version('20190225')
gpl-3.0
-7,708,567,594,290,426,000
29.059259
106
0.55101
false
widowild/messcripts
exercice/python3/chap13/base_exercice_13_23.py
1
1776
# -*- coding:Utf8 -*-

# Python programming exam - 6TSIb - June 2004

from tkinter import *


class FaceDom(object):
    def __init__(self, can, val, pos, taille=70):
        self.can = can                          # ***
        x, y, c = pos[0], pos[1], taille/2
        can.create_rectangle(x-c, y-c, x+c, y+c, fill='ivory', width=2)
        d = taille/3                            # ***
        self.pList = []                         # ***
        pDispo = [((0,0),), ((-d,d),(d,-d)), ((-d,-d), (0,0), (d,d))]
        disp = pDispo[val - 1]                  # ***
        for p in disp:
            self.cercle(x + p[0], y + p[1], 5, 'red')

    def cercle(self, x, y, r, coul):            # ***
        self.pList.append(self.can.create_oval(x-r, y-r, x+r, y+r, fill=coul))

    def effacer(self):                          # ***
        for p in self.pList:
            self.can.delete(p)


class Projet(Frame):
    def __init__(self, larg, haut):
        Frame.__init__(self)
        self.larg, self.haut = larg, haut
        self.can = Canvas(self, bg='dark green', width=larg, height=haut)
        self.can.pack(padx=5, pady=5)           # ***
        bList = [("A", self.boutA), ("B", self.boutB),
                 ("C", self.boutC), ("D", self.boutD),
                 ("Quitter", self.boutQuit)]
        for b in bList:
            Button(self, text=b[0], command=b[1]).pack(side=LEFT)
        self.pack()

    def boutA(self):
        self.d3 = FaceDom(self.can, 3, (100,100), 50)

    def boutB(self):
        self.d2 = FaceDom(self.can, 2, (200,100), 80)

    def boutC(self):
        self.d1 = FaceDom(self.can, 1, (350,100), 110)

    def boutD(self):                            # ***
        self.d3.effacer()

    def boutQuit(self):
        self.master.destroy()


Projet(500, 300).mainloop()
gpl-3.0
-3,131,348,597,347,846,000
27.645161
78
0.469032
false
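The exam file above only defines pip layouts for die faces 1-3 (pDispo). For reference, a full table for faces 1-6 in the same offset convention could look like this (a sketch, not part of the exam):

# pip offsets for faces 1..6, scaled by d = taille/3 as in FaceDom.__init__
def pip_layouts(d):
    return [
        ((0, 0),),                                               # 1
        ((-d, d), (d, -d)),                                      # 2
        ((-d, -d), (0, 0), (d, d)),                              # 3
        ((-d, -d), (-d, d), (d, -d), (d, d)),                    # 4
        ((-d, -d), (-d, d), (0, 0), (d, -d), (d, d)),            # 5
        ((-d, -d), (-d, 0), (-d, d), (d, -d), (d, 0), (d, d)),   # 6
    ]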
ingadhoc/demo
demo_simple/__manifest__.py
1
3001
##############################################################################
#
#    Copyright (C) 2015  ADHOC SA  (http://www.adhoc.com.ar)
#    All Rights Reserved.
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Demo Simple',
    'version': '13.0.1.0.0',
    'category': 'Tools',
    'sequence': 14,
    'summary': '',
    'author': 'ADHOC SA',
    'website': 'www.adhoc.com.ar',
    'license': 'AGPL-3',
    'images': [
    ],
    'depends': [
        'l10n_ar_demo',
        # 'l10n_ar_stock',
        # 'l10n_ar_account_tax_settlement',
        # 'l10n_ar_account_reports',
        'account_accountant',
        # 'account_multic_fix',
        # 'account_debt_management',
        'base_currency_inverse_rate',
        # 'l10n_ar_aeroo_purchase',
        # 'l10n_ar_aeroo_sale',
        # 'l10n_ar_aeroo_stock',
        # 'l10n_ar_aeroo_payment_group',
        'l10n_ar_bank',
        # 'product_catalog_aeroo_report_public_categ',
        # 'product_price_taxes_included',
        'purchase_quotation_products',
        'sale_quotation_products',
        # 'project_description',
        # 'l10n_ar_website_sale',
        'account_transfer_unreconcile',
        'purchase_subscription',
        # 'payment_todopago',
        'account_accountant_ux',
        'account_ux',
        'base_ux',
        'helpdesk_timesheet_ux',
        'hr_timesheet_ux',
        'helpdesk_ux',
        # 'hr_timesheet_attendance_ux',
        # 'stock_ux',
        'purchase_ux',
        'project_ux',
        # 'sale_stock_ux',
        'sale_ux',
        'product_ux',
        'sale_subscription_ux',
        # 'account_multicompany_ux',
        'sale_timesheet_ux',
        # 'purchase_multic_fix',
        # 'sale_stock_multic_fix',
        'web_decimal_numpad_dot',
        # 'mail_internal',
        # oca
        # 'stock_picking_invoice_link',
        'mail_tracking',
        'mass_editing',
        # 'web_advanced_search',
        # 'web_search_with_and',
        # 'stock_no_negative',
        # odoo modules
        'stock',
        'hr_attendance',
        'purchase',
        'project',
    ],
    'data': [
    ],
    'demo': [
        'users_data.xml',
    ],
    'test': [
    ],
    'installable': True,
    'auto_install': False,
    'application': False,
}
agpl-3.0
-4,743,295,325,270,112,000
29.313131
78
0.528491
false
kellyschrock/ardupilot
Tools/autotest/apmrover2.py
1
80558
#!/usr/bin/env python # Drive APMrover2 in SITL from __future__ import print_function import os import pexpect import time from common import AutoTest from common import AutoTestTimeoutException from common import MsgRcvTimeoutException from common import NotAchievedException from common import PreconditionFailedException from pysim import util from pymavlink import mavutil # get location of scripts testdir = os.path.dirname(os.path.realpath(__file__)) SITL_START_LOCATION = mavutil.location(40.071374969556928, -105.22978898137808, 1583.702759, 246) class AutoTestRover(AutoTest): def log_name(self): return "APMrover2" def test_filepath(self): return os.path.realpath(__file__) def sitl_start_location(self): return SITL_START_LOCATION def default_frame(self): return "rover" def is_rover(self): return True def get_stick_arming_channel(self): return int(self.get_parameter("RCMAP_ROLL")) ########################################################## # TESTS DRIVE ########################################################## # Drive a square in manual mode def drive_square(self, side=50): """Drive a square, Driving N then E .""" self.context_push() ex = None try: self.progress("TEST SQUARE") self.set_parameter("RC7_OPTION", 7) self.set_parameter("RC8_OPTION", 58) self.mavproxy.send('switch 5\n') self.wait_mode('MANUAL') self.wait_ready_to_arm() self.arm_vehicle() self.clear_wp() # first aim north self.progress("\nTurn right towards north") self.reach_heading_manual(10) # save bottom left corner of box as home AND waypoint self.progress("Save HOME") self.save_wp() self.progress("Save WP") self.save_wp() # pitch forward to fly north self.progress("\nGoing north %u meters" % side) self.reach_distance_manual(side) # save top left corner of square as waypoint self.progress("Save WP") self.save_wp() # roll right to fly east self.progress("\nGoing east %u meters" % side) self.reach_heading_manual(100) self.reach_distance_manual(side) # save top right corner of square as waypoint self.progress("Save WP") self.save_wp() # pitch back to fly south self.progress("\nGoing south %u meters" % side) self.reach_heading_manual(190) self.reach_distance_manual(side) # save bottom right corner of square as waypoint self.progress("Save WP") self.save_wp() # roll left to fly west self.progress("\nGoing west %u meters" % side) self.reach_heading_manual(280) self.reach_distance_manual(side) # save bottom left corner of square (should be near home) as waypoint self.progress("Save WP") self.save_wp() self.progress("Checking number of saved waypoints") num_wp = self.save_mission_to_file( os.path.join(testdir, "rover-ch7_mission.txt")) expected = 7 # home + 6 toggled in if num_wp != expected: raise NotAchievedException("Did not get %u waypoints; got %u" % (expected, num_wp)) # TODO: actually drive the mission self.clear_wp() except Exception as e: self.progress("Caught exception: %s" % str(e)) ex = e self.disarm_vehicle() self.context_pop() if ex: raise ex def drive_left_circuit(self): """Drive a left circuit, 50m on a side.""" self.mavproxy.send('switch 6\n') self.wait_mode('MANUAL') self.set_rc(3, 2000) self.progress("Driving left circuit") # do 4 turns for i in range(0, 4): # hard left self.progress("Starting turn %u" % i) self.set_rc(1, 1000) self.wait_heading(270 - (90*i), accuracy=10) self.set_rc(1, 1500) self.progress("Starting leg %u" % i) self.wait_distance(50, accuracy=7) self.set_rc(3, 1500) self.progress("Circuit complete") # def test_throttle_failsafe(self, home, distance_min=10, side=60, # timeout=300): # """Fly east, 
Failsafe, return, land.""" # # self.mavproxy.send('switch 6\n') # manual mode # self.wait_mode('MANUAL') # self.mavproxy.send("param set FS_ACTION 1\n") # # # first aim east # self.progress("turn east") # if not self.reach_heading_manual(135): # return False # # # fly east 60 meters # self.progress("# Going forward %u meters" % side) # if not self.reach_distance_manual(side): # return False # # # pull throttle low # self.progress("# Enter Failsafe") # self.mavproxy.send('rc 3 900\n') # # tstart = self.get_sim_time() # success = False # while self.get_sim_time() < tstart + timeout and not success: # m = self.mav.recv_match(type='VFR_HUD', blocking=True) # pos = self.mav.location() # home_distance = self.get_distance(home, pos) # self.progress("Alt: %u HomeDistance: %.0f" % # (m.alt, home_distance)) # # check if we've reached home # if home_distance <= distance_min: # self.progress("RTL Complete") # success = True # # # reduce throttle # self.mavproxy.send('rc 3 1500\n') # self.mavproxy.expect('APM: Failsafe ended') # self.mavproxy.send('switch 2\n') # manual mode # self.wait_heartbeat() # self.wait_mode('MANUAL') # # if success: # self.progress("Reached failsafe home OK") # return True # else: # self.progress("Failed to reach Home on failsafe RTL - " # "timed out after %u seconds" % timeout) # return False def test_sprayer(self): """Test sprayer functionality.""" self.context_push() ex = None try: rc_ch = 5 pump_ch = 5 spinner_ch = 6 pump_ch_min = 1050 pump_ch_trim = 1520 pump_ch_max = 1950 spinner_ch_min = 975 spinner_ch_trim = 1510 spinner_ch_max = 1975 self.set_parameter("SPRAY_ENABLE", 1) self.set_parameter("SERVO%u_FUNCTION" % pump_ch, 22) self.set_parameter("SERVO%u_MIN" % pump_ch, pump_ch_min) self.set_parameter("SERVO%u_TRIM" % pump_ch, pump_ch_trim) self.set_parameter("SERVO%u_MAX" % pump_ch, pump_ch_max) self.set_parameter("SERVO%u_FUNCTION" % spinner_ch, 23) self.set_parameter("SERVO%u_MIN" % spinner_ch, spinner_ch_min) self.set_parameter("SERVO%u_TRIM" % spinner_ch, spinner_ch_trim) self.set_parameter("SERVO%u_MAX" % spinner_ch, spinner_ch_max) self.set_parameter("SIM_SPR_ENABLE", 1) self.fetch_parameters() self.set_parameter("SIM_SPR_PUMP", pump_ch) self.set_parameter("SIM_SPR_SPIN", spinner_ch) self.set_parameter("RC%u_OPTION" % rc_ch, 15) self.set_parameter("LOG_DISARMED", 1) self.reboot_sitl() self.wait_ready_to_arm() self.arm_vehicle() self.progress("test bootup state - it's zero-output!") self.wait_servo_channel_value(spinner_ch, 0) self.wait_servo_channel_value(pump_ch, 0) self.progress("Enable sprayer") self.set_rc(rc_ch, 2000) self.progress("Testing zero-speed state") self.wait_servo_channel_value(spinner_ch, spinner_ch_min) self.wait_servo_channel_value(pump_ch, pump_ch_min) self.progress("Testing turning it off") self.set_rc(rc_ch, 1000) self.wait_servo_channel_value(spinner_ch, spinner_ch_min) self.wait_servo_channel_value(pump_ch, pump_ch_min) self.progress("Testing turning it back on") self.set_rc(rc_ch, 2000) self.wait_servo_channel_value(spinner_ch, spinner_ch_min) self.wait_servo_channel_value(pump_ch, pump_ch_min) self.progress("Testing speed-ramping") self.set_rc(3, 1700) # start driving forward # this is somewhat empirical... 
self.wait_servo_channel_value(pump_ch, 1695, timeout=60) self.progress("Sprayer OK") except Exception as e: self.progress("Caught exception: %s" % str(e)) ex = e self.context_pop() self.disarm_vehicle(force=True) self.reboot_sitl() if ex: raise ex ################################################# # AUTOTEST ALL ################################################# def drive_mission(self, filename): """Drive a mission from a file.""" self.progress("Driving mission %s" % filename) self.load_mission(filename) self.wait_ready_to_arm() self.arm_vehicle() self.mavproxy.send('switch 4\n') # auto mode self.set_rc(3, 1500) self.wait_mode('AUTO') self.wait_waypoint(1, 4, max_dist=5) self.wait_mode('HOLD', timeout=300) self.disarm_vehicle() self.progress("Mission OK") def test_gripper_mission(self): self.load_mission("rover-gripper-mission.txt") self.change_mode('AUTO') self.wait_ready_to_arm() self.arm_vehicle() self.mavproxy.expect("Gripper Grabbed") self.mavproxy.expect("Gripper Released") self.wait_mode("HOLD") self.disarm_vehicle() def do_get_banner(self): self.mavproxy.send("long DO_SEND_BANNER 1\n") start = time.time() while True: m = self.mav.recv_match(type='STATUSTEXT', blocking=True, timeout=1) if m is not None and "ArduRover" in m.text: self.progress("banner received: %s" % m.text) return if time.time() - start > 10: break raise MsgRcvTimeoutException("banner not received") def drive_brake_get_stopping_distance(self, speed): # measure our stopping distance: old_cruise_speed = self.get_parameter('CRUISE_SPEED') old_accel_max = self.get_parameter('ATC_ACCEL_MAX') # controller tends not to meet cruise speed (max of ~14 when 15 # set), thus *1.2 self.set_parameter('CRUISE_SPEED', speed*1.2) # at time of writing, the vehicle is only capable of 10m/s/s accel self.set_parameter('ATC_ACCEL_MAX', 15) self.change_mode("STEERING") self.set_rc(3, 2000) self.wait_groundspeed(15, 100) initial = self.mav.location() initial_time = time.time() while time.time() - initial_time < 2: # wait for a position update from the autopilot start = self.mav.location() if start != initial: break self.set_rc(3, 1500) self.wait_groundspeed(0, 0.2) # why do we not stop?! 
initial = self.mav.location() initial_time = time.time() while time.time() - initial_time < 2: # wait for a position update from the autopilot stop = self.mav.location() if stop != initial: break delta = self.get_distance(start, stop) self.set_parameter('CRUISE_SPEED', old_cruise_speed) self.set_parameter('ATC_ACCEL_MAX', old_accel_max) return delta def drive_brake(self): old_using_brake = self.get_parameter('ATC_BRAKE') old_cruise_speed = self.get_parameter('CRUISE_SPEED') self.set_parameter('CRUISE_SPEED', 15) self.set_parameter('ATC_BRAKE', 0) self.arm_vehicle() distance_without_brakes = self.drive_brake_get_stopping_distance(15) # brakes on: self.set_parameter('ATC_BRAKE', 1) distance_with_brakes = self.drive_brake_get_stopping_distance(15) # revert state: self.set_parameter('ATC_BRAKE', old_using_brake) self.set_parameter('CRUISE_SPEED', old_cruise_speed) delta = distance_without_brakes - distance_with_brakes if delta < distance_without_brakes * 0.05: # 5% isn't asking for much self.disarm_vehicle() raise NotAchievedException(""" Brakes have negligible effect (with=%0.2fm without=%0.2fm delta=%0.2fm) """ % (distance_with_brakes, distance_without_brakes, delta)) self.disarm_vehicle() self.progress( "Brakes work (with=%0.2fm without=%0.2fm delta=%0.2fm)" % (distance_with_brakes, distance_without_brakes, delta)) def drive_rtl_mission_max_distance_from_home(self): '''maximum distance allowed from home at end''' return 6.5 def drive_rtl_mission(self): self.wait_ready_to_arm() self.arm_vehicle() mission_filepath = os.path.join("ArduRover-Missions", "rtl.txt") self.load_mission(mission_filepath) self.change_mode("AUTO") self.mavproxy.expect('Mission: 3 RTL') self.drain_mav(); m = self.mav.recv_match(type='NAV_CONTROLLER_OUTPUT', blocking=True, timeout=1) if m is None: raise MsgRcvTimeoutException( "Did not receive NAV_CONTROLLER_OUTPUT message") wp_dist_min = 5 if m.wp_dist < wp_dist_min: raise PreconditionFailedException( "Did not start at least %f metres from destination (is=%f)" % (wp_dist_min, m.wp_dist)) self.progress("NAV_CONTROLLER_OUTPUT.wp_dist looks good (%u >= %u)" % (m.wp_dist, wp_dist_min,)) tstart = self.get_sim_time() while True: if self.get_sim_time_cached() - tstart > 600: raise NotAchievedException("Did not get home") self.progress("Distance home: %f (mode=%s)" % (self.distance_to_home(), self.mav.flightmode)) if self.mode_is('HOLD') or self.mode_is('LOITER'): # loiter for balancebot break # the EKF doesn't pull us down to 0 speed: self.wait_groundspeed(0, 0.5, timeout=600) # current Rover blows straight past the home position and ends # up ~6m past the home point. 
home_distance = self.distance_to_home() home_distance_min = 5.5 home_distance_max = self.drive_rtl_mission_max_distance_from_home() if home_distance > home_distance_max: raise NotAchievedException( "Did not stop near home (%f metres distant (%f > want > %f))" % (home_distance, home_distance_min, home_distance_max)) self.disarm_vehicle() self.progress("RTL Mission OK (%fm)" % home_distance) def wait_distance_home_gt(self, distance, timeout=60): home_distance = None tstart = self.get_sim_time() while self.get_sim_time_cached() - tstart < timeout: # m = self.mav.recv_match(type='VFR_HUD', blocking=True) distance_home = self.distance_to_home(use_cached_home=True) self.progress("distance_home=%f want=%f" % (distance_home, distance)) if distance_home > distance: return self.drain_mav() raise NotAchievedException("Failed to get %fm from home (now=%f)" % (distance, home_distance)) def drive_fence_ac_avoidance(self): self.context_push() ex = None try: avoid_filepath = os.path.join(self.mission_directory(), "rover-fence-ac-avoid.txt") self.mavproxy.send("fence load %s\n" % avoid_filepath) self.mavproxy.expect("Loaded 6 geo-fence") self.set_parameter("FENCE_ENABLE", 0) self.set_parameter("PRX_TYPE", 10) self.set_parameter("RC10_OPTION", 40) # proximity-enable self.reboot_sitl() # start = self.mav.location() self.wait_ready_to_arm() self.arm_vehicle() # first make sure we can breach the fence: self.set_rc(10, 1000) self.change_mode("ACRO") self.set_rc(3, 1550) self.wait_distance_home_gt(25) self.change_mode("RTL") self.mavproxy.expect("APM: Reached destination") # now enable avoidance and make sure we can't: self.set_rc(10, 2000) self.change_mode("ACRO") self.wait_groundspeed(0, 0.7, timeout=60) # watch for speed zero self.wait_groundspeed(0, 0.2, timeout=120) except Exception as e: self.progress("Caught exception: %s" % str(e)) ex = e self.context_pop() self.mavproxy.send("fence clear\n") self.disarm_vehicle(force=True) self.reboot_sitl() if ex: raise ex def test_servorelayevents(self): self.mavproxy.send("relay set 0 0\n") off = self.get_parameter("SIM_PIN_MASK") self.mavproxy.send("relay set 0 1\n") on = self.get_parameter("SIM_PIN_MASK") if on == off: raise NotAchievedException( "Pin mask unchanged after relay cmd") self.progress("Pin mask changed after relay command") def test_setting_modes_via_mavproxy_switch(self): fnoo = [(1, 'MANUAL'), (2, 'MANUAL'), (3, 'RTL'), # (4, 'AUTO'), # no mission, can't set auto (5, 'RTL'), # non-existant mode, should stay in RTL (6, 'MANUAL')] for (num, expected) in fnoo: self.mavproxy.send('switch %u\n' % num) self.wait_mode(expected) def test_setting_modes_via_mavproxy_mode_command(self): fnoo = [(1, 'ACRO'), (3, 'STEERING'), (4, 'HOLD'), ] for (num, expected) in fnoo: self.mavproxy.send('mode manual\n') self.wait_mode("MANUAL") self.mavproxy.send('mode %u\n' % num) self.wait_mode(expected) self.mavproxy.send('mode manual\n') self.wait_mode("MANUAL") self.mavproxy.send('mode %s\n' % expected) self.wait_mode(expected) def test_setting_modes_via_modeswitch(self): # test setting of modes through mode switch self.context_push() ex = None try: self.set_parameter("MODE_CH", 8) self.set_rc(8, 1000) # mavutil.mavlink.ROVER_MODE_HOLD: self.set_parameter("MODE6", 4) # mavutil.mavlink.ROVER_MODE_ACRO self.set_parameter("MODE5", 1) self.set_rc(8, 1800) # PWM for mode6 self.wait_mode("HOLD") self.set_rc(8, 1700) # PWM for mode5 self.wait_mode("ACRO") self.set_rc(8, 1800) # PWM for mode6 self.wait_mode("HOLD") self.set_rc(8, 1700) # PWM for mode5 self.wait_mode("ACRO") except 
Exception as e: self.progress("Exception caught") ex = e self.context_pop() if ex is not None: raise ex def test_setting_modes_via_auxswitches(self): self.context_push() ex = None try: self.set_parameter("MODE5", 1) self.mavproxy.send('switch 1\n') # random mode self.wait_heartbeat() self.change_mode('MANUAL') self.mavproxy.send('switch 5\n') # acro mode self.wait_mode("ACRO") self.set_rc(9, 1000) self.set_rc(10, 1000) self.set_parameter("RC9_OPTION", 53) # steering self.set_parameter("RC10_OPTION", 54) # hold self.set_rc(9, 1900) self.wait_mode("STEERING") self.set_rc(10, 1900) self.wait_mode("HOLD") # reset both switches - should go back to ACRO self.set_rc(9, 1000) self.set_rc(10, 1000) self.wait_mode("ACRO") self.set_rc(9, 1900) self.wait_mode("STEERING") self.set_rc(10, 1900) self.wait_mode("HOLD") self.set_rc(10, 1000) # this re-polls the mode switch self.wait_mode("ACRO") self.set_rc(9, 1000) except Exception as e: self.progress("Exception caught") ex = e self.context_pop() if ex is not None: raise ex def test_rc_override_cancel(self): self.change_mode('MANUAL') self.wait_ready_to_arm() self.zero_throttle() self.arm_vehicle() # start moving forward a little: normal_rc_throttle = 1700 throttle_override = 1900 self.progress("Establishing baseline RC input") self.mavproxy.send('rc 3 %u\n' % normal_rc_throttle) tstart = self.get_sim_time_cached() while True: if self.get_sim_time_cached() - tstart > 10: raise AutoTestTimeoutException("Did not get rc change") m = self.mav.recv_match(type='RC_CHANNELS', blocking=True) if m.chan3_raw == normal_rc_throttle: break self.progress("Set override with RC_CHANNELS_OVERRIDE") tstart = self.get_sim_time_cached() while True: if self.get_sim_time_cached() - tstart > 10: raise AutoTestTimeoutException("Did not override") self.progress("Sending throttle of %u" % (throttle_override,)) self.mav.mav.rc_channels_override_send( 1, # target system 1, # targe component 65535, # chan1_raw 65535, # chan2_raw throttle_override, # chan3_raw 65535, # chan4_raw 65535, # chan5_raw 65535, # chan6_raw 65535, # chan7_raw 65535) # chan8_raw m = self.mav.recv_match(type='RC_CHANNELS', blocking=True) self.progress("chan3=%f want=%f" % (m.chan3_raw, throttle_override)) if m.chan3_raw == throttle_override: break self.progress("disabling override and making sure we revert to RC input in good time") tstart = self.get_sim_time_cached() while True: if self.get_sim_time_cached() - tstart > 0.5: raise AutoTestTimeoutException("Did not cancel override") self.progress("Sending cancel of throttle override") self.mav.mav.rc_channels_override_send( 1, # target system 1, # targe component 65535, # chan1_raw 65535, # chan2_raw 0, # chan3_raw 65535, # chan4_raw 65535, # chan5_raw 65535, # chan6_raw 65535, # chan7_raw 65535) # chan8_raw m = self.mav.recv_match(type='RC_CHANNELS', blocking=True) self.progress("chan3=%f want=%f" % (m.chan3_raw, normal_rc_throttle)) if m.chan3_raw == normal_rc_throttle: break self.disarm_vehicle() def test_rc_overrides(self): self.context_push() ex = None try: self.set_parameter("RC12_OPTION", 46) self.reboot_sitl() self.mavproxy.send('switch 6\n') # Manual mode self.wait_mode('MANUAL') self.wait_ready_to_arm() self.mavproxy.send('rc 3 1500\n') # throttle at zero self.arm_vehicle() # start moving forward a little: normal_rc_throttle = 1700 self.mavproxy.send('rc 3 %u\n' % normal_rc_throttle) self.wait_groundspeed(5, 100) # allow overrides: self.set_rc(12, 2000) # now override to stop: throttle_override = 1500 tstart = self.get_sim_time_cached() while True: if 
self.get_sim_time_cached() - tstart > 10: raise AutoTestTimeoutException("Did not reach speed") self.progress("Sending throttle of %u" % (throttle_override,)) self.mav.mav.rc_channels_override_send( 1, # target system 1, # targe component 65535, # chan1_raw 65535, # chan2_raw throttle_override, # chan3_raw 65535, # chan4_raw 65535, # chan5_raw 65535, # chan6_raw 65535, # chan7_raw 65535) # chan8_raw m = self.mav.recv_match(type='VFR_HUD', blocking=True) want_speed = 2.0 self.progress("Speed=%f want=<%f" % (m.groundspeed, want_speed)) if m.groundspeed < want_speed: break # now override to stop - but set the switch on the RC # transmitter to deny overrides; this should send the # speed back up to 5 metres/second: self.set_rc(12, 1000) throttle_override = 1500 tstart = self.get_sim_time_cached() while True: if self.get_sim_time_cached() - tstart > 10: raise AutoTestTimeoutException("Did not stop") print("Sending throttle of %u" % (throttle_override,)) self.mav.mav.rc_channels_override_send( 1, # target system 1, # targe component 65535, # chan1_raw 65535, # chan2_raw throttle_override, # chan3_raw 65535, # chan4_raw 65535, # chan5_raw 65535, # chan6_raw 65535, # chan7_raw 65535) # chan8_raw m = self.mav.recv_match(type='VFR_HUD', blocking=True) want_speed = 5.0 print("Speed=%f want=>%f" % (m.groundspeed, want_speed)) if m.groundspeed > want_speed: break # re-enable RC overrides self.set_rc(12, 2000) # check we revert to normal RC inputs when gcs overrides cease: self.progress("Waiting for RC to revert to normal RC input") while True: m = self.mav.recv_match(type='RC_CHANNELS', blocking=True) print("%s" % m) if m.chan3_raw == normal_rc_throttle: break except Exception as e: self.progress("Exception caught") ex = e self.context_pop() self.disarm_vehicle() self.reboot_sitl() if ex is not None: raise ex def test_manual_control(self): self.context_push() ex = None try: self.set_parameter("RC12_OPTION", 46) # enable/disable rc overrides self.reboot_sitl() self.change_mode("MANUAL") self.wait_ready_to_arm() self.zero_throttle() self.arm_vehicle() self.progress("start moving forward a little") normal_rc_throttle = 1700 self.mavproxy.send('rc 3 %u\n' % normal_rc_throttle) self.wait_groundspeed(5, 100) self.progress("allow overrides") self.set_rc(12, 2000) self.progress("now override to stop") throttle_override_normalized = 0 expected_throttle = 0 # in VFR_HUD tstart = self.get_sim_time_cached() while True: if self.get_sim_time_cached() - tstart > 10: raise AutoTestTimeoutException("Did not reach speed") self.progress("Sending normalized throttle of %d" % (throttle_override_normalized,)) self.mav.mav.manual_control_send( 1, # target system 32767, # x (pitch) 32767, # y (roll) throttle_override_normalized, # z (thrust) 32767, # r (yaw) 0) # button mask m = self.mav.recv_match(type='VFR_HUD', blocking=True) want_speed = 2.0 self.progress("Speed=%f want=<%f throttle=%u want=%u" % (m.groundspeed, want_speed, m.throttle, expected_throttle)) if m.groundspeed < want_speed and m.throttle == expected_throttle: break self.progress("now override to stop - but set the switch on the RC transmitter to deny overrides; this should send the speed back up to 5 metres/second") self.set_rc(12, 1000) throttle_override_normalized = 500 expected_throttle = 36 # in VFR_HUD, corresponding to normal_rc_throttle adjusted for channel min/max tstart = self.get_sim_time_cached() while True: if self.get_sim_time_cached() - tstart > 10: raise AutoTestTimeoutException("Did not stop") print("Sending normalized throttle of %u" % 
(throttle_override_normalized,)) self.mav.mav.manual_control_send( 1, # target system 32767, # x (pitch) 32767, # y (roll) throttle_override_normalized, # z (thrust) 32767, # r (yaw) 0) # button mask m = self.mav.recv_match(type='VFR_HUD', blocking=True) want_speed = 5.0 self.progress("Speed=%f want=>%f throttle=%u want=%u" % (m.groundspeed, want_speed, m.throttle, expected_throttle)) if m.groundspeed > want_speed and m.throttle == expected_throttle: break # re-enable RC overrides self.set_rc(12, 2000) # check we revert to normal RC inputs when gcs overrides cease: self.progress("Waiting for RC to revert to normal RC input") while True: m = self.mav.recv_match(type='RC_CHANNELS', blocking=True) print("%s" % m) if m.chan3_raw == normal_rc_throttle: break except Exception as e: self.progress("Exception caught") ex = e self.context_pop() self.disarm_vehicle() self.reboot_sitl() if ex is not None: raise ex def test_camera_mission_items(self): self.context_push() ex = None try: self.load_mission("rover-camera-mission.txt") self.wait_ready_to_arm() self.change_mode("AUTO") self.wait_ready_to_arm() self.arm_vehicle() prev_cf = None while True: cf = self.mav.recv_match(type='CAMERA_FEEDBACK', blocking=True) if prev_cf is None: prev_cf = cf continue dist_travelled = self.get_distance_int(prev_cf, cf) prev_cf = cf mc = self.mav.messages.get("MISSION_CURRENT", None) if mc is None: continue elif mc.seq == 2: expected_distance = 2 elif mc.seq == 4: expected_distance = 5 elif mc.seq == 5: break else: continue self.progress("Expected distance %f got %f" % (expected_distance, dist_travelled)) error = abs(expected_distance - dist_travelled) # Rover moves at ~5m/s; we appear to do something at # 5Hz, so we do see over a meter of error! max_error = 1.5 if error > max_error: raise NotAchievedException("Camera distance error: %f (%f)" % (error, max_error)) self.disarm_vehicle() except Exception as e: self.progress("Exception caught") ex = e self.context_pop() if ex is not None: raise ex def test_do_set_mode_via_command_long(self): self.do_set_mode_via_command_long("HOLD") self.do_set_mode_via_command_long("MANUAL") def test_mavproxy_do_set_mode_via_command_long(self): self.mavproxy_do_set_mode_via_command_long("HOLD") self.mavproxy_do_set_mode_via_command_long("MANUAL") def test_sysid_enforce(self): '''Run the same arming code with correct then incorrect SYSID''' self.context_push() ex = None try: # if set_parameter is ever changed to not use MAVProxy # this test is going to break horribly. Sorry. self.set_parameter("SYSID_MYGCS", 255) # assume MAVProxy does this! self.set_parameter("SYSID_ENFORCE", 1) # assume MAVProxy does this! 
self.change_mode('MANUAL') self.progress("make sure I can arm ATM") self.wait_ready_to_arm() self.arm_vehicle(timeout=5) self.disarm_vehicle() # temporarily set a different system ID than MAVProxy: self.progress("Attempting to arm vehicle myself") old_srcSystem = self.mav.mav.srcSystem try: self.mav.mav.srcSystem = 243 self.arm_vehicle(timeout=5) self.disarm_vehicle() success = False except AutoTestTimeoutException as e: success = True pass self.mav.mav.srcSystem = old_srcSystem if not success: raise NotAchievedException( "Managed to arm with SYSID_ENFORCE set") self.progress("Attempting to arm vehicle from vehicle component") old_srcSystem = self.mav.mav.srcSystem comp_arm_exception = None try: self.mav.mav.srcSystem = 1 self.arm_vehicle(timeout=5) self.disarm_vehicle() except Exception as e: comp_arm_exception = e pass self.mav.mav.srcSystem = old_srcSystem if comp_arm_exception is not None: raise comp_arm_exception except Exception as e: self.progress("Exception caught") ex = e self.context_pop() if ex is not None: raise ex def drain_mav_seconds(self, seconds): tstart = self.get_sim_time_cached() while self.get_sim_time_cached() - tstart < seconds: self.drain_mav(); self.delay_sim_time(0.5) def test_button(self): self.set_parameter("SIM_PIN_MASK", 0) self.set_parameter("BTN_ENABLE", 1) btn = 2 pin = 3 self.set_parameter("BTN_PIN%u" % btn, pin) self.drain_mav() m = self.mav.recv_match(type='BUTTON_CHANGE', blocking=True, timeout=1) self.progress("m: %s" % str(m)) if m is None: raise NotAchievedException("Did not get BUTTON_CHANGE event") mask = 1<<btn if m.state & mask: raise NotAchievedException("Bit incorrectly set in mask (got=%u dontwant=%u)" % (m.state, mask)) # SITL instantly reverts the pin to its old value m2 = self.mav.recv_match(type='BUTTON_CHANGE', blocking=True, timeout=1) self.progress("m2: %s" % str(m2)) if m2 is None: raise NotAchievedException("Did not get repeat message") # wait for messages to stop coming: self.drain_mav_seconds(15) self.set_parameter("SIM_PIN_MASK", 0) m3 = self.mav.recv_match(type='BUTTON_CHANGE', blocking=True, timeout=1) self.progress("m3: %s" % str(m3)) if m3 is None: raise NotAchievedException("Did not get new message") if m.last_change_ms == m3.last_change_ms: raise NotAchievedException("last_change_ms same as first message") if m3.state != 0: raise NotAchievedException("Didn't get expected mask back in message (mask=0 state=%u" % (m3.state)) def test_rally_points(self): self.reboot_sitl() # to ensure starting point is as expected self.load_rally("rover-test-rally.txt") accuracy = self.get_parameter("WP_RADIUS") self.wait_ready_to_arm() self.arm_vehicle() self.reach_heading_manual(10) self.reach_distance_manual(50) self.change_mode("RTL") # location copied in from rover-test-rally.txt: loc = mavutil.location(40.071553, -105.229401, 0, 0) self.wait_location(loc, accuracy=accuracy) self.disarm_vehicle() def upload_using_mission_protocol(self, mission_type, items): '''mavlink2 required''' target_system = 1 target_component = 1 self.mav.mav.mission_count_send(target_system, target_component, len(items), mission_type) tstart = self.get_sim_time_cached() remaining_to_send = set(range(0, len(items))) sent = set() while True: if self.get_sim_time_cached() - tstart > 10: raise NotAchievedException("timeout uploading %s" % str(mission_type)) if len(remaining_to_send) == 0: self.progress("All sent") break m = self.mav.recv_match(type=['MISSION_REQUEST', 'MISSION_ACK'], blocking=True, timeout=1) if m is None: continue if m.get_type() == 'MISSION_ACK': raise 
NotAchievedException("Received unexpected mission ack %s" % str(m)) self.progress("Handling request for item %u" % m.seq) if m.seq in sent: raise NotAchievedException("received duplicate request for item %u" % m.seq) if m.seq not in remaining_to_send: raise NotAchievedException("received request for unknown item %u" % m.seq) if m.mission_type != mission_type: raise NotAchievedException("received request for item from wrong mission type") if items[m.seq].mission_type != mission_type: raise NotAchievedException("supplied item not of correct mission type") if items[m.seq].target_system != target_system: raise NotAchievedException("supplied item not of correct target system") if items[m.seq].target_component != target_component: raise NotAchievedException("supplied item not of correct target component") if items[m.seq].seq != m.seq: raise NotAchievedException("requested item has incorrect sequence number") items[m.seq].pack(self.mav.mav) self.progress("Sending (%s)" % str(items[m.seq])) self.mav.mav.send(items[m.seq]) remaining_to_send.discard(m.seq) sent.add(m.seq) m = self.mav.recv_match(type='MISSION_ACK', blocking=True, timeout=1) if m is None: raise NotAchievedException("Did not receive MISSION_ACK") if m.mission_type != mission_type: raise NotAchievedException("Mission ack not of expected mission type") if m.type != mavutil.mavlink.MAV_MISSION_ACCEPTED: raise NotAchievedException("Mission upload failed (%u)" % m.type) self.progress("Upload succeeded") def download_using_mission_protocol(self, mission_type): '''mavlink2 required''' target_system = 1 target_component = 1 self.mav.mav.mission_request_list_send(target_system, target_component, mission_type) while True: m = self.mav.recv_match(type='MISSION_COUNT', blocking=True, timeout=1) self.progress(str(m)) if m is None: raise NotAchievedException("Did not get MISSION_COUNT response") if m.target_component != 250: continue if m.mission_type != mission_type: raise NotAchievedException("Mission count response of incorrect type") break items = [] tstart = self.get_sim_time_cached() remaining_to_receive = set(range(0, m.count)) next_to_request = 0 while True: if self.get_sim_time_cached() - tstart > 10: raise NotAchievedException("timeout downloading %s" % str(mission_type)) if len(remaining_to_receive) == 0: self.progress("All received") return items self.progress("Requesting item %u" % next_to_request) self.mav.mav.mission_request_int_send(target_system, target_component, next_to_request, mission_type) m = self.mav.recv_match(type='MISSION_ITEM_INT', blocking=True, timeout=1) if m is None: raise NotAchievedException("Did not receive MISSION_ITEM_INT") if m.mission_type != mission_type: raise NotAchievedException("Received waypoint of wrong type") if m.seq != next_to_request: raise NotAchievedException("Received waypoint is out of sequence") self.progress("Got item %u" % m.seq) items.append(m) next_to_request += 1 remaining_to_receive.discard(m.seq) def test_gcs_fence(self): self.progress("Testing FENCE_POINT protocol") self.set_parameter("FENCE_TOTAL", 1) target_system = 1 target_component = 1 lat = 1.2345 lng = 5.4321 self.mav.mav.fence_point_send(target_system, target_component, 0, 1, lat, lng) self.progress("Requesting fence return point") self.mav.mav.fence_fetch_point_send(target_system, target_component, 0) m = self.mav.recv_match(type="FENCE_POINT", blocking=True, timeout=1) print("m: %s" % str(m)) if m is None: raise NotAchievedException("Did not get fence return point back") if abs(m.lat - lat) > 0.000001: raise 
NotAchievedException("Did not get correct lat in fencepoint: got=%f want=%f" % (m.lat, lat)) if abs(m.lng - lng) > 0.000001: raise NotAchievedException("Did not get correct lng in fencepoint: got=%f want=%f" % (m.lng, lng)) self.progress("Now testing a different value") lat = 2.345 lng = 4.321 self.mav.mav.fence_point_send(target_system, target_component, 0, 1, lat, lng) self.progress("Requesting fence return point") self.mav.mav.fence_fetch_point_send(target_system, target_component, 0) m = self.mav.recv_match(type="FENCE_POINT", blocking=True, timeout=1) print("m: %s" % str(m)) if abs(m.lat - lat) > 0.000001: raise NotAchievedException("Did not get correct lat in fencepoint: got=%f want=%f" % (m.lat, lat)) if abs(m.lng - lng) > 0.000001: raise NotAchievedException("Did not get correct lng in fencepoint: got=%f want=%f" % (m.lng, lng)) def test_offboard(self, timeout=90): self.load_mission("rover-guided-mission.txt") self.wait_ready_to_arm(require_absolute=True) self.arm_vehicle() self.change_mode("AUTO") offboard_expected_duration = 10 # see mission file if self.mav.messages.get("SET_POSITION_TARGET_GLOBAL_INT", None): raise PreconditionFailedException("Already have SET_POSITION_TARGET_GLOBAL_INT") tstart = self.get_sim_time_cached() last_heartbeat_sent = 0 got_sptgi = False magic_waypoint_tstart = 0 magic_waypoint_tstop = 0 while True: if self.mode_is("HOLD", cached=True): break now = self.get_sim_time_cached() if now - last_heartbeat_sent > 1: last_heartbeat_sent = now self.mav.mav.heartbeat_send(mavutil.mavlink.MAV_TYPE_ONBOARD_CONTROLLER, mavutil.mavlink.MAV_AUTOPILOT_INVALID, 0, 0, 0) if now - tstart > timeout: raise AutoTestTimeoutException("Didn't complete") magic_waypoint = 3 # mc = self.mav.messages.get("MISSION_CURRENT", None) mc = self.mav.recv_match(type="MISSION_CURRENT", blocking=False) if mc is not None: print("%s" % str(mc)) if mc.seq == magic_waypoint: print("At magic waypoint") if magic_waypoint_tstart == 0: magic_waypoint_tstart = self.get_sim_time_cached() sptgi = self.mav.messages.get("SET_POSITION_TARGET_GLOBAL_INT", None) if sptgi is not None: got_sptgi = True elif mc.seq > magic_waypoint: if magic_waypoint_tstop == 0: magic_waypoint_tstop = self.get_sim_time_cached() self.disarm_vehicle() offboard_duration = magic_waypoint_tstop - magic_waypoint_tstart if abs(offboard_duration - offboard_expected_duration) > 1: raise NotAchievedException("Did not stay in offboard control for correct time (want=%f got=%f)" % (offboard_expected_duration, offboard_duration)) if not got_sptgi: raise NotAchievedException("Did not get sptgi message") print("spgti: %s" % str(sptgi)) def assert_mission_count_on_link(self, mav, expected_count, target_system, target_component, mission_type): mav.mav.mission_request_list_send(target_system, target_component, mission_type) m = mav.recv_match(type="MISSION_COUNT", blocking=True, timeout=1) if m is None: raise NotAchievedException("Did not receive MISSION_COUNT on link") if m.count != expected_count: raise NotAchievedException("Bad count received (want=%u got=%u)" % (expected_count, m.count)) def get_mission_item_on_link(self, item, mav, target_system, target_component, mission_type): mav.mav.mission_request_int_send(target_system, target_component, item, mission_type) m = mav.recv_match(type='MISSION_ITEM_INT', blocking=True, timeout=1) if m is None: raise NotAchievedException("Did not receive mission item int") if m.target_system != mav.mav.srcSystem: raise NotAchievedException("Unexpected target system %u want=%u" % (m.target_system, 
mav.mav.srcSystem)) if m.target_component != mav.mav.srcComponent: raise NotAchievedException("Unexpected target component %u want=%u" % (m.target_component, mav.mav.srcComponent)) return m def clear_mission(self, mission_type, target_system, target_component): self.mav.mav.mission_count_send(target_system, target_component, 0, mission_type) m = self.mav.recv_match(type='MISSION_ACK', blocking=True, timeout=1) if m is None: raise NotAchievedException("Expected ACK for clearing mission") if m.target_system != self.mav.mav.srcSystem: raise NotAchievedException("ACK not targetted at correct system want=%u got=%u" % (self.mav.mav.srcSystem, m.target_system)) if m.target_component != self.mav.mav.srcComponent: raise NotAchievedException("ACK not targetted at correct component want=%u got=%u" % (self.mav.mav.srcComponent, m.target_component)) if m.type != mavutil.mavlink.MAV_MISSION_ACCEPTED: raise NotAchievedException("Expected MAV_MISSION_ACCEPTED got %u" % (m.type)) def assert_receive_mission_item_request(self, mission_type, seq): self.progress("Expecting request for item %u" % seq) m = self.mav.recv_match(type='MISSION_REQUEST', blocking=True, timeout=1) if m is None: raise NotAchievedException("Did not get item request") if m.mission_type != mission_type: raise NotAchievedException("Incorrect mission type (wanted=%u got=%u)" % (mission_type, m.mission_type)) if m.seq != seq: raise NotAchievedException("Unexpected sequence number (want=%u got=%u)" % (seq, m.seq)) self.progress("Received item request OK") def assert_receive_mission_ack(self, mission_type, want_type=mavutil.mavlink.MAV_MISSION_ACCEPTED, target_system=None, target_component=None): if target_system is None: target_system = self.mav.mav.srcSystem if target_component is None: target_component = self.mav.mav.srcComponent self.progress("Expecting mission ack") m = self.mav.recv_match(type='MISSION_ACK', blocking=True, timeout=1) if m is None: raise NotAchievedException("Expected mission ACK") if m.target_system != target_system: raise NotAchievedException("ACK not targetted at correct system want=%u got=%u" % (self.mav.mav.srcSystem, m.target_system)) if m.target_component != target_component: raise NotAchievedException("ACK not targetted at correct component") if m.mission_type != mission_type: raise NotAchievedException("Unexpected mission type %u want=%u" % (m.mission_type, mission_type)) if m.type != want_type: raise NotAchievedException("Expected ack type got %u got %u" % (want_type, m.type)) def test_gcs_rally(self): target_system = 1 target_component = 1 self.mavproxy.send('rally clear\n') self.delay_sim_time(1) if self.get_parameter("RALLY_TOTAL") != 0: raise NotAchievedException("Failed to clear rally points") old_srcSystem = self.mav.mav.srcSystem # stop MAVProxy poking the autopilot: self.mavproxy.send('module unload rally\n') self.mavproxy.expect("Unloaded module rally") self.mavproxy.send('module unload wp\n') self.mavproxy.expect("Unloaded module wp") try: item1_lat = int(2.0000 *1e7) items = [ self.mav.mav.mission_item_int_encode( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.0000 *1e7), # latitude int(1.0000 *1e7), # longitude 31.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), self.mav.mav.mission_item_int_encode( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # 
autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 item1_lat, # latitude int(2.0000 *1e7), # longitude 32.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), self.mav.mav.mission_item_int_encode( target_system, target_component, 2, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(3.0000 *1e7), # latitude int(3.0000 *1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), ] self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, items) downloaded = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) print("Got items (%s)" % str(items)) if len(downloaded) != len(items): raise NotAchievedException("Did not download correct number of items want=%u got=%u" % (len(downloaded), len(items))) rally_total = self.get_parameter("RALLY_TOTAL") if rally_total != len(downloaded): raise NotAchievedException("Unexpected rally point count: want=%u got=%u" % (len(items), rally_total)) self.progress("Pruning count by setting parameter (urgh)") self.set_parameter("RALLY_TOTAL", 2) downloaded = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(downloaded) != 2: raise NotAchievedException("Failed to prune rally points by setting parameter. want=%u got=%u" % (2, len(downloaded))) self.progress("Uploading a third item using old protocol") new_item2_lat = int(6.0 *1e7) self.set_parameter("RALLY_TOTAL", 3) self.mav.mav.rally_point_send(target_system, target_component, 2, # sequence number 3, # total count new_item2_lat, int(7.0 *1e7), 15, 0, # "break" alt?! 0, # "land dir" 0) # flags downloaded = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(downloaded) != 3: raise NotAchievedException("resetting rally point count didn't change items returned") if downloaded[2].x != new_item2_lat: raise NotAchievedException("Bad lattitude in downloaded item: want=%u got=%u" % (new_item2_lat, downloaded[2].x)) self.progress("Grabbing original item 1 using original protocol") self.mav.mav.rally_fetch_point_send(target_system, target_component, 1) m = self.mav.recv_match(type="RALLY_POINT", blocking=True, timeout=1) if m.target_system != 255: raise NotAchievedException("Bad target_system on received rally point (want=%u got=%u)" % (255, m.target_system)) if m.target_component != 250: # autotest's component ID raise NotAchievedException("Bad target_component on received rally point") if m.lat != item1_lat: raise NotAchievedException("Bad latitude on received rally point") self.start_subtest("Test upload lockout and timeout") self.progress("Starting upload from normal sysid") self.mav.mav.mission_count_send(target_system, target_component, len(items), mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.drain_mav() # throw away requests for items self.mav.mav.srcSystem = 243 self.progress("Attempting upload from sysid=%u" % (self.mav.mav.srcSystem,)) self.mav.mav.mission_count_send(target_system, target_component, len(items), mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_DENIED) self.progress("Attempting download from sysid=%u" % (self.mav.mav.srcSystem,)) self.mav.mav.mission_request_list_send(target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_DENIED) # wait for the 
upload from sysid=1 to time out: self.mavproxy.expect("upload timeout") self.progress("Now trying to upload empty mission after timeout") self.mav.mav.mission_count_send(target_system, target_component, 0, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.drain_mav() self.start_subtest("Check rally upload/download across separate links") self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, items) mav2 = mavutil.mavlink_connection("tcp:localhost:5763", robust_parsing=True, source_system = 7, source_component=7) expected_count = 3 self.progress("Assert mision count on new link") self.assert_mission_count_on_link(mav2, expected_count, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("Assert mission count on original link") self.assert_mission_count_on_link(self.mav, expected_count, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("Get first item on new link") m2 = self.get_mission_item_on_link(2, mav2, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("Get first item on original link") m = self.get_mission_item_on_link(2, self.mav, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if m2.x != m.x: raise NotAchievedException("mission items do not match (%d vs %d)" % (m2.x, m.x)) self.start_subtest("Should enforce items come from correct GCS") self.mav.mav.mission_count_send(target_system, target_component, 1, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 0) self.progress("Attempting to upload from bad sysid") old_sysid = self.mav.mav.srcSystem self.mav.mav.srcSystem = 17 items[0].pack(self.mav.mav) self.mav.mav.send(items[0]) self.mav.mav.srcSystem = old_sysid self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_DENIED, target_system=17) self.progress("Sending from correct sysid") items[0].pack(self.mav.mav) self.mav.mav.send(items[0]) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.drain_mav() self.drain_all_pexpects() self.start_subtest("Attempt to send item on different link to that which we are sending requests on") self.progress("Sending count") self.mav.mav.mission_count_send(target_system, target_component, 2, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 0) old_mav2_system = mav2.mav.srcSystem old_mav2_component = mav2.mav.srcComponent mav2.mav.srcSystem = self.mav.mav.srcSystem mav2.mav.srcComponent = self.mav.mav.srcComponent self.progress("Sending item on second link") # note that the routing table in ArduPilot now will say # this sysid/compid is on both links which may cause # weirdness... 
items[0].pack(mav2.mav) mav2.mav.send(items[0]) mav2.mav.srcSystem = old_mav2_system mav2.mav.srcComponent = old_mav2_component # we continue to receive requests on the original link: m = self.mav.recv_match(type='MISSION_REQUEST', blocking=True, timeout=1) if m is None: raise NotAchievedException("Did not get mission request") if m.seq != 1: raise NotAchievedException("Unexpected sequence number (expected=%u got=%u)" % (1, m.seq)) items[1].pack(self.mav.mav) self.mav.mav.send(items[1]) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.drain_mav() self.drain_all_pexpects() self.start_subtest("Upload mission and rally points at same time") self.progress("Sending rally count") self.mav.mav.mission_count_send(target_system, target_component, 3, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 0) self.progress("Sending wp count") self.mav.mav.mission_count_send(target_system, target_component, 3, mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_MISSION, 0) self.progress("Answering request for mission item 0") wp = self.mav.mav.mission_item_int_send( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.1000 *1e7), # latitude int(1.2000 *1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_MISSION, 1) self.progress("Answering request for rally point 0") items[0].pack(self.mav.mav) self.mav.mav.send(items[0]) self.progress("Expecting request for rally item 1") self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 1) self.progress("Answering request for rally point 1") items[1].pack(self.mav.mav) self.mav.mav.send(items[1]) self.progress("Expecting request for rally item 2") self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 2) self.progress("Answering request for rally point 2") items[2].pack(self.mav.mav) self.mav.mav.send(items[2]) self.progress("Expecting mission ack for rally") self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("Answering request for waypoints item 1") wp = self.mav.mav.mission_item_int_send( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.1000 *1e7), # latitude int(1.2000 *1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_MISSION, 2) self.progress("Answering request for waypoints item 2") self.mav.mav.mission_item_int_send( target_system, target_component, 2, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.1000 *1e7), # latitude int(1.2000 *1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.start_subtest("Test write-partial-list") self.progress("Clearing rally points using count-send") self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, target_system, target_component) self.progress("Should not be able to 
set items completely past the waypoint count") self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, items) self.mav.mav.mission_write_partial_list_send( target_system, target_component, 17, 20, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_ERROR) self.progress("Should not be able to set items overlapping the waypoint count") self.mav.mav.mission_write_partial_list_send( target_system, target_component, 0, 20, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_ERROR) self.progress("try to overwrite items 1 and 2") self.mav.mav.mission_write_partial_list_send( target_system, target_component, 1, 2, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 1) self.progress("Try shoving up an incorrectly sequenced item") self.mav.mav.mission_item_int_send( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.1000 *1e7), # latitude int(1.2000 *1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_INVALID_SEQUENCE) self.progress("Try shoving up an incorrectly sequenced item (but within band)") self.mav.mav.mission_item_int_send( target_system, target_component, 2, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.1000 *1e7), # latitude int(1.2000 *1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_INVALID_SEQUENCE) self.progress("Now provide correct item") item1_latitude = int(1.2345*1e7) self.mav.mav.mission_item_int_send( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 item1_latitude, # latitude int(1.2000 *1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 2) self.progress("Answering request for rally point 2") items[2].pack(self.mav.mav) self.mav.mav.send(items[2]) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("TODO: ensure partial mission write was good") self.start_subtest("clear mission types") self.assert_mission_count_on_link(self.mav, 3, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_mission_count_on_link(self.mav, 3, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.mav.mav.mission_clear_all_send(target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_mission_count_on_link(self.mav, 0, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_mission_count_on_link(self.mav, 3, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.mav.mav.mission_clear_all_send(target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_MISSION) 
self.assert_mission_count_on_link(self.mav, 0, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_mission_count_on_link(self.mav, 0, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.start_subtest("try sending out-of-range counts") self.mav.mav.mission_count_send(target_system, target_component, 1, 230) self.assert_receive_mission_ack(230, want_type=mavutil.mavlink.MAV_MISSION_UNSUPPORTED) self.mav.mav.mission_count_send(target_system, target_component, 16000, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_NO_SPACE) except Exception as e: self.progress("Received exception (%s)" % self.get_exception_stacktrace(e)) self.mav.mav.srcSystem = old_srcSystem raise e self.mavproxy.send('module load rally\n') self.mavproxy.expect("Loaded module rally") self.mavproxy.send('module load wp\n') self.mavproxy.expect("Loaded module wp") def tests(self): '''return list of all tests''' ret = super(AutoTestRover, self).tests() ret.extend([ ("MAVProxy_SetModeUsingSwitch", "Set modes via mavproxy switch", self.test_setting_modes_via_mavproxy_switch), ("MAVProxy_SetModeUsingMode", "Set modes via mavproxy mode command", self.test_setting_modes_via_mavproxy_mode_command), ("ModeSwitch", "Set modes via modeswitch", self.test_setting_modes_via_modeswitch), ("AuxModeSwitch", "Set modes via auxswitches", self.test_setting_modes_via_auxswitches), ("DriveRTL", "Drive an RTL Mission", self.drive_rtl_mission), ("DriveSquare", "Learn/Drive Square with Ch7 option", self.drive_square), ("DriveMission", "Drive Mission %s" % "rover1.txt", lambda: self.drive_mission("rover1.txt")), # disabled due to frequent failures in travis. 
This test needs re-writing # ("Drive Brake", self.drive_brake), ("GetBanner", "Get Banner", self.do_get_banner), ("GetCapabilities", "Get Capabilities", self.do_get_autopilot_capabilities), ("DO_SET_MODE", "Set mode via MAV_COMMAND_DO_SET_MODE", self.test_do_set_mode_via_command_long), ("MAVProxy_DO_SET_MODE", "Set mode via MAV_COMMAND_DO_SET_MODE with MAVProxy", self.test_mavproxy_do_set_mode_via_command_long), ("ServoRelayEvents", "Test ServoRelayEvents", self.test_servorelayevents), ("RCOverrides", "Test RC overrides", self.test_rc_overrides), ("RCOverridesCancel", "Test RC overrides Cancel", self.test_rc_override_cancel), ("MANUAL_CONTROL", "Test mavlink MANUAL_CONTROL", self.test_manual_control), ("Sprayer", "Test Sprayer", self.test_sprayer), ("AC_Avoidance", "Test AC Avoidance switch", self.drive_fence_ac_avoidance), ("CameraMission", "Test Camera Mission Items", self.test_camera_mission_items), # Gripper test ("Gripper", "Test gripper", self.test_gripper), ("GripperMission", "Test Gripper Mission Items", self.test_gripper_mission), ("SET_MESSAGE_INTERVAL", "Test MAV_CMD_SET_MESSAGE_INTERVAL", self.test_set_message_interval), ("REQUEST_MESSAGE", "Test MAV_CMD_REQUEST_MESSAGE", self.test_request_message), ("SYSID_ENFORCE", "Test enforcement of SYSID_MYGCS", self.test_sysid_enforce), ("Button", "Test Buttons", self.test_button), ("Rally", "Test Rally Points", self.test_rally_points), ("Offboard", "Test Offboard Control", self.test_offboard), ("GCSFence", "Upload and download of fence", self.test_gcs_fence), ("GCSRally", "Upload and download of rally", self.test_gcs_rally), ("DataFlashOverMAVLink", "Test DataFlash over MAVLink", self.test_dataflash_over_mavlink), ("DownLoadLogs", "Download logs", lambda: self.log_download( self.buildlogs_path("APMrover2-log.bin"), upload_logs=len(self.fail_list) > 0)), ]) return ret def rc_defaults(self): ret = super(AutoTestRover, self).rc_defaults() ret[3] = 1500 ret[8] = 1800 return ret; def default_mode(self): return 'MANUAL'
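# ---------------------------------------------------------------------------
# Standalone sketch (not part of the test suite above) of the mission-protocol
# handshake the upload/download helpers drive: the GCS opens the exchange with
# MISSION_COUNT, answers each MISSION_REQUEST, and the vehicle closes the
# exchange with MISSION_ACK. The connection string is an assumption; any SITL
# endpoint works.
if __name__ == '__main__':
    from pymavlink import mavutil as _mavutil
    conn = _mavutil.mavlink_connection('udp:127.0.0.1:14550')
    conn.wait_heartbeat()
    # a count of zero is a "clear mission" upload, so no item requests follow
    conn.mav.mission_count_send(1, 1, 0, _mavutil.mavlink.MAV_MISSION_TYPE_MISSION)
    ack = conn.recv_match(type='MISSION_ACK', blocking=True, timeout=5)
    print("ack: %s" % str(ack))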
gpl-3.0
2,298,468,115,287,737,000
41.176963
165
0.522592
false
praekelt/molo.polls
molo/polls/admin_views.py
1
1756
import csv
from collections import OrderedDict

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404, render
from django.views.generic.edit import FormView

from molo.polls.models import Question


class QuestionResultsAdminView(FormView):

    def get(self, request, *args, **kwargs):
        parent = kwargs['parent']
        question = get_object_or_404(Question, pk=parent)

        data_headings = ['Submission Date', 'Answer', 'User']
        data_rows = []
        if hasattr(question, 'freetextquestion'):
            votes = question.freetextquestion.freetextvote_set.all()
        else:
            votes = question.choicevote_set.all()

        for vote in votes:
            # build each row from a list of pairs so the OrderedDict
            # actually preserves column order for the CSV writer
            data_rows.append(OrderedDict([
                ('submission_date', vote.submission_date),
                ('answer', vote.answer),
                ('user', vote.user),
            ]))

        action = request.GET.get('action', None)
        if action == 'download':
            return self.send_csv(question.title, data_headings, data_rows)

        context = {
            'page_title': question.title,
            'data_headings': data_headings,
            'data_rows': data_rows,
        }
        return render(request, 'admin/question_results.html', context)

    def send_csv(self, question_title, data_headings, data_rows):
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = \
            'attachment;filename="question-{0}-results.csv"'.format(
                question_title)

        writer = csv.writer(response)
        writer.writerow(data_headings)
        for item in data_rows:
            writer.writerow(item.values())
        return response
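# Small standalone illustration (not part of the original view): item.values()
# comes out in insertion order precisely because each row is an OrderedDict
# built from a list of pairs, which is what keeps CSV columns aligned with the
# headings.
if __name__ == '__main__':
    row = OrderedDict([('submission_date', '2016-01-01'),
                       ('answer', 'yes'),
                       ('user', 'demo')])
    assert list(row.keys()) == ['submission_date', 'answer', 'user']
    print(list(row.values()))  # values line up with the headings row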
bsd-2-clause
-880,368,398,644,271,100
31.518519
74
0.610478
false
liboyin/algo-prac
arrays/weighted_schedule.py
1
2080
from lib import bin_search_right, snd

def search(arr):
    """
    Given a list of tasks, represented as tuple of starting time, finishing time, and profit (non-negative).
    Returns the maximum profit achievable by choosing non-conflicting tasks.
    Solution is binary search on tasks sorted by finishing time.
    Time complexity is O(n\log n). Space complexity is O(n).
    :param arr: list[tuple[num,num,num]]. requires unique finishing time
    :return: num
    """
    if not arr:
        return 0
    a = sorted(arr, key=snd)  # sort on finishing time
    dp = [0]  # dp[i]: max profit considering a[:i]. when finished, len(dp) == n + 1
    for i, x in enumerate(a):
        start, _, val = x
        # j: index (in arr) of the last task that finishes before the starting time of this one
        j = bin_search_right(a, start, right=i, key=snd) - 1
        if j == -1:  # no task finishes before the starting time of this one
            dp.append(max(dp[-1], val))  # carry over from the previous, or start a new sequence of tasks
        else:
            dp.append(max(dp[-1], dp[j+1] + val))  # j + 1 is the index of j in dp
    return dp[-1]

if __name__ == '__main__':
    from itertools import compress, product
    from lib import fst, sliding_window
    from random import randint

    def control(arr):  # O(n 2^n)
        def step(mask):
            a = sorted(compress(arr, mask), key=fst)  # selected tasks, sorted by starting time
            if all(x[1] <= y[0] for x, y in sliding_window(a, 2)):
                return sum(x[2] for x in a)
            return 0
        return max(step(m) for m in product(*([(0, 1)] * len(arr))))

    for k, v in {((3, 10, 20), (1, 2, 50), (6, 19, 100), (10, 100, 200)): 270,
                 ((3, 10, 20), (1, 2, 50), (6, 19, 100), (2, 100, 200)): 250}.items():
        assert search(k) == v
    for size in range(15):
        a = []
        for _ in range(size):
            start = randint(0, size)
            a.append((start, randint(start+1, size*2), randint(0, size*2)))
        assert search(a) == control(a)
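# Standalone walk-through of the same DP (illustrative only; the project's
# `lib` helpers are replaced by the stdlib bisect module). Finish times of
# earlier tasks are probed with bisect_right to locate the last task that is
# compatible with the current task's start.
if __name__ == '__main__':
    from bisect import bisect_right

    def search_stdlib(tasks):
        a = sorted(tasks, key=lambda t: t[1])   # sort by finishing time
        finish = [t[1] for t in a]
        dp = [0]
        for i, (start, _, val) in enumerate(a):
            # last index with finish time <= start, restricted to a[:i]
            j = bisect_right(finish, start, 0, i) - 1
            dp.append(max(dp[-1], val if j == -1 else dp[j + 1] + val))
        return dp[-1]

    assert search_stdlib([(3, 10, 20), (1, 2, 50), (6, 19, 100), (10, 100, 200)]) == 270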
gpl-3.0
-3,536,597,105,274,693,000
45.222222
116
0.574519
false
purpleidea/gedit-plugins
plugins/commander/commander/commands/method.py
1
3558
# -*- coding: utf-8 -*-
#
#  method.py - commander
#
#  Copyright (C) 2010 - Jesse van den Kieboom
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software
#  Foundation, Inc., 51 Franklin Street, Fifth Floor,
#  Boston, MA 02110-1301, USA.

import commands.exceptions as exceptions
import types
import inspect
import sys
import utils

class Method:
    def __init__(self, method, name, parent):
        self.method = method
        self.real_name = name
        self.name = name.replace('_', '-')
        self.parent = parent
        self._func_props = None

    def __str__(self):
        return self.name

    def autocomplete_func(self):
        if hasattr(self.method, 'autocomplete'):
            return getattr(self.method, 'autocomplete')

        return None

    def accelerator(self):
        if hasattr(self.method, 'accelerator'):
            return getattr(self.method, 'accelerator')

        return None

    def args(self):
        fp = self.func_props()
        return fp.args, fp.varargs

    def func_props(self):
        if not self._func_props:
            # Introspect the function arguments
            self._func_props = utils.getargspec(self.method)

        return self._func_props

    def commands(self):
        return []

    def cancel(self, view):
        if self.parent:
            self.parent.cancel(view, self)

    def cancel_continuation(self, view):
        if self.parent:
            self.parent.continuation(view, self)

    def doc(self):
        if self.method.__doc__:
            return self.method.__doc__
        else:
            return ''

    def oneline_doc(self):
        return self.doc().split("\n")[0]

    def execute(self, argstr, words, entry, modifier, kk = {}):
        fp = self.func_props()

        kwargs = {'argstr': argstr,
                  'args': words,
                  'entry': entry,
                  'view': entry.view(),
                  'modifier': modifier,
                  'window': entry.view().get_toplevel()}
        oargs = list(fp.args)
        args = []
        idx = 0

        if fp.defaults:
            numdef = len(fp.defaults)
        else:
            numdef = 0

        for k in fp.args:
            if k in kwargs:
                args.append(kwargs[k])
                oargs.remove(k)
                del kwargs[k]
            elif idx >= len(words):
                if numdef < len(oargs):
                    raise exceptions.Execute('Invalid number of arguments (need %s)' % (oargs[0],))
            else:
                args.append(words[idx])
                oargs.remove(k)
                idx += 1

        # Append the rest if it can handle varargs
        if fp.varargs and idx < len(words):
            args.extend(words[idx:])

        if not fp.keywords:
            kwargs = {}

        for k in kk:
            kwargs[k] = kk[k]

        return self.method(*args, **kwargs)

    def __lt__(self, other):
        if isinstance(other, Method):
            return self.name < other.name
        else:
            return self.name < other

# vi:ex:ts=4:et
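# Illustration (not shipped with the plugin; assumes the plugin environment so
# the module-level imports above succeed): `utils.getargspec` returns an
# argspec-like object, and the stdlib `inspect.getargspec` exposes the same
# four fields this class relies on (args, varargs, keywords, defaults).
if __name__ == '__main__':
    def sample(view, count, *rest, **opts):
        """Demo command"""

    spec = inspect.getargspec(sample)
    print(spec.args)      # ['view', 'count']
    print(spec.varargs)   # 'rest'
    print(spec.keywords)  # 'opts'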
gpl-2.0
-6,270,523,070,650,933,000
27.015748
149
0.577853
false
sirio81/vmcli
libguest.py
1
3653
from libvmcli import *

class Guest:
    def __init__(self, all_opt, global_cluster_options, cluster_options):
        self.global_cluster_options = global_cluster_options
        self.cluster_options = cluster_options
        self.all_opt = all_opt
        self.opt = self.parse_opt(all_opt)
        self.host_name = None
        self.name = self.opt['name']
        # Note: this will not bother migration, because 'all_opt' is going
        # to be used to start the new process.
        if ',' in self.opt['vnc']:
            self.opt['vnc'] = self.opt['vnc'].split(',')[0]

    def parse_opt(self, all_opt):
        '''Takes a string with the whole qemu command and creates a dictionary
        with the option name as key. Its value will be a list because many
        options may be repeated (i.e. -drive).'''
        opt = []
        opt_b = []
        d = {}
        repeatable_options = ['drive', 'net', 'chardev', 'iscsi', 'bt']
        all_opt = all_opt[1:-1]
        for e in all_opt.split(' -'):
            pair = e.split()
            if pair[0] in repeatable_options:
                opt_b.append(pair)
                continue
            elif len(pair) == 1:
                pair.append(None)
            opt.append(pair)
        opt = dict(opt)
        for c in opt_b:
            if c[0] not in opt:
                opt[c[0]] = []
            opt[c[0]].append(c[1])
        return opt

    def start(self, to_host):
        '''Simply starts the qemu process on the target host. No checks are
        made; they are left to higher classes. Returns the ssh exit status.'''
        out = subprocess.getstatusoutput('ssh {0} "{1} {2}"'.format(to_host, self.cluster_options['bin'], self.all_opt))
        if out[0] == 0:
            self.host_name = to_host
        else:
            error(out[1])
        return out[0]

    def shutdown(self):
        '''Shutdown the guest'''
        out = subprocess.getstatusoutput('ssh {0} "echo system_powerdown | socat - UNIX-CONNECT:/tmp/{1}.sock"'.format(self.host_name, self.name))
        return out[0]

    def ssh_bridge_vnc(self):
        port = 5900 + int(self.opt['vnc'].replace(':', ''))
        os.system('pkill -f --exact "ssh -fN {0} -L {1}:localhost:{1}"'.format(self.host_name, port))
        os.system('ssh -fN {0} -L {1}:localhost:{1}'.format(self.host_name, port))

    def show(self):
        vncviewer = self.global_cluster_options['vncviewer']
        if self.cluster_options['vnc_over_ssh'] == 'true':
            self.ssh_bridge_vnc()
            host_name = 'localhost'
        else:
            host_name = self.host_name
        os.system('{} {}{} &'.format(vncviewer, host_name, self.opt['vnc']))

    def kill(self):
        subprocess.getstatusoutput('ssh {0} \'pkill -f "name {1}"\''.format(self.host_name, self.name))
        sleep(2)
        subprocess.getstatusoutput('ssh {0} \'pkill -9 -f "name {1}"\''.format(self.host_name, self.name))
        sleep(1)
        return subprocess.getstatusoutput('ssh {0} \'pgrep -f "name {1}"\''.format(self.host_name, self.name))[0]

    def stop(self):
        return subprocess.getstatusoutput('ssh {0} "echo stop | socat - UNIX-CONNECT:/tmp/{1}.sock"'.format(self.host_name, self.name))[0]

    def cont(self):
        return subprocess.getstatusoutput('ssh {0} "echo cont | socat - UNIX-CONNECT:/tmp/{1}.sock"'.format(self.host_name, self.name))[0]

    def info(self):
        info = '''
        host: {}
        vnc: {}
        mem: {}
        smp: {}
        '''
        return info.format(self.host_name, self.opt['vnc'], self.opt['m'], self.opt['smp'])
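# Rough illustration (not part of the original class) of what parse_opt
# produces: the surrounding quotes are stripped by all_opt[1:-1], the string
# is split on ' -', and repeatable options are collected into lists. The
# sample command line below is hypothetical.
if __name__ == '__main__':
    g = object.__new__(Guest)  # bypass __init__ just to exercise parse_opt
    opts = g.parse_opt('"kvm -name guest01 -m 1024 -drive file=a.img -drive file=b.img"')
    print(opts['name'])   # 'guest01'
    print(opts['drive'])  # ['file=a.img', 'file=b.img']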
lgpl-2.1
-349,600,212,241,541,060
39.142857
146
0.545305
false
umago/kabukiman
kabukiman/module.py
1
1400
#!/usr/bin/python
# coding: utf-8

# Copyright (C) 2010 Lucas Alvares Gomes <lucasagomes@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.


class Module(object):
    """The base class of all modules"""

    def _load_informations(self, module, name, module_path, author='',
                           version='', website='', description=''):
        self._name = name
        self._module = module
        self._author = author
        self._version = version
        self._website = website
        self._description = description
        self.my_path = module_path

    def is_configurable(self):
        # To be overridden
        return False

    def look_up(self, word):
        # To be overridden
        return ''

    def show_config_dialog(self, parent):
        # To be overridden
        pass
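# Example subclass (illustrative only, not shipped with kabukiman): a concrete
# module only needs to override the hooks it cares about.
class EchoModule(Module):
    """Trivial dictionary module that echoes the looked-up word."""

    def look_up(self, word):
        return 'You looked up: %s' % word


if __name__ == '__main__':
    m = EchoModule()
    m._load_informations(None, 'echo', '/tmp', author='example')
    print(m.look_up('kabuki'))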
gpl-2.0
-1,178,434,302,922,153,500
33.146341
72
0.661429
false
ArcherSys/ArcherSys
Lib/sre_compile.py
1
19904
#
# Secret Labs' Regular Expression Engine
#
# convert template to internal format
#
# Copyright (c) 1997-2001 by Secret Labs AB.  All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#

"""Internal support module for sre"""

import _sre
import sre_parse
from sre_constants import *
from _sre import MAXREPEAT

assert _sre.MAGIC == MAGIC, "SRE module mismatch"

if _sre.CODESIZE == 2:
    MAXCODE = 65535
else:
    MAXCODE = 0xFFFFFFFF

_LITERAL_CODES = set([LITERAL, NOT_LITERAL])
_REPEATING_CODES = set([REPEAT, MIN_REPEAT, MAX_REPEAT])
_SUCCESS_CODES = set([SUCCESS, FAILURE])
_ASSERT_CODES = set([ASSERT, ASSERT_NOT])

# Sets of lowercase characters which have the same uppercase.
_equivalences = (
    # LATIN SMALL LETTER I, LATIN SMALL LETTER DOTLESS I
    (0x69, 0x131), # iı
    # LATIN SMALL LETTER S, LATIN SMALL LETTER LONG S
    (0x73, 0x17f), # sſ
    # MICRO SIGN, GREEK SMALL LETTER MU
    (0xb5, 0x3bc), # µμ
    # COMBINING GREEK YPOGEGRAMMENI, GREEK SMALL LETTER IOTA, GREEK PROSGEGRAMMENI
    (0x345, 0x3b9, 0x1fbe), # \u0345ιι
    # GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS, GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
    (0x390, 0x1fd3), # ΐΐ
    # GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS, GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA
    (0x3b0, 0x1fe3), # ΰΰ
    # GREEK SMALL LETTER BETA, GREEK BETA SYMBOL
    (0x3b2, 0x3d0), # βϐ
    # GREEK SMALL LETTER EPSILON, GREEK LUNATE EPSILON SYMBOL
    (0x3b5, 0x3f5), # εϵ
    # GREEK SMALL LETTER THETA, GREEK THETA SYMBOL
    (0x3b8, 0x3d1), # θϑ
    # GREEK SMALL LETTER KAPPA, GREEK KAPPA SYMBOL
    (0x3ba, 0x3f0), # κϰ
    # GREEK SMALL LETTER PI, GREEK PI SYMBOL
    (0x3c0, 0x3d6), # πϖ
    # GREEK SMALL LETTER RHO, GREEK RHO SYMBOL
    (0x3c1, 0x3f1), # ρϱ
    # GREEK SMALL LETTER FINAL SIGMA, GREEK SMALL LETTER SIGMA
    (0x3c2, 0x3c3), # ςσ
    # GREEK SMALL LETTER PHI, GREEK PHI SYMBOL
    (0x3c6, 0x3d5), # φϕ
    # LATIN SMALL LETTER S WITH DOT ABOVE, LATIN SMALL LETTER LONG S WITH DOT ABOVE
    (0x1e61, 0x1e9b), # ṡẛ
    # LATIN SMALL LIGATURE LONG S T, LATIN SMALL LIGATURE ST
    (0xfb05, 0xfb06), # ſtst
)

# Maps the lowercase code to lowercase codes which have the same uppercase.
_ignorecase_fixes = {i: tuple(j for j in t if i != j)
                     for t in _equivalences for i in t}

def _compile(code, pattern, flags):
    # internal: compile a (sub)pattern
    emit = code.append
    _len = len
    LITERAL_CODES = _LITERAL_CODES
    REPEATING_CODES = _REPEATING_CODES
    SUCCESS_CODES = _SUCCESS_CODES
    ASSERT_CODES = _ASSERT_CODES
    if (flags & SRE_FLAG_IGNORECASE and
            not (flags & SRE_FLAG_LOCALE) and
            flags & SRE_FLAG_UNICODE):
        fixes = _ignorecase_fixes
    else:
        fixes = None
    for op, av in pattern:
        if op in LITERAL_CODES:
            if flags & SRE_FLAG_IGNORECASE:
                lo = _sre.getlower(av, flags)
                if fixes and lo in fixes:
                    emit(OPCODES[IN_IGNORE])
                    skip = _len(code); emit(0)
                    if op is NOT_LITERAL:
                        emit(OPCODES[NEGATE])
                    for k in (lo,) + fixes[lo]:
                        emit(OPCODES[LITERAL])
                        emit(k)
                    emit(OPCODES[FAILURE])
                    code[skip] = _len(code) - skip
                else:
                    emit(OPCODES[OP_IGNORE[op]])
                    emit(lo)
            else:
                emit(OPCODES[op])
                emit(av)
        elif op is IN:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
                def fixup(literal, flags=flags):
                    return _sre.getlower(literal, flags)
            else:
                emit(OPCODES[op])
                fixup = None
            skip = _len(code); emit(0)
            _compile_charset(av, flags, code, fixup, fixes)
            code[skip] = _len(code) - skip
        elif op is ANY:
            if flags & SRE_FLAG_DOTALL:
                emit(OPCODES[ANY_ALL])
            else:
                emit(OPCODES[ANY])
        elif op in REPEATING_CODES:
            if flags & SRE_FLAG_TEMPLATE:
                raise error("internal: unsupported template operator")
            elif _simple(av) and op is not REPEAT:
                if op is MAX_REPEAT:
                    emit(OPCODES[REPEAT_ONE])
                else:
                    emit(OPCODES[MIN_REPEAT_ONE])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                emit(OPCODES[SUCCESS])
                code[skip] = _len(code) - skip
            else:
                emit(OPCODES[REPEAT])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                code[skip] = _len(code) - skip
                if op is MAX_REPEAT:
                    emit(OPCODES[MAX_UNTIL])
                else:
                    emit(OPCODES[MIN_UNTIL])
        elif op is SUBPATTERN:
            if av[0]:
                emit(OPCODES[MARK])
                emit((av[0]-1)*2)
            # _compile_info(code, av[1], flags)
            _compile(code, av[1], flags)
            if av[0]:
                emit(OPCODES[MARK])
                emit((av[0]-1)*2+1)
        elif op in SUCCESS_CODES:
            emit(OPCODES[op])
        elif op in ASSERT_CODES:
            emit(OPCODES[op])
            skip = _len(code); emit(0)
            if av[0] >= 0:
                emit(0) # look ahead
            else:
                lo, hi = av[1].getwidth()
                if lo != hi:
                    raise error("look-behind requires fixed-width pattern")
                emit(lo) # look behind
            _compile(code, av[1], flags)
            emit(OPCODES[SUCCESS])
            code[skip] = _len(code) - skip
        elif op is CALL:
            emit(OPCODES[op])
            skip = _len(code); emit(0)
            _compile(code, av, flags)
            emit(OPCODES[SUCCESS])
            code[skip] = _len(code) - skip
        elif op is AT:
            emit(OPCODES[op])
            if flags & SRE_FLAG_MULTILINE:
                av = AT_MULTILINE.get(av, av)
            if flags & SRE_FLAG_LOCALE:
                av = AT_LOCALE.get(av, av)
            elif flags & SRE_FLAG_UNICODE:
                av = AT_UNICODE.get(av, av)
            emit(ATCODES[av])
        elif op is BRANCH:
            emit(OPCODES[op])
            tail = []
            tailappend = tail.append
            for av in av[1]:
                skip = _len(code); emit(0)
                # _compile_info(code, av, flags)
                _compile(code, av, flags)
                emit(OPCODES[JUMP])
                tailappend(_len(code)); emit(0)
                code[skip] = _len(code) - skip
            emit(0) # end of branch
            for tail in tail:
                code[tail] = _len(code) - tail
        elif op is CATEGORY:
            emit(OPCODES[op])
            if flags & SRE_FLAG_LOCALE:
                av = CH_LOCALE[av]
            elif flags & SRE_FLAG_UNICODE:
                av = CH_UNICODE[av]
            emit(CHCODES[av])
        elif op is GROUPREF:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
            else:
                emit(OPCODES[op])
            emit(av-1)
        elif op is GROUPREF_EXISTS:
            emit(OPCODES[op])
            emit(av[0]-1)
            skipyes = _len(code); emit(0)
            _compile(code, av[1], flags)
            if av[2]:
                emit(OPCODES[JUMP])
                skipno = _len(code); emit(0)
                code[skipyes] = _len(code) - skipyes + 1
                _compile(code, av[2], flags)
                code[skipno] = _len(code) - skipno
            else:
                code[skipyes] = _len(code) - skipyes + 1
        else:
            raise ValueError("unsupported operand type", op)

def _compile_charset(charset, flags, code, fixup=None, fixes=None):
    # compile charset subprogram
    emit = code.append
    for op, av in _optimize_charset(charset, fixup, fixes,
                                    flags & SRE_FLAG_UNICODE):
        emit(OPCODES[op])
        if op is NEGATE:
            pass
        elif op is LITERAL:
            emit(av)
        elif op is RANGE:
            emit(av[0])
            emit(av[1])
        elif op is CHARSET:
            code.extend(av)
        elif op is BIGCHARSET:
            code.extend(av)
        elif op is CATEGORY:
            if flags & SRE_FLAG_LOCALE:
                emit(CHCODES[CH_LOCALE[av]])
            elif flags & SRE_FLAG_UNICODE:
                emit(CHCODES[CH_UNICODE[av]])
            else:
                emit(CHCODES[av])
        else:
            raise error("internal: unsupported set operator")
    emit(OPCODES[FAILURE])

def _optimize_charset(charset, fixup, fixes, isunicode):
    # internal: optimize character set
    out = []
    tail = []
    charmap = bytearray(256)
    for op, av in charset:
        while True:
            try:
                if op is LITERAL:
                    if fixup:
                        i = fixup(av)
                        charmap[i] = 1
                        if fixes and i in fixes:
                            for k in fixes[i]:
                                charmap[k] = 1
                    else:
                        charmap[av] = 1
                elif op is RANGE:
                    r = range(av[0], av[1]+1)
                    if fixup:
                        r = map(fixup, r)
                    if fixup and fixes:
                        for i in r:
                            charmap[i] = 1
                            if i in fixes:
                                for k in fixes[i]:
                                    charmap[k] = 1
                    else:
                        for i in r:
                            charmap[i] = 1
                elif op is NEGATE:
                    out.append((op, av))
                else:
                    tail.append((op, av))
            except IndexError:
                if len(charmap) == 256:
                    # character set contains non-UCS1 character codes
                    charmap += b'\0' * 0xff00
                    continue
                # character set contains non-BMP character codes
                if fixup and isunicode and op is RANGE:
                    lo, hi = av
                    ranges = [av]
                    # There are only two ranges of cased astral characters:
                    # 10400-1044F (Deseret) and 118A0-118DF (Warang Citi).
                    _fixup_range(max(0x10000, lo), min(0x11fff, hi),
                                 ranges, fixup)
                    for lo, hi in ranges:
                        if lo == hi:
                            tail.append((LITERAL, hi))
                        else:
                            tail.append((RANGE, (lo, hi)))
                else:
                    tail.append((op, av))
            break

    # compress character map
    runs = []
    q = 0
    while True:
        p = charmap.find(1, q)
        if p < 0:
            break
        if len(runs) >= 2:
            runs = None
            break
        q = charmap.find(0, p)
        if q < 0:
            runs.append((p, len(charmap)))
            break
        runs.append((p, q))
    if runs is not None:
        # use literal/range
        for p, q in runs:
            if q - p == 1:
                out.append((LITERAL, p))
            else:
                out.append((RANGE, (p, q - 1)))
        out += tail
        # if the case was changed or new representation is more compact
        if fixup or len(out) < len(charset):
            return out
        # else original character set is good enough
        return charset

    # use bitmap
    if len(charmap) == 256:
        data = _mk_bitmap(charmap)
        out.append((CHARSET, data))
        out += tail
        return out

    # To represent a big charset, first a bitmap of all characters in the
    # set is constructed. Then, this bitmap is sliced into chunks of 256
    # characters, duplicate chunks are eliminated, and each chunk is
    # given a number. In the compiled expression, the charset is
    # represented by a 32-bit word sequence, consisting of one word for
    # the number of different chunks, a sequence of 256 bytes (64 words)
    # of chunk numbers indexed by their original chunk position, and a
    # sequence of 256-bit chunks (8 words each).

    # Compression is normally good: in a typical charset, large ranges of
    # Unicode will be either completely excluded (e.g. if only cyrillic
    # letters are to be matched), or completely included (e.g. if large
    # subranges of Kanji match). These ranges will be represented by
    # chunks of all one-bits or all zero-bits.

    # Matching can be also done efficiently: the more significant byte of
    # the Unicode character is an index into the chunk number, and the
    # less significant byte is a bit index in the chunk (just like the
    # CHARSET matching).

    charmap = bytes(charmap) # should be hashable
    comps = {}
    mapping = bytearray(256)
    block = 0
    data = bytearray()
    for i in range(0, 65536, 256):
        chunk = charmap[i: i + 256]
        if chunk in comps:
            mapping[i // 256] = comps[chunk]
        else:
            mapping[i // 256] = comps[chunk] = block
            block += 1
            data += chunk
    data = _mk_bitmap(data)
    data[0:0] = [block] + _bytes_to_codes(mapping)
    out.append((BIGCHARSET, data))
    out += tail
    return out

def _fixup_range(lo, hi, ranges, fixup):
    for i in map(fixup, range(lo, hi+1)):
        for k, (lo, hi) in enumerate(ranges):
            if i < lo:
                if i == lo - 1:
                    ranges[k] = (i, hi)
                else:
                    ranges.insert(k, (i, i))
                break
            elif i > hi:
                if i == hi + 1:
                    ranges[k] = (lo, i)
                    break
            else:
                break
        else:
            ranges.append((i, i))

_CODEBITS = _sre.CODESIZE * 8
_BITS_TRANS = b'0' + b'1' * 255
def _mk_bitmap(bits, _CODEBITS=_CODEBITS, _int=int):
    s = bits.translate(_BITS_TRANS)[::-1]
    return [_int(s[i - _CODEBITS: i], 2)
            for i in range(len(s), 0, -_CODEBITS)]

def _bytes_to_codes(b):
    # Convert block indices to word array
    a = memoryview(b).cast('I')
    assert a.itemsize == _sre.CODESIZE
    assert len(a) * a.itemsize == len(b)
    return a.tolist()

def _simple(av):
    # check if av is a "simple" operator
    lo, hi = av[2].getwidth()
    return lo == hi == 1 and av[2][0][0] != SUBPATTERN

def _generate_overlap_table(prefix):
    """
    Generate an overlap table for the following prefix.
    An overlap table is a table of the same size as the prefix which
    informs about the potential self-overlap for each index in the prefix:
    - if overlap[i] == 0, prefix[i:] can't overlap prefix[0:...]
    - if overlap[i] == k with 0 < k <= i, prefix[i-k+1:i+1] overlaps with
      prefix[0:k]
    """
    table = [0] * len(prefix)
    for i in range(1, len(prefix)):
        idx = table[i - 1]
        while prefix[i] != prefix[idx]:
            if idx == 0:
                table[i] = 0
                break
            idx = table[idx - 1]
        else:
            table[i] = idx + 1
    return table

def _compile_info(code, pattern, flags):
    # internal: compile an info block.  in the current version,
    # this contains min/max pattern width, and an optional literal
    # prefix or a character map
    lo, hi = pattern.getwidth()
    if lo == 0:
        return # not worth it
    # look for a literal prefix
    prefix = []
    prefixappend = prefix.append
    prefix_skip = 0
    charset = [] # not used
    charsetappend = charset.append
    if not (flags & SRE_FLAG_IGNORECASE):
        # look for literal prefix
        for op, av in pattern.data:
            if op is LITERAL:
                if len(prefix) == prefix_skip:
                    prefix_skip = prefix_skip + 1
                prefixappend(av)
            elif op is SUBPATTERN and len(av[1]) == 1:
                op, av = av[1][0]
                if op is LITERAL:
                    prefixappend(av)
                else:
                    break
            else:
                break
        # if no prefix, look for charset prefix
        if not prefix and pattern.data:
            op, av = pattern.data[0]
            if op is SUBPATTERN and av[1]:
                op, av = av[1][0]
                if op is LITERAL:
                    charsetappend((op, av))
                elif op is BRANCH:
                    c = []
                    cappend = c.append
                    for p in av[1]:
                        if not p:
                            break
                        op, av = p[0]
                        if op is LITERAL:
                            cappend((op, av))
                        else:
                            break
                    else:
                        charset = c
            elif op is BRANCH:
                c = []
                cappend = c.append
                for p in av[1]:
                    if not p:
                        break
                    op, av = p[0]
                    if op is LITERAL:
                        cappend((op, av))
                    else:
                        break
                else:
                    charset = c
            elif op is IN:
                charset = av
##     if prefix:
##         print "*** PREFIX", prefix, prefix_skip
##     if charset:
##         print "*** CHARSET", charset
    # add an info block
    emit = code.append
    emit(OPCODES[INFO])
    skip = len(code); emit(0)
    # literal flag
    mask = 0
    if prefix:
        mask = SRE_INFO_PREFIX
        if len(prefix) == prefix_skip == len(pattern.data):
            mask = mask + SRE_INFO_LITERAL
    elif charset:
        mask = mask + SRE_INFO_CHARSET
    emit(mask)
    # pattern length
    if lo < MAXCODE:
        emit(lo)
    else:
        emit(MAXCODE)
        prefix = prefix[:MAXCODE]
    if hi < MAXCODE:
        emit(hi)
    else:
        emit(0)
    # add literal prefix
    if prefix:
        emit(len(prefix)) # length
        emit(prefix_skip) # skip
        code.extend(prefix)
        # generate overlap table
        code.extend(_generate_overlap_table(prefix))
    elif charset:
        _compile_charset(charset, flags, code)
    code[skip] = len(code) - skip

def isstring(obj):
    return isinstance(obj, (str, bytes))

def _code(p, flags):

    flags = p.pattern.flags | flags
    code = []

    # compile info block
    _compile_info(code, p, flags)

    # compile the pattern
    _compile(code, p.data, flags)

    code.append(OPCODES[SUCCESS])

    return code

def compile(p, flags=0):
    # internal: convert pattern list to internal format

    if isstring(p):
        pattern = p
        p = sre_parse.parse(p, flags)
    else:
        pattern = None

    code = _code(p, flags)

    # print code

    # XXX: <fl> get rid of this limitation!
    if p.pattern.groups > 100:
        raise AssertionError(
            "sorry, but this version only supports 100 named groups"
            )

    # map in either direction
    groupindex = p.pattern.groupdict
    indexgroup = [None] * p.pattern.groups
    for k, i in groupindex.items():
        indexgroup[i] = k

    return _sre.compile(
        pattern, flags | p.pattern.flags, code,
        p.pattern.groups-1,
        groupindex, indexgroup
        )
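# Quick illustration (added here; not part of the CPython original): the
# overlap table emitted into the info block is the classic KMP "failure
# function" -- table[i] is the length of the longest proper prefix of
# prefix[:i+1] that is also its suffix, letting the matcher skip ahead after
# a partial prefix match.
if __name__ == '__main__':
    assert _generate_overlap_table("aabaa") == [0, 1, 0, 1, 2]
    assert _generate_overlap_table("abcd") == [0, 0, 0, 0]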
mit
-6,332,745,429,824,574,000
32.50253
109
0.4963
false
vipul-tm/DAG
dags-ttpl/createPreviousState.py
1
14429
from airflow import DAG
from airflow.operators import PythonOperator
from airflow.hooks import RedisHook
from airflow.hooks import MemcacheHook
from airflow.models import Variable
from datetime import datetime, timedelta
from shutil import copyfile
import logging
import traceback

default_args = {
    'owner': 'wireless',
    'depends_on_past': False,
    'start_date': datetime.now() - timedelta(minutes=5),
    'email': ['vipulsharma144@gmail.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 0,
    'retry_delay': timedelta(minutes=1),
    'catchup': False,
    'provide_context': True,
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}

# redis_hook = RedisHook(redis_conn_id="redis_4")
PARENT_DAG_NAME = "GETSTATES"
prev_state_dag = DAG(dag_id=PARENT_DAG_NAME, default_args=default_args, schedule_interval='@once')
config = eval(Variable.get('system_config'))
redis_hook_5 = RedisHook(redis_conn_id="redis_hook_2")
memc_con = MemcacheHook(memc_cnx_id='memc_cnx')
vrfprv_memc_con = MemcacheHook(memc_cnx_id='vrfprv_memc_cnx')
pub_memc_con = MemcacheHook(memc_cnx_id='pub_memc_cnx')


def create_prev_state(**kwargs):
    # key = ospf1_slave_1_last_pl_info
    data = {}
    data_down = {}
    for conn_id in [1, 2, 3, 4, 5, 6, 7]:
        redis_hook = RedisHook(redis_conn_id="redis_prev_state_%s" % conn_id)
        if conn_id <= 5:
            for site in [1, 2, 3, 4, 5, 6, 7, 8]:
                data_redis_down = redis_hook.hgetall("ospf%s_slave_%s_device_down" % (conn_id, site))
                key = "ospf%s_slave_%s_down" % (conn_id, site)
                data_down[key] = data_redis_down
        elif conn_id == 6:
            for site in [1, 2, 3, 4, 5, 6]:
                data_redis_prv_down = redis_hook.hgetall("vrfprv_slave_%s_device_down" % (site))
                key = "ospf%s_slave_%s_down" % (conn_id, site)
                data_down[key] = data_redis_prv_down
        elif conn_id == 7:
            for site in [1]:
                data_redis_pub_down = redis_hook.hgetall("pub_slave_%s_device_down" % (site))
                key = "ospf%s_slave_%s_down" % (conn_id, site)
                data_down[key] = data_redis_pub_down
        for ds in ['pl', 'rta']:
            if conn_id <= 5:
                for site in [1, 2, 3, 4, 5, 6, 7, 8]:
                    data_redis = redis_hook.hgetall("ospf%s_slave_%s_last_%s_info" % (conn_id, site, ds))
                    key = "ospf%s_slave_%s_%s" % (conn_id, site, ds)
                    data[key] = data_redis
            elif conn_id == 6:
                for site in [1, 2, 3, 4, 5, 6]:
                    data_redis_prv = redis_hook.hgetall("vrfprv_slave_%s_last_%s_info" % (site, ds))
                    key = "ospf%s_slave_%s_%s" % (conn_id, site, ds)
                    data[key] = data_redis_prv
            elif conn_id == 7:
                for site in [1]:
                    data_redis_pub = redis_hook.hgetall("pub_slave_%s_last_%s_info" % (site, ds))
                    key = "ospf%s_slave_%s_%s" % (conn_id, site, ds)
                    data[key] = data_redis_pub

    machine_state_list_pl = {}
    machine_state_list_rta = {}
    machine_state_list_down = {}
    host_mapping = {}
    ##########################################################################
    logging.info("Creating IP to Host Mapping from HOST to IP mapping")
    ip_mapping = get_ip_host_mapping()
    for host_name, ip in ip_mapping.iteritems():
        host_mapping[ip] = host_name
    logging.info("Mapping Completed for %s hosts" % len(host_mapping))
    ##########################################################################
    for key in data:
        site_data = data.get(key)
        # logging.info("FOR %s is %s" % (key, len(key)))
        for device in site_data:
            host = host_mapping.get(device)
            if "pl" in key:
                machine_state_list_pl[host] = {'state': eval(site_data.get(device))[0],
                                               'since': eval(site_data.get(device))[1]}
            elif "rta" in key:
                machine_state_list_rta[host] = {'state': eval(site_data.get(device))[0],
                                                'since': eval(site_data.get(device))[1]}
    i = 0
    for key in data_down:
        site_data_down = data_down.get(key)
        # print "%s ===== %s" % (key, len(site_data_down))
        # logging.info("FOR %s is %s" % (key, len(key)))
        for device in site_data_down:
            if site_data_down.get(device) != None and site_data_down.get(device) != {}:
                try:
                    machine_state_list_down[device] = {'state': eval(site_data_down.get(device))[0],
                                                       'since': eval(site_data_down.get(device))[1]}
                except Exception:
                    pass
                    # logging.info("Device not found in the ")
                    # print site_data_down.get(device)
                    # traceback.print_exc()
            else:
                logging.info("Data not present for device %s " % (device))
    logging.info("Total rejected : %s" % (i))
    # print data_down
    print len(machine_state_list_pl), len(machine_state_list_rta)
    main_redis_key = "all_devices_state"
    rta = "all_devices_state_rta"
    down_key = "all_devices_down_state"
    redis_hook_5.set(main_redis_key, str(machine_state_list_pl))
    redis_hook_5.set(rta, str(machine_state_list_rta))
    redis_hook_5.set(down_key, str(machine_state_list_down))
    logging.info("3 keys generated in redis")


def get_ip_host_mapping():
    path = Variable.get("hosts_mk_path")
    try:
        host_var = load_file(path)
        ipaddresses = host_var.get('ipaddresses')
        return ipaddresses
    except IOError:
        logging.error("File Name not correct")
        return None
    except Exception:
        logging.error("Please check the HostMK file exists on the path provided")
        return None


def load_file(file_path):
    # Reset the global vars
    host_vars = {
        "all_hosts": [],
        "ipaddresses": {},
        "host_attributes": {},
        "host_contactgroups": [],
    }
    try:
        execfile(file_path, host_vars, host_vars)
        del host_vars['__builtins__']
    except IOError, e:
        pass
    return host_vars


def create_ul_issue_kpi_prev_state():
    all_devices = eval(Variable.get("hostmk.dict"))
    services_mapping = eval(Variable.get("ul_issue_kpi_to_formula_mapping"))
    all_services = []
    new_prev_states_dict = {}
    all_device_type = services_mapping.keys()
    for device_type in services_mapping:
        all_services.extend(services_mapping.get(device_type))
        new_prev_states_dict["kpi_ul_prev_state_%s" % (device_type)] = {}
    none_count = 0
    for device in all_devices:
        hostname = device
        device_type = all_devices.get(device)
        device_dict = {}
        if device_type in all_device_type:
            device_dict[hostname] = {'state': 'unknown', 'since': 'unknown'}
            service = services_mapping.get(device_type)[0]
            kpi_key = "util:%s:%s" % (hostname, service)
            try:
                old_states = memc_con.get(kpi_key)
                if old_states == None:
                    old_states = vrfprv_memc_con.get(kpi_key)
                if old_states == None:
                    old_states = pub_memc_con.get(kpi_key)
                if old_states != None:
                    old_severity = old_states.split(",")[0]
                    old_severity_since = old_states.split(",")[1]
                    device_dict[hostname] = {'state': old_severity, 'since': old_severity_since}
                    new_prev_states_dict.get("kpi_ul_prev_state_%s" % (device_type))[hostname] = \
                        {'state': old_severity, 'since': old_severity_since}
                else:
                    # print "None for %s %s" % (kpi_key, old_states)
                    none_count = none_count + 1
            except Exception, e:
                print "Unable to get UTIL for %s - %s" % (device_type, e)
                break
    print len(new_prev_states_dict), new_prev_states_dict.keys()
    count_total = 0
    for d in new_prev_states_dict:
        print len(new_prev_states_dict.get(d))
        count_total = count_total + len(new_prev_states_dict.get(d))
    print "None in Memc for %s Devices Total States Found %s" % (none_count, count_total)
    for key in new_prev_states_dict.keys():
        try:
            redis_hook_5.set(key, str(new_prev_states_dict.get(key)))
            logging.info("Setting for Key %s is successful" % (key))
        except Exception:
            logging.error("Unable to add %s key in redis" % (key))


def create_provis_kpi_prev_state():
    all_devices = eval(Variable.get("hostmk.dict"))
    services_mapping = eval(Variable.get("provision_kpi_to_formula_mapping"))
    all_services = []
    new_prev_states_dict = {}
    all_device_type = services_mapping.keys()
    for device_type in services_mapping:
        all_services.extend(services_mapping.get(device_type))
        new_prev_states_dict["kpi_provis_prev_state_%s" % (device_type)] = {}
    none_count = 0
    for device in all_devices:
        hostname = device
        device_type = all_devices.get(device)
        device_dict = {}
        if device_type in all_device_type:
            device_dict[hostname] = {'state': 'unknown', 'since': 'unknown'}
            service = services_mapping.get(device_type)[0]
            kpi_key = "util:%s:%s" % (hostname, service)
            try:
                old_states = memc_con.get(kpi_key)
                if old_states == None:
                    old_states = vrfprv_memc_con.get(kpi_key)
                if old_states == None:
                    old_states = pub_memc_con.get(kpi_key)
                if old_states != None:
                    old_severity = old_states.split(",")[0]
                    old_severity_since = old_states.split(",")[1]
                    device_dict[hostname] = {'state': old_severity, 'since': old_severity_since}
                    new_prev_states_dict.get("kpi_provis_prev_state_%s" % (device_type))[hostname] = \
                        {'state': old_severity, 'since': old_severity_since}
                else:
                    # print "None for %s %s" % (kpi_key, old_states)
                    none_count = none_count + 1
            except Exception, e:
                print "Unable to get UTIL for %s - %s" % (device_type, e)
                break
    print len(new_prev_states_dict), new_prev_states_dict.keys()
    count_total = 0
    for d in new_prev_states_dict:
        print len(new_prev_states_dict.get(d))
        count_total = count_total + len(new_prev_states_dict.get(d))
    print "None in Memc for %s Devices Total States Found %s" % (none_count, count_total)
    for key in new_prev_states_dict.keys():
        try:
            redis_hook_5.set(key, str(new_prev_states_dict.get(key)))
            logging.info("Setting for Key %s is successful" % (key))
        except Exception:
            logging.error("Unable to add %s key in redis" % (key))


def create_utilization_kpi_prev_state():
    all_devices = eval(Variable.get("hostmk.dict"))
    services_mapping = eval(Variable.get("utilization_kpi_service_mapping"))
    # services_mapping = eval(Variable.get("back_util"))
    all_services = []
    new_prev_states_dict = {}
    all_device_type = services_mapping.keys()
    for device_type in services_mapping:
        all_services.extend(services_mapping.get(device_type))
        new_prev_states_dict["kpi_util_prev_state_%s" % (device_type)] = {}
    none_count = 0
    for device in all_devices:
        hostname = device
        device_type = all_devices.get(device)
        device_dict = {}
        if device_type in all_device_type:
            device_dict[hostname] = {'state': 'unknown', 'since': 'unknown'}
            services = services_mapping.get(device_type)
            for service in services:
                kpi_key = "util:%s:%s" % (hostname, service)
                prev_dict_key = "%s_%s" % (hostname, service)
                try:
                    old_states = memc_con.get(kpi_key)
                    if old_states == None:
                        old_states = vrfprv_memc_con.get(kpi_key)
                    if old_states == None:
                        old_states = pub_memc_con.get(kpi_key)
                    if old_states != None:
                        old_severity = old_states.split(",")[0]
                        old_severity_since = old_states.split(",")[1]
                        device_dict[hostname] = {'state': old_severity, 'since': old_severity_since}
                        new_prev_states_dict.get("kpi_util_prev_state_%s" % (device_type))[prev_dict_key] = \
                            {'state': old_severity, 'since': old_severity_since}
                    else:
                        # print "None for %s %s" % (kpi_key, old_states)
                        none_count = none_count + 1
                except Exception, e:
                    print "Unable to get UTIL for %s - %s" % (device_type, e)
                    break
    print len(new_prev_states_dict), new_prev_states_dict.keys()
    count_total = 0
    for d in new_prev_states_dict:
        print len(new_prev_states_dict.get(d))
        count_total = count_total + len(new_prev_states_dict.get(d))
    print "None in Memc for %s Devices Total States Found %s" % (none_count, count_total)
    for key in new_prev_states_dict.keys():
        try:
            redis_hook_5.set(key, str(new_prev_states_dict.get(key)))
            logging.info("Setting for Key %s is successful" % (key))
        except Exception:
            logging.error("Unable to add %s key in redis" % (key))


redis_copy = PythonOperator(
    task_id="create_prev_state_for_all",
    provide_context=False,
    python_callable=create_prev_state,
    dag=prev_state_dag
)

get_kpi_prev_states_task = PythonOperator(
    task_id="create_ul_issue_kpi_prev_state",
    provide_context=False,
    python_callable=create_ul_issue_kpi_prev_state,
    dag=prev_state_dag
)

get_provis_kpi_prev_states_task = PythonOperator(
    task_id="create_provision_kpi_prev_state",
    provide_context=False,
    python_callable=create_provis_kpi_prev_state,
    dag=prev_state_dag
)

get_utilization_kpi_prev_states_task = PythonOperator(
    task_id="create_utilization_kpi_prev_state",
    provide_context=False,
    python_callable=create_utilization_kpi_prev_state,
    dag=prev_state_dag
)
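# Illustrative only (not part of the original DAG): once these tasks run, the
# previous-state blobs can be read back from Redis and eval'd into dicts. The
# host/port below are placeholders -- the real endpoint lives in the
# "redis_hook_2" Airflow connection.
if __name__ == '__main__':
    import redis
    r = redis.StrictRedis(host='localhost', port=6379, db=0)  # assumed endpoint
    raw = r.get("all_devices_state")
    if raw is not None:
        states = eval(raw)  # stored via str(dict), so eval() restores it
        print len(states), "device states loaded"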
bsd-3-clause
4,937,185,614,881,251,000
37.787634
153
0.563934
false
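The Airflow callables above all repeat one pattern: look up a `util:<hostname>:<service>` key in a chain of memcached pools, split the stored `"severity,since"` string, and stash the per-device-type result for Redis. A minimal sketch of that pattern, assuming dict-like memcached clients; `memc_pools` is a hypothetical stand-in for the `memc_con`/`vrfprv_memc_con`/`pub_memc_con` chain:

def build_prev_states(all_devices, services_mapping, memc_pools):
    """Return {device_type: {hostname: {'state': ..., 'since': ...}}}."""
    prev_states = {dtype: {} for dtype in services_mapping}
    for hostname, device_type in all_devices.items():
        services = services_mapping.get(device_type)
        if not services:
            continue  # device type is not under KPI monitoring
        key = "util:%s:%s" % (hostname, services[0])
        raw = None
        for pool in memc_pools:  # primary, vrfprv, pub -- first hit wins
            raw = pool.get(key)
            if raw is not None:
                break
        if raw is not None:
            severity, since = raw.split(",")[0], raw.split(",")[1]
            prev_states[device_type][hostname] = {'state': severity,
                                                  'since': since}
    return prev_states

Each resulting `prev_states[device_type]` dict is what the tasks serialize with `str()` and write to Redis under a `kpi_*_prev_state_<device_type>` key.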
andrewyoung1991/supriya
supriya/tools/requesttools/NodeFreeRequest.py
1
1577
# -*- encoding: utf-8 -*- import collections from supriya.tools import osctools from supriya.tools.requesttools.Request import Request class NodeFreeRequest(Request): r'''A /n_free request. :: >>> from supriya.tools import requesttools >>> request = requesttools.NodeFreeRequest( ... node_ids=1000, ... ) >>> request NodeFreeRequest( node_ids=(1000,) ) :: >>> message = request.to_osc_message() >>> message OscMessage(11, 1000) :: >>> message.address == requesttools.RequestId.NODE_FREE True ''' ### CLASS VARIABLES ### __slots__ = ( '_node_ids', ) ### INITIALIZER ### def __init__( self, node_ids=None ): Request.__init__(self) if not isinstance(node_ids, collections.Sequence): node_ids = (node_ids,) node_ids = tuple(int(_) for _ in node_ids) self._node_ids = node_ids ### PUBLIC METHODS ### def to_osc_message(self): request_id = int(self.request_id) contents = [request_id] contents.extend(self.node_ids) message = osctools.OscMessage(*contents) return message ### PUBLIC PROPERTIES ### @property def node_ids(self): return self._node_ids @property def response_specification(self): return None @property def request_id(self): from supriya.tools import requesttools return requesttools.RequestId.NODE_FREE
mit
6,297,036,571,516,061,000
20.324324
63
0.551046
false
maxisi/gwsumm
gwsumm/channels.py
1
6672
# -*- coding: utf-8 -*- # Copyright (C) Duncan Macleod (2013) # # This file is part of GWSumm. # # GWSumm is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # GWSumm is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GWSumm. If not, see <http://www.gnu.org/licenses/>. """Utilities for channel access """ import threading import urllib2 import re from Queue import Queue try: from kerberos import GSSError except ImportError: GSSError = None from gwpy.detector import Channel from . import (globalv, version) from .mode import * __author__ = 'Duncan Macleod <duncan.macleod@ligo.org>' __version__ = version.version class ThreadChannelQuery(threading.Thread): """Threaded CIS `Channel` query. """ def __init__(self, inqueue, outqueue, find_trend_source=False, timeout=5): threading.Thread.__init__(self) self.in_ = inqueue self.out = outqueue self.find_trends = find_trend_source self.timeout = timeout def run(self): i, channel = self.in_.get() self.in_.task_done() try: self.out.put((i, get_channel( channel, find_trend_source=self.find_trends, timeout=self.timeout))) except Exception as e: self.out.put(e) self.out.task_done() def get_channel(channel, find_trend_source=True, timeout=5): """Define a new :class:`~gwpy.detector.channel.Channel` Parameters ---------- channel : `str` name of new channel find_trend_source : `bool`, optional, default: `True` query for raw version of trend channel (trends not in CIS) timeout : `float`, optional, default: `5` number of seconds to wait before connection times out Returns ------- Channel : :class:`~gwpy.detector.channel.Channel` new channel. 
""" if ' ' in str(channel): name = str(channel) try: type_ = Channel.MATCH.match(name).groupdict()['type'] except AttributeError: type_ = None found = globalv.CHANNELS.sieve(name=name.replace('*', '\*'), exact_match=True) elif ',' in str(channel): name, type_ = str(channel).rsplit(',', 1) found = globalv.CHANNELS.sieve(name=name, type=type_, exact_match=True) else: type_ = isinstance(channel, Channel) and channel.type or None sr = isinstance(channel, Channel) and channel.sample_rate or None name = str(channel) found = globalv.CHANNELS.sieve(name=name, type=type_, sample_rate=sr, exact_match=True) if len(found) == 1: return found[0] elif len(found) > 1: cstrings = ['%s [%s, %s]' % (c.ndsname, c.sample_rate, c.unit) for c in found] raise ValueError("Ambiguous channel request '%s', multiple existing " "channels recovered:\n %s" % (str(channel), '\n '.join(cstrings))) else: matches = list(Channel.MATCH.finditer(name)) # match single raw channel if len(matches) == 1 and not re.search('\.[a-z]+\Z', name): try: raise ValueError("") # XXX: hacky removal of CIS query new = Channel.query(name, timeout=timeout) except (ValueError, urllib2.URLError, GSSError): new = Channel(str(channel)) else: new.name = str(channel) # match single trend elif len(matches) == 1: # set default trend type based on mode if type_ is None and globalv.MODE == SUMMARY_MODE_GPS: type_ = 's-trend' elif type_ is None: type_ = 'm-trend' name += ',%s' % type_ new = Channel(name) if find_trend_source: try: source = get_channel(new.name.rsplit('.')[0]) except ValueError: pass else: new.url = source.url new.unit = source.unit try: new.bits = source.bits except AttributeError: pass try: new.filter = source.filter except AttributeError: pass for param in filter(lambda x: x.endswith('_range') and not hasattr(new, x), vars(source)): setattr(new, param, getattr(source, param)) # determine sample rate for trends if type_ == 'm-trend': new.sample_rate = 1/60. elif type_ == 's-trend': new.sample_rate = 1 # match composite channel else: parts = get_channels([m.group() for m in matches]) new = Channel(name) new.subchannels = parts new._ifo = "".join(set(p.ifo for p in parts if p.ifo)) globalv.CHANNELS.append(new) try: return get_channel(new) except RuntimeError as e: if 'maximum recursion depth' in str(e): raise RuntimeError("Recursion error while access channel " "information for %s" % str(channel)) else: raise def get_channels(channels, **kwargs): """Multi-threaded channel query """ if len(channels) == 0: return [] # set up Queues inqueue = Queue() outqueue = Queue() # open threads for i in range(len(channels)): t = ThreadChannelQuery(inqueue, outqueue, **kwargs) t.setDaemon(True) t.start() # populate input queue for i, c in enumerate(channels): inqueue.put((i, c)) # block inqueue.join() outqueue.join() result = [] for i in range(len(channels)): c = outqueue.get() if isinstance(c, Exception): raise c else: result.append(c) return zip(*sorted(result, key=lambda (idx, chan): idx))[1]
gpl-3.0
-6,511,334,062,866,690,000
32.86802
79
0.542866
false
goneri/dci-control-server
sample/tox-agent.py
1
2235
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import shutil
import sys
import tempfile

import client

try:
    remoteci_name = sys.argv[1]
except IndexError:
    print("Usage: %s remoteci_name" % sys.argv[0])
    sys.exit(1)

workdir = tempfile.mkdtemp(suffix='dci_tox_agent')

dci_client = client.DCIClient()

test_name = "tox"
r = dci_client.get("/tests/%s" % test_name)
if r.status_code == 404:
    print("Test '%s' doesn't exist." % test_name)
    sys.exit(1)
else:
    test_id = r.json()['id']

r = dci_client.get("/remotecis/%s" % remoteci_name)
if r.status_code == 404:
    r = dci_client.post("/remotecis", {
        'name': remoteci_name,
        'test_id': test_id})
remoteci_id = r.json()['id']

job = dci_client.post("/jobs", {"remoteci_id": remoteci_id})
if job.status_code == 412:
    print("No jobs to process.")
    shutil.rmtree(workdir)
    sys.exit(0)
job_id = job.json()['id']
job = dci_client.get("/jobs/%s" % job_id).json()
structure_from_server = job['data']['components']['dci-control-server']

cmds = [
    ['git', 'init', workdir],
    ['git', 'pull',
     structure_from_server['git'],
     structure_from_server.get('ref', '')],
    ['git', 'fetch', '--all'],
    ['git', 'clean', '-ffdx'],
    ['git', 'reset', '--hard'],
    ['git', 'checkout', '-f', structure_from_server['sha']],
    ['tox']]
for cmd in cmds:
    r = dci_client.call(job_id, cmd, cwd=workdir)
    if r != 0:
        print("Test has failed")
        shutil.rmtree(workdir)
        sys.exit(1)

state = {
    "job_id": job["id"],
    "status": "success",
    "comment": "Process finished successfully"}
jobstate_id = dci_client.post("/jobstates", state)
# Clean up the scratch checkout before exiting.
shutil.rmtree(workdir)
sys.exit(0)
apache-2.0
2,374,281,384,220,255,000
26.9375
75
0.636689
false
sevenian3/ChromaStarPy
LevelPopsGasServer.py
1
55996
# -*- coding: utf-8 -*- """ Created on Mon Apr 24 14:13:47 2017 @author: ishort """ import math import Useful import ToolBox #import numpy #JB# #from matplotlib.pyplot import plot, title, show, scatter #storage for fits (not all may be used) uw = [] uwa = [] uwb = [] uwStage = [] uwbStage = [] uwu = [] uwl = [] uua=[] uub=[] """ #a function to create a cubic function fit extrapolation def cubicFit(x,y): coeffs = numpy.polyfit(x,y,3) #returns an array of coefficents for the cubic fit of the form #Ax^3 + Bx^2 + Cx + D as [A,B,C,D] return coeffs #this will work for any number of data points! def valueFromFit(fit,x): #return the value y for a given fit, at point x return (fit[0]*(x**3)+fit[1]*(x**2)+fit[2]*x+fit[3]) #holds the five temperature at which we have partition function data """ masterTemp = [130, 500, 3000, 8000, 10000] #JB# #def levelPops(lam0In, logNStage, chiL, log10UwStage, gwL, numDeps, temp): def levelPops(lam0In, logNStage, chiL, logUw, gwL, numDeps, temp): """ Returns depth distribution of occupation numbers in lower level of b-b transition, // Input parameters: // lam0 - line centre wavelength in nm // logNStage - log_e density of absorbers in relevent ion stage (cm^-3) // logFlu - log_10 oscillator strength (unitless) // chiL - energy of lower atomic E-level of b-b transition in eV // Also needs atsmopheric structure information: // numDeps // temp structure """ c = Useful.c() logC = Useful.logC() k = Useful.k() logK = Useful.logK() logH = Useful.logH() logEe = Useful.logEe() logMe = Useful.logMe() ln10 = math.log(10.0) logE = math.log10(math.e); #// for debug output log2pi = math.log(2.0 * math.pi) log2 = math.log(2.0) #//double logNl = logNlIn * ln10; // Convert to base e #// Parition functions passed in are 2-element vectore with remperature-dependent base 10 log Us #// Convert to natural logs: #double thisLogUw, Ttheta; thisLogUw = 0.0 # //default initialization #logUw = [ 0.0 for i in range(5) ] logE10 = math.log(10.0) #print("log10UwStage ", log10UwStage) #for kk in range(len(logUw)): # logUw[kk] = logE10*log10UwStage[kk] #// lburns new loop logGwL = math.log(gwL) #//System.out.println("chiL before: " + chiL); #// If we need to subtract chiI from chiL, do so *before* converting to tiny numbers in ergs! #////For testing with Ca II lines using gS3 internal line list only: #//boolean ionized = true; #//if (ionized) { #// //System.out.println("ionized, doing chiL - chiI: " + ionized); #// // chiL = chiL - chiI; #// chiL = chiL - 6.113; #// } #// // #//Log of line-center wavelength in cm logLam0 = math.log(lam0In) #// * 1.0e-7); #// energy of b-b transition logTransE = logH + logC - logLam0 #//ergs if (chiL <= 0.0): chiL = 1.0e-49 logChiL = math.log(chiL) + Useful.logEv() #// Convert lower E-level from eV to ergs logBoltzFacL = logChiL - Useful.logK() #// Pre-factor for exponent of excitation Boltzmann factor boltzFacL = math.exp(logBoltzFacL) boltzFacGround = 0.0 / k #//I know - its zero, but let's do it this way anyway' #// return a 1D numDeps array of logarithmic number densities #// level population of lower level of bb transition (could be in either stage I or II!) 
logNums = [ 0.0 for i in range(numDeps)] #double num, logNum, expFac; #JB# #print("thisLogUw:",numpy.shape(logUw)) logUwFit = ToolBox.cubicFit(masterTemp,logUw)#u(T) fit uw.append(logUwFit) #JB# for id in range(numDeps): #//Determine temperature dependenet partition functions Uw: #Ttheta = 5040.0 / temp[0][id] #//NEW Determine temperature dependent partition functions Uw: lburns thisTemp = temp[0][id] """ if (Ttheta >= 1.0): thisLogUw = logUw[0] if (Ttheta <= 0.5): thisLogUw = logUw[1] if (Ttheta > 0.5 and Ttheta < 1.0): thisLogUw = ( logUw[1] * (Ttheta - 0.5)/(1.0 - 0.5) ) \ + ( logUw[0] * (1.0 - Ttheta)/(1.0 - 0.5) ) """ #JB# thisLogUw = ToolBox.valueFromFit(logUwFit,thisTemp)#u(T) value extrapolated #JB# if (thisTemp >= 10000.0): thisLogUw = logUw[4] if (thisTemp <= 130.0): thisLogUw = logUw[0] """ if (thisTemp > 130 and thisTemp <= 500): thisLogUw = logUw[1] * (thisTemp - 130)/(500 - 130) \ + logUw[0] * (500 - thisTemp)/(500 - 130) if (thisTemp > 500 and thisTemp <= 3000): thisLogUw = logUw[2] * (thisTemp - 500)/(3000 - 500) \ + logUw[1] * (3000 - thisTemp)/(3000 - 500) if (thisTemp > 3000 and thisTemp <= 8000): thisLogUw = logUw[3] * (thisTemp - 3000)/(8000 - 3000) \ + logUw[2] * (8000 - thisTemp)/(8000 - 3000) if (thisTemp > 8000 and thisTemp < 10000): thisLogUw = logUw[4] * (thisTemp - 8000)/(10000 - 8000) \ + logUw[3] * (10000 - thisTemp)/(10000 - 8000) """ #print("logUw ", logUw, " thisLogUw ", thisLogUw) #//System.out.println("LevPops: ionized branch taken, ionized = " + ionized); #// Take stat weight of ground state as partition function: logNums[id] = logNStage[id] - boltzFacL / temp[0][id] + logGwL - thisLogUw #// lower level of b-b transition #print("LevelPopsServer.stagePops id ", id, " logNStage[id] ", logNStage[id], " boltzFacL ", boltzFacL, " temp[0][id] ", temp[0][id], " logGwL ", logGwL, " thisLogUw ", thisLogUw, " logNums[id] ", logNums[id]); #// System.out.println("LevelPops: id, logNums[0][id], logNums[1][id], logNums[2][id], logNums[3][id]: " + id + " " #// + Math.exp(logNums[0][id]) + " " #// + Math.exp(logNums[1][id]) + " " #// + Math.exp(logNums[2][id]) + " " #// + Math.exp(logNums[3][id])); #//System.out.println("LevelPops: id, logNums[0][id], logNums[1][id], logNums[2][id], logNums[3][id], logNums[4][id]: " + id + " " #// + logE * (logNums[0][id]) + " " #// + logE * (logNums[1][id]) + " " #// + logE * (logNums[2][id]) + " " # // + logE * (logNums[3][id]) + " " #// + logE * (logNums[4][id]) ); #//System.out.println("LevelPops: id, logIonFracI, logIonFracII: " + id + " " + logE*logIonFracI + " " + logE*logIonFracII #// + "logNum, logNumI, logNums[0][id], logNums[1][id] " #// + logE*logNum + " " + logE*logNumI + " " + logE*logNums[0][id] + " " + logE*logNums[1][id]); #//System.out.println("LevelPops: id, logIonFracI: " + id + " " + logE*logIonFracI #// + "logNums[0][id], boltzFacL/temp[0][id], logNums[2][id]: " #// + logNums[0][id] + " " + boltzFacL/temp[0][id] + " " + logNums[2][id]); #//id loop #stop #print (uw) return logNums #//This version - ionization equilibrium *WITHOUT* molecules - logNum is TOTAL element population #def stagePops2(logNum, Ne, chiIArr, log10UwAArr, \ # numMols, logNumB, dissEArr, log10UwBArr, logQwABArr, logMuABArr, \ # numDeps, temp): def stagePops(logNum, Ne, chiIArr, logUw, \ numDeps, temp): #line 1: //species A data - ionization equilibrium of A #line 2: //data for set of species "B" - molecular equlibrium for set {AB} """Ionization equilibrium routine WITHOUT molecule formation: // Returns depth distribution of ionization stage 
populations // Input parameters: // logNum - array with depth-dependent total element number densities (cm^-3) // chiI1 - ground state ionization energy of neutral stage // chiI2 - ground state ionization energy of singly ionized stage // Also needs atsmopheric structure information: // numDeps // temp structure // rho structure // Atomic element A is the one whose ionization fractions are being computed // """ ln10 = math.log(10.0) logE = math.log10(math.e) #// for debug output log2pi = math.log(2.0 * math.pi) log2 = math.log(2.0) numStages = len(chiIArr) #// + 1; //need one more stage above the highest stage to be populated #// var numMols = dissEArr.length; #// Parition functions passed in are 2-element vectore with remperature-dependent base 10 log Us #// Convert to natural logs: #double Ttheta, thisTemp; #//Default initializations: #//We need one more stage in size of saha factor than number of stages we're actualy populating thisLogUw = [ 0.0 for i in range(numStages+1) ] for i in range(numStages+1): thisLogUw[i] = 0.0 logE10 = math.log(10.0) #//atomic ionization stage Boltzmann factors: #double logChiI, logBoltzFacI; boltzFacI = [ 0.0 for i in range(numStages) ] #print("numStages ", numStages, " Useful.logEv ", Useful.logEv()) for i in range(numStages): #print("i ", i, " chiIArr ", chiIArr[i]) logChiI = math.log(chiIArr[i]) + Useful.logEv() logBoltzFacI = logChiI - Useful.logK() boltzFacI[i] = math.exp(logBoltzFacI) logSahaFac = log2 + (3.0 / 2.0) * (log2pi + Useful.logMe() + Useful.logK() - 2.0 * Useful.logH()) #// return a 2D 5 x numDeps array of logarithmic number densities #// Row 0: neutral stage ground state population #// Row 1: singly ionized stage ground state population #// Row 2: doubly ionized stage ground state population #// Row 3: triply ionized stage ground state population #// Row 4: quadruply ionized stage ground state population #double[][] logNums = new double[numStages][numDeps]; logNums = [ [ 0.0 for i in range(numDeps)] for j in range(numStages) ] #//We need one more stage in size of saha factor than number of stages we're actualy populating #// for index accounting pirposes #// For atomic ionization stages: logSaha = [ [ 0.0 for i in range(numStages+1)] for j in range(numStages+1) ] saha = [ [ 0.0 for i in range(numStages+1)] for j in range(numStages+1) ] #// logIonFrac = [ 0.0 for i in range(numStages) ] #double expFac, logNe; #// Now - molecular variables: thisLogUwA = 0.0 #// element A #thisLogQwAB = math.log(300.0) #//For clarity: neutral stage of atom whose ionization equilibrium is being computed is element A #// for molecule formation: logUwA = [ 0.0 for i in range(5) ] #JB# uua=[] #uub=[] #qwab=[] for iStg in range(numStages): currentUwArr=list(logUw[iStg])#u(T) determined values UwFit = ToolBox.cubicFit(masterTemp,currentUwArr)#u(T) fit uua.append(UwFit) #print(logUw[iStg]) for id in range(numDeps): #//// reduce or enhance number density by over-all Rosseland opcity scale parameter #// #//Row 1 of Ne is log_e Ne in cm^-3 logNe = Ne[1][id] #//Determine temperature dependent partition functions Uw: thisTemp = temp[0][id] #Ttheta = 5040.0 / thisTemp #JB# #use temps and partition values to create a function #then use said function to extrapolate values for all points thisLogUw[numStages] = 0.0 for iStg in range(numStages): thisLogUw[iStg] = ToolBox.valueFromFit(uua[iStg],thisTemp)#u(T) value extrapolated #JB# #// NEW Determine temperature dependent partition functions Uw: lburns if (thisTemp <= 130.0): for iStg in range(numStages): thisLogUw[iStg] = 
logUw[iStg][0] #for iMol in range(numMols): # thisLogUwB[iMol] = logUwB[iMol][0] if (thisTemp >= 10000.0): for iStg in range(numStages): thisLogUw[iStg] = logUw[iStg][4] #for iMol in range(numMols): # thisLogUwB[iMol] = logUwB[iMol][4] #//For clarity: neutral stage of atom whose ionization equilibrium is being computed is element A #// for molecule formation: thisLogUwA = thisLogUw[0]; #//Ionization stage Saha factors: for iStg in range(numStages): #print("iStg ", iStg) logSaha[iStg+1][iStg] = logSahaFac - logNe - (boltzFacI[iStg] /temp[0][id]) + (3.0 * temp[1][id] / 2.0) + thisLogUw[iStg+1] - thisLogUw[iStg] saha[iStg+1][iStg] = math.exp(logSaha[iStg+1][iStg]) #//Compute log of denominator is ionization fraction, f_stage denominator = 1.0 #//default initialization - leading term is always unity #//ion stage contributions: for jStg in range(1, numStages+1): addend = 1.0 #//default initialization for product series for iStg in range(jStg): #//console.log("jStg " + jStg + " saha[][] indices " + (iStg+1) + " " + iStg); addend = addend * saha[iStg+1][iStg] denominator = denominator + addend #// logDenominator = math.log(denominator) logIonFrac[0] = -1.0 * logDenominator #// log ionization fraction in stage I for jStg in range(1, numStages): addend = 0.0 #//default initialization for product series for iStg in range(jStg): #//console.log("jStg " + jStg + " saha[][] indices " + (iStg+1) + " " + iStg); addend = addend + logSaha[iStg+1][iStg] logIonFrac[jStg] = addend - logDenominator for iStg in range(numStages): logNums[iStg][id] = logNum[id] + logIonFrac[iStg] #//id loop return logNums; #//end method stagePops #end method levelPops #def stagePops2(logNum, Ne, chiIArr, log10UwAArr, \ # numMols, logNumB, dissEArr, log10UwBArr, logQwABArr, logMuABArr, \ # numDeps, temp): def stagePops2(logNum, Ne, chiIArr, logUw, \ numMols, logNumB, dissEArr, logUwB, logQwABArr, logMuABArr, \ numDeps, temp): #line 1: //species A data - ionization equilibrium of A #line 2: //data for set of species "B" - molecular equlibrium for set {AB} """Ionization equilibrium routine that accounts for molecule formation: // Returns depth distribution of ionization stage populations // Input parameters: // logNum - array with depth-dependent total element number densities (cm^-3) // chiI1 - ground state ionization energy of neutral stage // chiI2 - ground state ionization energy of singly ionized stage // Also needs atsmopheric structure information: // numDeps // temp structure // rho structure // Atomic element A is the one whose ionization fractions are being computed // Element B refers to array of other species with which A forms molecules AB """ ln10 = math.log(10.0) logE = math.log10(math.e) #// for debug output log2pi = math.log(2.0 * math.pi) log2 = math.log(2.0) numStages = len(chiIArr) #// + 1; //need one more stage above the highest stage to be populated #// var numMols = dissEArr.length; #// Parition functions passed in are 2-element vectore with remperature-dependent base 10 log Us #// Convert to natural logs: #double Ttheta, thisTemp; #//Default initializations: #//We need one more stage in size of saha factor than number of stages we're actualy populating thisLogUw = [ 0.0 for i in range(numStages+1) ] for i in range(numStages+1): thisLogUw[i] = 0.0 logE10 = math.log(10.0) #//atomic ionization stage Boltzmann factors: #double logChiI, logBoltzFacI; boltzFacI = [ 0.0 for i in range(numStages) ] #print("numStages ", numStages, " Useful.logEv ", Useful.logEv()) for i in range(numStages): #print("i ", i, " chiIArr ", 
chiIArr[i]) logChiI = math.log(chiIArr[i]) + Useful.logEv() logBoltzFacI = logChiI - Useful.logK() boltzFacI[i] = math.exp(logBoltzFacI) logSahaFac = log2 + (3.0 / 2.0) * (log2pi + Useful.logMe() + Useful.logK() - 2.0 * Useful.logH()) #// return a 2D 5 x numDeps array of logarithmic number densities #// Row 0: neutral stage ground state population #// Row 1: singly ionized stage ground state population #// Row 2: doubly ionized stage ground state population #// Row 3: triply ionized stage ground state population #// Row 4: quadruply ionized stage ground state population #double[][] logNums = new double[numStages][numDeps]; logNums = [ [ 0.0 for i in range(numDeps)] for j in range(numStages) ] #//We need one more stage in size of saha factor than number of stages we're actualy populating #// for index accounting pirposes #// For atomic ionization stages: logSaha = [ [ 0.0 for i in range(numStages+1)] for j in range(numStages+1) ] saha = [ [ 0.0 for i in range(numStages+1)] for j in range(numStages+1) ] #// logIonFrac = [ 0.0 for i in range(numStages) ] #double expFac, logNe; #// Now - molecular variables: #//Treat at least one molecule - if there are really no molecules for an atomic species, #//there will be one phantom molecule in the denominator of the ionization fraction #//with an impossibly high dissociation energy ifMols = True if (numMols == 0): ifMols = False numMols = 1 #//This should be inherited, but let's make sure: dissEArr[0] = 19.0 #//eV #//Molecular partition functions - default initialization: #double[] thisLogUwB = new double[numMols]; thisLogUwB = [ 0.0 for i in range(numMols) ] for iMol in range(numMols): thisLogUwB[iMol] = 0.0 #// variable for temp-dependent computed partn fn of array element B thisLogUwA = 0.0 #// element A thisLogQwAB = math.log(300.0) #//For clarity: neutral stage of atom whose ionization equilibrium is being computed is element A #// for molecule formation: logUwA = [ 0.0 for i in range(5) ] if (numMols > 0): for kk in range(len(logUwA)): logUwA[kk] = logUw[0][kk] #// lburns #//} #//// Molecular partition functions: #//Molecular dissociation Boltzmann factors: boltzFacIAB = [ 0.0 for i in range(numMols) ] logMolSahaFac = [ 0.0 for i in range(numMols) ] #//if (numMols > 0){ #double logDissE, logBoltzFacIAB; for iMol in range(numMols): logDissE = math.log(dissEArr[iMol]) + Useful.logEv() logBoltzFacIAB = logDissE - Useful.logK() boltzFacIAB[iMol] = math.exp(logBoltzFacIAB) logMolSahaFac[iMol] = (3.0 / 2.0) * (log2pi + logMuABArr[iMol] + Useful.logK() - 2.0 * Useful.logH()) #//console.log("iMol " + iMol + " dissEArr[iMol] " + dissEArr[iMol] + " logDissE " + logE*logDissE + " logBoltzFacIAB " + logE*logBoltzFacIAB + " boltzFacIAB[iMol] " + boltzFacIAB[iMol] + " logMuABArr " + logE*logMuABArr[iMol] + " logMolSahaFac " + logE*logMolSahaFac[iMol]); #//} #// For molecular species: logSahaMol = [ 0.0 for i in range(numMols) ] invSahaMol = [ 0.0 for i in range(numMols) ] #JB# uua=[] uub=[] qwab=[] for iStg in range(numStages): currentUwArr=list(logUw[iStg])#u(T) determined values UwFit = ToolBox.cubicFit(masterTemp,currentUwArr)#u(T) fit uua.append(UwFit) #print(logUw[iStg]) for iMol in range(numMols): currentUwBArr=list(logUwB[iMol])#u(T) determined values UwBFit = ToolBox.cubicFit(masterTemp,currentUwBArr)#u(T) fit uub.append(UwBFit) for id in range(numDeps): #//// reduce or enhance number density by over-all Rosseland opcity scale parameter #// #//Row 1 of Ne is log_e Ne in cm^-3 logNe = Ne[1][id] #//Determine temperature dependent partition functions Uw: 
thisTemp = temp[0][id] #Ttheta = 5040.0 / thisTemp #JB# #use temps and partition values to create a function #then use said function to extrapolate values for all points thisLogUw[numStages] = 0.0 for iStg in range(numStages): thisLogUw[iStg] = ToolBox.valueFromFit(uua[iStg],thisTemp)#u(T) value extrapolated for iMol in range(numMols): thisLogUwB[iMol] = ToolBox.valueFromFit(uub[iMol],thisTemp)#u(T) value extrapolated #JB# #// NEW Determine temperature dependent partition functions Uw: lburns if (thisTemp <= 130.0): for iStg in range(numStages): thisLogUw[iStg] = logUw[iStg][0] for iMol in range(numMols): thisLogUwB[iMol] = logUwB[iMol][0] if (thisTemp >= 10000.0): for iStg in range(numStages): thisLogUw[iStg] = logUw[iStg][4] for iMol in range(numMols): thisLogUwB[iMol] = logUwB[iMol][4] for iMol in range(numMols): if (thisTemp < 3000.0): thisLogQwAB = ( logQwABArr[iMol][1] * (3000.0 - thisTemp)/(3000.0 - 500.0) ) \ + ( logQwABArr[iMol][2] * (thisTemp - 500.0)/(3000.0 - 500.0) ) if ( (thisTemp >= 3000.0) and (thisTemp <= 8000.0) ): thisLogQwAB = ( logQwABArr[iMol][2] * (8000.0 - thisTemp)/(8000.0 - 3000.0) ) \ + ( logQwABArr[iMol][3] * (thisTemp - 3000.0)/(8000.0 - 3000.0) ) if ( thisTemp > 8000.0 ): thisLogQwAB = ( logQwABArr[iMol][3] * (10000.0 - thisTemp)/(10000.0 - 8000.0) ) \ + ( logQwABArr[iMol][4] * (thisTemp - 8000.0)/(10000.0 - 8000.0) ) #// iMol loop #//For clarity: neutral stage of atom whose ionization equilibrium is being computed is element A #// for molecule formation: thisLogUwA = thisLogUw[0]; #//Ionization stage Saha factors: for iStg in range(numStages): #print("iStg ", iStg) logSaha[iStg+1][iStg] = logSahaFac - logNe - (boltzFacI[iStg] /temp[0][id]) + (3.0 * temp[1][id] / 2.0) + thisLogUw[iStg+1] - thisLogUw[iStg] saha[iStg+1][iStg] = math.exp(logSaha[iStg+1][iStg]) #//Molecular Saha factors: for iMol in range(numMols): logSahaMol[iMol] = logMolSahaFac[iMol] - logNumB[iMol][id] - (boltzFacIAB[iMol] / temp[0][id]) + (3.0 * temp[1][id] / 2.0) + thisLogUwB[iMol] + thisLogUwA - thisLogQwAB #//For denominator of ionization fraction, we need *inverse* molecular Saha factors (N_AB/NI): logSahaMol[iMol] = -1.0 * logSahaMol[iMol] invSahaMol[iMol] = math.exp(logSahaMol[iMol]) #//Compute log of denominator is ionization fraction, f_stage denominator = 1.0 #//default initialization - leading term is always unity #//ion stage contributions: for jStg in range(1, numStages+1): addend = 1.0 #//default initialization for product series for iStg in range(jStg): #//console.log("jStg " + jStg + " saha[][] indices " + (iStg+1) + " " + iStg); addend = addend * saha[iStg+1][iStg] denominator = denominator + addend #//molecular contribution if (ifMols == True): for iMol in range(numMols): denominator = denominator + invSahaMol[iMol] #// logDenominator = math.log(denominator) logIonFrac[0] = -1.0 * logDenominator #// log ionization fraction in stage I for jStg in range(1, numStages): addend = 0.0 #//default initialization for product series for iStg in range(jStg): #//console.log("jStg " + jStg + " saha[][] indices " + (iStg+1) + " " + iStg); addend = addend + logSaha[iStg+1][iStg] logIonFrac[jStg] = addend - logDenominator for iStg in range(numStages): logNums[iStg][id] = logNum[id] + logIonFrac[iStg] #//id loop return logNums; #//end method stagePops def stagePops3(logNum, Ne, chiIArr, logUw, numDeps, temp): #Version for ChromaStarPyGas: logNum is now *neutral stage* population from Phil # Bennett's GAS package #line 1: //species A data - ionization equilibrium of A #line 2: //data for set 
of species "B" - molecular equlibrium for set {AB} """Ionization equilibrium routine that accounts for molecule formation: // Returns depth distribution of ionization stage populations // Input parameters: // logNum - array with depth-dependent neutral stage number densities (cm^-3) // chiI1 - ground state ionization energy of neutral stage // chiI2 - ground state ionization energy of singly ionized stage // Also needs atsmopheric structure information: // numDeps // temp structure // rho structure // Atomic element A is the one whose ionization fractions are being computed // Element B refers to array of other species with which A forms molecules AB """ ln10 = math.log(10.0) logE = math.log10(math.e) #// for debug output log2pi = math.log(2.0 * math.pi) log2 = math.log(2.0) numStages = len(chiIArr) #// + 1; //need one more stage above the highest stage to be populated #// var numMols = dissEArr.length; #// Parition functions passed in are 2-element vectore with remperature-dependent base 10 log Us #// Convert to natural logs: #double Ttheta, thisTemp; #//Default initializations: #//We need one more stage in size of saha factor than number of stages we're actualy populating thisLogUw = [ 0.0 for i in range(numStages+1) ] for i in range(numStages+1): thisLogUw[i] = 0.0 logE10 = math.log(10.0) #//atomic ionization stage Boltzmann factors: #double logChiI, logBoltzFacI; boltzFacI = [ 0.0 for i in range(numStages) ] #print("numStages ", numStages, " Useful.logEv ", Useful.logEv()) for i in range(numStages): #print("i ", i, " chiIArr ", chiIArr[i]) logChiI = math.log(chiIArr[i]) + Useful.logEv() logBoltzFacI = logChiI - Useful.logK() boltzFacI[i] = math.exp(logBoltzFacI) logSahaFac = log2 + (3.0 / 2.0) * (log2pi + Useful.logMe() + Useful.logK() - 2.0 * Useful.logH()) #// return a 2D 5 x numDeps array of logarithmic number densities #// Row 0: neutral stage ground state population #// Row 1: singly ionized stage ground state population #// Row 2: doubly ionized stage ground state population #// Row 3: triply ionized stage ground state population #// Row 4: quadruply ionized stage ground state population #double[][] logNums = new double[numStages][numDeps]; logNums = [ [ 0.0 for i in range(numDeps)] for j in range(numStages) ] #//We need one more stage in size of saha factor than number of stages we're actualy populating #// for index accounting pirposes #// For atomic ionization stages: #logSaha = [ [ 0.0 for i in range(numStages+1)] for j in range(numStages+1) ] #saha = [ [ 0.0 for i in range(numStages+1)] for j in range(numStages+1) ] #// #logIonFrac = [ 0.0 for i in range(numStages) ] #double expFac, logNe; #JB# uua=[] uub=[] qwab=[] for iStg in range(numStages): currentUwArr=list(logUw[iStg])#u(T) determined values UwFit = ToolBox.cubicFit(masterTemp,currentUwArr)#u(T) fit uua.append(UwFit) #print(logUw[iStg]) for id in range(numDeps): #//// reduce or enhance number density by over-all Rosseland opcity scale parameter #// #//Row 1 of Ne is log_e Ne in cm^-3 logNe = Ne[1][id] #//Determine temperature dependent partition functions Uw: thisTemp = temp[0][id] #Ttheta = 5040.0 / thisTemp #JB# #use temps and partition values to create a function #then use said function to extrapolate values for all points thisLogUw[numStages] = 0.0 for iStg in range(numStages): thisLogUw[iStg] = ToolBox.valueFromFit(uua[iStg],thisTemp)#u(T) value extrapolated #JB# #// NEW Determine temperature dependent partition functions Uw: lburns if (thisTemp <= 130.0): for iStg in range(numStages): thisLogUw[iStg] = 
logUw[iStg][0] if (thisTemp >= 10000.0): for iStg in range(numStages): thisLogUw[iStg] = logUw[iStg][4] #//For clarity: neutral stage of atom whose ionization equilibrium is being computed is element A #// for molecule formation: #thisLogUwA = thisLogUw[0]; #//Ionization stage Saha factors: logNums[0][id] = logNum[id] for iStg in range(1, numStages): #print("iStg ", iStg) thisLogSaha = logSahaFac - logNe - (boltzFacI[iStg-1] /temp[0][id]) + (3.0 * temp[1][id] / 2.0) + thisLogUw[iStg] - thisLogUw[iStg-1] #saha[iStg+1][iStg] = math.exp(logSaha[iStg+1][iStg]) logNums[iStg][id] = logNums[iStg-1][id] + thisLogSaha #//id loop return logNums; #//end method stagePops #def sahaRHS(chiI, log10UwUArr, log10UwLArr, temp): def sahaRHS(chiI, logUwU, logUwL, temp): """RHS of partial pressure formulation of Saha equation in standard form (N_U*P_e/N_L on LHS) // Returns depth distribution of LHS: Phi(T) === N_U*P_e/N_L (David Gray notation) // Input parameters: // chiI - ground state ionization energy of lower stage // log10UwUArr, log10UwLArr - array of temperature-dependent partition function for upper and lower ionization stage // Also needs atsmopheric structure information: // numDeps // temp structure // // Atomic element "A" is the one whose ionization fractions are being computed // Element "B" refers to array of other species with which A forms molecules "AB" """ ln10 = math.log(10.0) logE = math.log10(math.e) #// for debug output log2pi = math.log(2.0 * math.pi) log2 = math.log(2.0) #// var numMols = dissEArr.length; #// Parition functions passed in are 2-element vectore with remperature-dependent base 10 log Us #// Convert to natural logs: #double Ttheta, thisTemp; #//Default initializations: #//We need one more stage in size of saha factor than number of stages we're actualy populating thisLogUwU = 0.0 thisLogUwL = 0.0 logE10 = math.log(10.0) #//We need one more stage in size of saha factor than number of stages we're actualy populating #logUwU = [0.0 for i in range(5)] #logUwL = [0.0 for i in range(5)] for kk in range(len(logUwL)): logUwU[kk] = logUwL[kk] # logUwL[kk] = logE10*log10UwLArr[kk] #//System.out.println("chiL before: " + chiL); #// If we need to subtract chiI from chiL, do so *before* converting to tiny numbers in ergs! #//atomic ionization stage Boltzmann factors: #double logChiI, logBoltzFacI; #double boltzFacI; logChiI = math.log(chiI) + Useful.logEv() logBoltzFacI = logChiI - Useful.logK() boltzFacI = math.exp(logBoltzFacI) #//Extra factor of k to get k^5/2 in the P_e formulation of Saha Eq. 
logSahaFac = log2 + (3.0 / 2.0) * (log2pi + Useful.logMe() + Useful.logK() - 2.0 * Useful.logH()) + Useful.logK() #//double[] logLHS = new double[numDeps]; #double logLHS; #// For atomic ionization stages: #double logSaha, saha, expFac; #// for (int id = 0; id < numDeps; id++) { #// #//Determine temperature dependent partition functions Uw: thisTemp = temp[0] #Ttheta = 5040.0 / thisTemp """ if (Ttheta >= 1.0): thisLogUwU = logUwU[0] thisLogUwL = logUwL[0] if (Ttheta <= 0.5): thisLogUwU = logUwU[1] thisLogUwL = logUwL[1] if (Ttheta > 0.5 and Ttheta < 1.0): thisLogUwU = ( logUwU[1] * (Ttheta - 0.5)/(1.0 - 0.5) ) + ( logUwU[0] * (1.0 - Ttheta)/(1.0 - 0.5) ) thisLogUwL = ( logUwL[1] * (Ttheta - 0.5)/(1.0 - 0.5) ) + ( logUwL[0] * (1.0 - Ttheta)/(1.0 - 0.5) ) """ #JB# currentUwUArr=list(logUwU)#u(T) determined values UwUFit = ToolBox.cubicFit(masterTemp,currentUwUArr)#u(T) fit thisLogUwU = ToolBox.valueFromFit(UwUFit,thisTemp)#u(T) value extrapolated currentUwLArr=list(logUwL)#u(T) determined values UwLFit = ToolBox.cubicFit(masterTemp,currentUwLArr)#u(T) fit thisLogUwL = ToolBox.valueFromFit(UwLFit,thisTemp)#u(T) value extrapolated #JB# #will need to do this one in Main as it goes through its own loop of temp #if thisTemp == superTemp[0][len(superTemp[0])]: # uwu.append(UwUFit) # uwl.append(UwLFit) # #JB# if (thisTemp <= 130.0): thisLogUwU = logUwU[0] thisLogUwL = logUwL[0] if (thisTemp >= 10000.0): thisLogUwU = logUwU[4] thisLogUwL = logUwL[4] """ if (thisTemp > 130 and thisTemp <= 500): thisLogUwU = logUwU[1] * (thisTemp - 130)/(500 - 130) \ + logUwU[0] * (500 - thisTemp)/(500 - 130) thisLogUwL = logUwL[1] * (thisTemp - 130)/(500 - 130) \ + logUwL[0] * (500 - thisTemp)/(500 - 130) if (thisTemp > 500 and thisTemp <= 3000): thisLogUwU = logUwU[2] * (thisTemp - 500)/(3000 - 500) \ + logUwU[1] * (3000 - thisTemp)/(3000 - 500) thisLogUwL = logUwL[2] * (thisTemp - 500)/(3000 - 500) \ + logUwL[1] * (3000 - thisTemp)/(3000 - 500) if (thisTemp > 3000 and thisTemp <= 8000): thisLogUwU = logUwU[3] * (thisTemp - 3000)/(8000 - 3000) \ + logUwU[2] * (8000 - thisTemp)/(8000 - 3000) thisLogUwL = logUwL[3] * (thisTemp - 3000)/(8000 - 3000) \ + logUwL[2] * (8000 - thisTemp)/(8000 - 3000) if (thisTemp > 8000 and thisTemp < 10000): thisLogUwU = logUwU[4] * (thisTemp - 8000)/(10000 - 8000) \ + logUwU[3] * (10000 - thisTemp)/(10000 - 8000) thisLogUwL = logUwL[4] * (thisTemp - 8000)/(10000 - 8000) \ + logUwL[3] * (10000 - thisTemp)/(10000 - 8000) if (thisTemp >= 10000): thisLogUwU = logUwU[4] thisLogUwL = logUwL[4] """ #//Ionization stage Saha factors: #//Need T_kin^5/2 in the P_e formulation of Saha Eq. 
logSaha = logSahaFac - (boltzFacI /temp[0]) + (5.0 * temp[1] / 2.0) + thisLogUwU - thisLogUwL #// saha = Math.exp(logSaha); #//logLHS[id] = logSaha; logLHS = logSaha; #// } //id loop return logLHS; #JB #return [logLHS,[[UwUFit,thisLogUwU],[UwLFit,thisLogUwL]]] #// # } //end method sahaRHS #def molPops(nmrtrLogNumB, nmrtrDissE, log10UwA, nmrtrLog10UwB, nmrtrLogQwAB, nmrtrLogMuAB, \ # numMolsB, logNumB, dissEArr, log10UwBArr, logQwABArr, logMuABArr, \ # logGroundRatio, numDeps, temp): def molPops(nmrtrLogNumB, nmrtrDissE, logUwA, nmrtrLogUwB, nmrtrLogQwAB, nmrtrLogMuAB, \ numMolsB, logNumB, dissEArr, logUwB, logQwABArr, logMuABArr, \ logGroundRatio, numDeps, temp): # line 1: //species A data - ionization equilibrium of A # //data for set of species "B" - molecular equlibrium for set {AB} """Diatomic molecular equilibrium routine that accounts for molecule formation: // Returns depth distribution of molecular population // Input parameters: // logNum - array with depth-dependent total element number densities (cm^-3) // chiI1 - ground state ionization energy of neutral stage // chiI2 - ground state ionization energy of singly ionized stage // Also needs atsmopheric structure information: // numDeps // temp structure // rho structure // // Atomic element "A" is the one kept on the LHS of the master fraction, whose ionization fractions are included // in the denominator of the master fraction // Element "B" refers to array of other sintpecies with which A forms molecules "AB" """ logE = math.log10(math.e) #// for debug output #//System.out.println("molPops: nmrtrDissE " + nmrtrDissE + " log10UwA " + log10UwA[0] + " " + log10UwA[1] + " nmrtrLog10UwB " + #// nmrtrLog10UwB[0] + " " + nmrtrLog10UwB[1] + " nmrtrLog10QwAB " + logE*nmrtrLogQwAB[2] + " nmrtrLogMuAB " + logE*nmrtrLogMuAB #// + " numMolsB " + numMolsB + " dissEArr " + dissEArr[0] + " log10UwBArr " + log10UwBArr[0][0] + " " + log10UwBArr[0][1] + " log10QwABArr " + #// logE*logQwABArr[0][2] + " logMuABArr " + logE*logMuABArr[0]); #//System.out.println("Line: nmrtrLog10UwB[0] " + logE*nmrtrLog10UwB[0] + " nmrtrLog10UwB[1] " + logE*nmrtrLog10UwB[1]); ln10 = math.log(10.0) log2pi = math.log(2.0 * math.pi) log2 = math.log(2.0) logE10 = math.log(10.0) #// Convert to natural logs: #double Ttheta, thisTemp; #//Treat at least one molecule - if there are really no molecules for an atomic species, #//there will be one phantom molecule in the denominator of the ionization fraction #//with an impossibly high dissociation energy if (numMolsB == 0): numMolsB = 1 #//This should be inherited, but let's make sure: dissEArr[0] = 29.0 #//eV #//var molPops = function(logNum, numeratorLogNumB, numeratorDissE, numeratorLog10UwA, numeratorLog10QwAB, numeratorLogMuAB, //species A data - ionization equilibrium of A #//Molecular partition functions - default initialization: thisLogUwB = [0.0 for i in range(numMolsB)] for iMol in range(numMolsB): thisLogUwB[iMol] = 0.0 #// variable for temp-dependent computed partn fn of array element B thisLogUwA = 0.0 #// element A nmrtrThisLogUwB = 0.0 #// element A thisLogQwAB = math.log(300.0) nmrtrThisLogQwAB = math.log(300.0) #//For clarity: neutral stage of atom whose ionization equilibrium is being computed is element A #// for molecule formation: #logUwA = [0.0 for i in range(5)] #nmrtrLogUwB = [0.0 for i in range(5)] #for kk in range(len(logUwA)): #logUwA[kk] = logE10*log10UwA[kk] #nmrtrLogUwB[kk] = logE10*nmrtrLog10UwB[kk] #// lburns #// Array of elements B for all molecular species AB: #double[][] logUwB = new 
double[numMolsB][2]; #logUwB = [ [ 0.0 for i in range(5) ] for j in range(numMolsB) ] #//if (numMolsB > 0){ #for iMol in range(numMolsB): # for kk in range(5): # logUwB[iMol][kk] = logE10*log10UwBArr[iMol][kk] # // lburns new loop #//} #// Molecular partition functions: #// double nmrtrLogQwAB = logE10*nmrtrLog10QwAB; #// double[] logQwAB = new double[numMolsB]; #// //if (numMolsB > 0){ #// for (int iMol = 0; iMol < numMolsB; iMol++){ #// logQwAB[iMol] = logE10*log10QwABArr[iMol]; #// } # //} #//Molecular dissociation Boltzmann factors: nmrtrBoltzFacIAB = 0.0 nmrtrLogMolSahaFac = 0.0 logDissE = math.log(nmrtrDissE) + Useful.logEv() #//System.out.println("logDissE " + logE*logDissE) logBoltzFacIAB = logDissE - Useful.logK() #//System.out.println("logBoltzFacIAB " + logE*logBoltzFacIAB); nmrtrBoltzFacIAB = math.exp(logBoltzFacIAB) nmrtrLogMolSahaFac = (3.0 / 2.0) * (log2pi + nmrtrLogMuAB + Useful.logK() - 2.0 * Useful.logH()) #//System.out.println("nmrtrLogMolSahaFac " + logE*nmrtrLogMolSahaFac); #//System.out.println("nmrtrDissE " + nmrtrDissE + " logDissE " + logE*logDissE + " logBoltzFacIAB " + logE*logBoltzFacIAB + " nmrtrBoltzFacIAB " + nmrtrBoltzFacIAB + " nmrtrLogMuAB " + logE*nmrtrLogMuAB + " nmrtrLogMolSahaFac " + logE*nmrtrLogMolSahaFac); boltzFacIAB = [0.0 for i in range(numMolsB)] logMolSahaFac = [0.0 for i in range(numMolsB)] #//if (numMolsB > 0){ for iMol in range(numMolsB): logDissE = math.log(dissEArr[iMol]) + Useful.logEv() logBoltzFacIAB = logDissE - Useful.logK() boltzFacIAB[iMol] = math.exp(logBoltzFacIAB) logMolSahaFac[iMol] = (3.0 / 2.0) * (log2pi + logMuABArr[iMol] + Useful.logK() - 2.0 * Useful.logH()) #//System.out.println("logMolSahaFac[iMol] " + logE*logMolSahaFac[iMol]); #//System.out.println("iMol " + iMol + " dissEArr[iMol] " + dissEArr[iMol] + " logDissE " + logE*logDissE + " logBoltzFacIAB " + logE*logBoltzFacIAB + " boltzFacIAB[iMol] " + boltzFacIAB[iMol] + " logMuABArr " + logE*logMuABArr[iMol] + " logMolSahaFac " + logE*logMolSahaFac[iMol]); #//double[] logNums = new double[numDeps] #//} #// For molecular species: #double nmrtrSaha, nmrtrLogSahaMol, nmrtrLogInvSahaMol; //, nmrtrInvSahaMol; logMolFrac = [0.0 for i in range(numDeps)] logSahaMol = [0.0 for i in range(numMolsB)] invSahaMol = [0.0 for i in range(numMolsB)] #JB# currentUwAArr=list(logUwA)#u(T) determined values UwAFit = ToolBox.cubicFit(masterTemp, currentUwAArr)#u(T) fit nmrtrLogUwBArr=list(nmrtrLogUwB)#u(T) determined values nmrtrLogUwBFit = ToolBox.cubicFit(masterTemp, nmrtrLogUwBArr)#u(T) fit #uwa.append(UwAFit) #uwb.append(nmrtrLogUwBFit) uwbFits=[] qwabFit = [] for iMol in range(numMolsB): currentUwBArr=list(logUwB[iMol]) UwBFit = ToolBox.cubicFit(masterTemp, currentUwBArr) uwbFits.append(UwBFit) currentLogQwABArr=list(logQwABArr[iMol])#u(T) determined values QwABFit = ToolBox.cubicFit(masterTemp, currentLogQwABArr)#u(T) fit qwabFit.append(QwABFit) #nmrtrQwABArr=list(nmrtrLogQwAB)#u(T) determined values #nmrtrQwABFit = ToolBox.cubicFit(masterTemp, nmrtrQwABArr)#u(T) fit #for Mols in range(numMolsB): # currentLogUwBArr=list(logUwB[Mols])#u(T) determined values # UwBFit=cubicFit(masterTemp,currentLogUwBArr)#u(T) fit #JB# #// temps=[] #valb=[] #vala=[] #valnb=[] #valqab=[] #valnmrtrqwb=[] #// System.out.println("molPops: id nmrtrLogNumB logNumBArr[0] logGroundRatio"); for id in range(numDeps): #//System.out.format("%03d, %21.15f, %21.15f, %21.15f, %n", id, logE*nmrtrLogNumB[id], logE*logNumB[0][id], logE*logGroundRatio[id]); #//// reduce or enhance number density by over-all Rosseland opcity 
scale parameter #//Determine temparature dependent partition functions Uw: thisTemp = temp[0][id] temps.append(thisTemp) #Ttheta = 5040.0 / thisTemp """ if (Ttheta >= 1.0): thisLogUwA = logUwA[0] nmrtrThisLogUwB = nmrtrLogUwB[0] for iMol in range(numMolsB): thisLogUwB[iMol] = logUwB[iMol][0] if (Ttheta <= 0.5): thisLogUwA = logUwA[1] nmrtrThisLogUwB = nmrtrLogUwB[1] for iMol in range(numMolsB): thisLogUwB[iMol] = logUwB[iMol][1] if (Ttheta > 0.5 and Ttheta < 1.0): thisLogUwA = ( logUwA[1] * ((Ttheta - 0.5)/(1.0 - 0.5)) ) \ + ( logUwA[0] * ((1.0 - Ttheta)/(1.0 - 0.5)) ) nmrtrThisLogUwB = ( nmrtrLogUwB[1] * ((Ttheta - 0.5)/(1.0 - 0.5)) ) \ + ( nmrtrLogUwB[0] * ((1.0 - Ttheta)/(1.0 - 0.5)) ) for iMol in range(numMolsB): thisLogUwB[iMol] = ( logUwB[iMol][1] * ((Ttheta - 0.5)/(1.0 - 0.5)) ) \ + ( logUwB[iMol][0] * ((1.0 - Ttheta)/(1.0 - 0.5)) ) """ #JB# thisLogUwA = float(ToolBox.valueFromFit(UwAFit,thisTemp))#u(T) value extrapolated #vala.append(thisLogUwA) nmrtrThisLogUwB = float(ToolBox.valueFromFit(nmrtrLogUwBFit,thisTemp))#u(T) value extrapolated #valnb.append(nmrtrThisLogUwB) #for iMol in range(numMolsB): # thisLogUwB[iMol]=logUwB[iMol] for iMol in range(numMolsB): thisLogUwB[iMol] = ToolBox.valueFromFit(uwbFits[iMol],thisTemp)#u(T) value extrapolated #valb.append(thisLogUwB[iMol]) #// NEW Determine temperature dependent partition functions Uw: lburns thisTemp = temp[0][id] if (thisTemp <= 130.0): thisLogUwA = logUwA[0] nmrtrThisLogUwB = nmrtrLogUwB[0] for iMol in range(numMolsB): thisLogUwB[iMol] = logUwB[iMol][0] if (thisTemp >= 10000.0): thisLogUwA = logUwA[4] nmrtrThisLogUwB = nmrtrLogUwB[4] for iMol in range(numMolsB): thisLogUwB[iMol] = logUwB[iMol][4] """ if (thisTemp > 130 and thisTemp <= 500): thisLogUwA = logUwA[1] * (thisTemp - 130)/(500 - 130) \ + logUwA[0] * (500 - thisTemp)/(500 - 130) nmrtrThisLogUwB = nmrtrLogUwB[1] * (thisTemp - 130)/(500 - 130) \ + nmrtrLogUwB[0] * (500 - thisTemp)/(500 - 130) for iMol in range(numMolsB): thisLogUwB[iMol] = logUwB[iMol][1] * (thisTemp - 130)/(500 - 130) \ + logUwB[iMol][0] * (500 - thisTemp)/(500 - 130) if (thisTemp > 500 and thisTemp <= 3000): thisLogUwA = logUwA[2] * (thisTemp - 500)/(3000 - 500) \ + logUwA[1] * (3000 - thisTemp)/(3000 - 500) nmrtrThisLogUwB = nmrtrLogUwB[2] * (thisTemp - 500)/(3000 - 500) \ + nmrtrLogUwB[1] * (3000 - thisTemp)/(3000 - 500) for iMol in range(numMolsB): thisLogUwB[iMol] = logUwB[iMol][2] * (thisTemp - 500)/(3000 - 500) \ + logUwB[iMol][1] * (3000 - thisTemp)/(3000 - 500) if (thisTemp > 3000 and thisTemp <= 8000): thisLogUwA = logUwA[3] * (thisTemp - 3000)/(8000 - 3000) \ + logUwA[2] * (8000 - thisTemp)/(8000 - 3000) nmrtrThisLogUwB = nmrtrLogUwB[3] * (thisTemp - 3000)/(8000 - 3000) \ + nmrtrLogUwB[2] * (8000 - thisTemp)/(8000 - 3000) for iMol in range(numMolsB): thisLogUwB[iMol] = logUwB[iMol][3] * (thisTemp - 3000)/(8000 - 3000) \ + logUwB[iMol][2] * (8000 - thisTemp)/(8000 - 3000) if (thisTemp > 8000 and thisTemp < 10000): thisLogUwA = logUwA[4] * (thisTemp - 8000)/(10000 - 8000) \ + logUwA[3] * (10000 - thisTemp)/(10000 - 8000) nmrtrThisLogUwB = nmrtrLogUwB[4] * (thisTemp - 8000)/(10000 - 8000) \ + nmrtrLogUwB[3] * (10000 - thisTemp)/(10000 - 8000) for iMol in range(numMolsB): thisLogUwB[iMol] = logUwB[iMol][4] * (thisTemp - 8000)/(10000 - 8000) \ + logUwB[iMol][3] * (10000 - thisTemp)/(10000 - 8000) if (thisTemp >= 10000): thisLogUwA = logUwA[4] nmrtrThisLogUwB = nmrtrLogUwB[4] for iMol in range(numMolsB): thisLogUwB[iMol] = logUwB[iMol][4] """ #iMol loops for Q's for iMol in range(numMolsB): if 
(thisTemp < 3000.0): thisLogQwAB = ( logQwABArr[iMol][1] * (3000.0 - thisTemp)/(3000.0 - 500.0) ) \ + ( logQwABArr[iMol][2] * (thisTemp - 500.0)/(3000.0 - 500.0) ) if ( (thisTemp >= 3000.0) and (thisTemp <= 8000.0) ): thisLogQwAB = ( logQwABArr[iMol][2] * (8000.0 - thisTemp)/(8000.0 - 3000.0) ) \ + ( logQwABArr[iMol][3] * (thisTemp - 3000.0)/(8000.0 - 3000.0) ) if ( thisTemp > 8000.0 ): thisLogQwAB = ( logQwABArr[iMol][3] * (10000.0 - thisTemp)/(10000.0 - 8000.0) ) \ + ( logQwABArr[iMol][4] * (thisTemp - 8000.0)/(10000.0 - 8000.0) ) if (thisTemp < 3000.0): nmrtrThisLogQwAB = ( nmrtrLogQwAB[1] * (3000.0 - thisTemp)/(3000.0 - 500.0) ) \ + ( nmrtrLogQwAB[2] * (thisTemp - 500.0)/(3000.0 - 500.0) ) if ( (thisTemp >= 3000.0) and (thisTemp <= 8000.0) ): nmrtrThisLogQwAB = ( nmrtrLogQwAB[2] * (8000.0 - thisTemp)/(8000.0 - 3000.0) ) \ + ( nmrtrLogQwAB[3] * (thisTemp - 3000.0)/(8000.0 - 3000.0) ) if ( thisTemp > 8000.0 ): nmrtrThisLogQwAB = ( nmrtrLogQwAB[3] * (10000.0 - thisTemp)/(10000.0 - 8000.0) ) \ + ( nmrtrLogQwAB[4] * (thisTemp - 8000.0)/(10000.0 - 8000.0) ) #//For clarity: neutral stage of atom whose ionization equilibrium is being computed is element A #// for molecule formation: # //Ionization stage Saha factors: #//System.out.println("id " + id + " nmrtrLogNumB[id] " + logE*nmrtrLogNumB[id]); # // if (id == 16){ # // System.out.println("id " + id + " nmrtrLogNumB[id] " + logE*nmrtrLogNumB[id] + " pp nmrtB " + (logE*(nmrtrLogNumB[id]+temp[1][id]+Useful.logK())) + " nmrtrThisLogUwB " + logE*nmrtrThisLogUwB + " thisLogUwA " + logE*thisLogUwA + " nmrtrLogQwAB " + logE*nmrtrThisLogQwAB); # //System.out.println("nmrtrThisLogUwB " + logE*nmrtrThisLogUwB + " thisLogUwA " + logE*thisLogUwA + " nmrtrThisLogQwAB " + logE*nmrtrThisLogQwAB); # // } nmrtrLogSahaMol = nmrtrLogMolSahaFac - nmrtrLogNumB[id] - (nmrtrBoltzFacIAB / temp[0][id]) + (3.0 * temp[1][id] / 2.0) + nmrtrThisLogUwB + thisLogUwA - nmrtrThisLogQwAB nmrtrLogInvSahaMol = -1.0 * nmrtrLogSahaMol #//System.out.println("nmrtrLogInvSahaMol " + logE*nmrtrLogInvSahaMol); #//nmrtrInvSahaMol = Math.exp(nmrtrLogSahaMol); #// if (id == 16){ #// System.out.println("nmrtrLogInvSahaMol " + logE*nmrtrLogInvSahaMol); #// } #// if (id == 16){ #// System.out.println("nmrtrBoltzFacIAB " + nmrtrBoltzFacIAB + " nmrtrThisLogUwB " + logE*nmrtrThisLogUwB + " thisLogUwA " + logE*thisLogUwA + " nmrtrThisLogQwAB " + nmrtrThisLogQwAB); #// System.out.println("nmrtrLogSahaMol " + logE*nmrtrLogSahaMol); // + " nmrtrInvSahaMol " + nmrtrInvSahaMol); #// } #//Molecular Saha factors: for iMol in range(numMolsB): #//System.out.println("iMol " + iMol + " id " + id + " logNumB[iMol][id] " + logE*nmrtrLogNumB[id]); #//System.out.println("iMol " + iMol + " thisLogUwB[iMol] " + logE*thisLogUwB[iMol] + " thisLogUwA " + logE*thisLogUwA + " thisLogQwAB " + logE*thisLogQwAB); logSahaMol[iMol] = logMolSahaFac[iMol] - logNumB[iMol][id] - (boltzFacIAB[iMol] / temp[0][id]) + (3.0 * temp[1][id] / 2.0) + float(thisLogUwB[iMol]) + thisLogUwA - thisLogQwAB #//For denominator of ionization fraction, we need *inverse* molecular Saha factors (N_AB/NI): logSahaMol[iMol] = -1.0 * logSahaMol[iMol] invSahaMol[iMol] = math.exp(logSahaMol[iMol]) #//TEST invSahaMol[iMol] = 1.0e-99; //test #// if (id == 16){ #// System.out.println("iMol " + iMol + " boltzFacIAB[iMol] " + boltzFacIAB[iMol] + " thisLogUwB[iMol] " + logE*thisLogUwB[iMol] + " logQwAB[iMol] " + logE*thisLogQwAB + " logNumB[iMol][id] " + logE*logNumB[iMol][id] + " logMolSahaFac[iMol] " + logE*logMolSahaFac[iMol]); #// 
System.out.println("iMol " + iMol + " logSahaMol " + logE*logSahaMol[iMol] + " invSahaMol[iMol] " + invSahaMol[iMol]); #// } #//Compute log of denominator is ionization fraction, f_stage # //default initialization # // - ratio of total atomic particles in all ionization stages to number in ground state: denominator = math.exp(logGroundRatio[id]) #//default initialization - ratio of total atomic particles in all ionization stages to number in ground state #//molecular contribution for iMol in range(numMolsB): #// if (id == 16){ #// System.out.println("invSahaMol[iMol] " + invSahaMol[iMol] + " denominator " + denominator); #// } denominator = denominator + invSahaMol[iMol] #// logDenominator = math.log(denominator) #//System.out.println("logGroundRatio[id] " + logE*logGroundRatio[id] + " logDenominator " + logE*logDenominator); #// if (id == 16){ #// System.out.println("id " + id + " logGroundRatio " + logGroundRatio[id] + " logDenominator " + logDenominator); #// } #//if (id == 36){ #// System.out.println("logDenominator " + logE*logDenominator); #// } #//var logDenominator = Math.log( 1.0 + saha21 + (saha32 * saha21) + (saha43 * saha32 * saha21) + (saha54 * saha43 * saha32 * saha21) ); logMolFrac[id] = nmrtrLogInvSahaMol - logDenominator #// if (id == 16){ #// System.out.println("id " + id + " logMolFrac[id] " + logE*logMolFrac[id]); #// } #//logNums[id] = logNum[id] + logMolFrac; #} //id loop #JB - check (never used)# #print(uwa) #print(uwb) #title("logUwA") """ plot(temps,vala) tempT=[] for t in masterTemp: tempT.append(valueFromFit(UwAFit,t)) scatter(masterTemp,(tempT)) show() #title("nmrtrlogUwB") plot(temps,valnb) tempT=[] for t in masterTemp: tempT.append(valueFromFit(nmrtrLogUwBFit,t)) scatter(masterTemp,(tempT)) show() #title("logUwB") plot(temps,valb) tempT=[] for t in masterTemp: tempT.append(valueFromFit(UwBFit,t)) scatter(masterTemp,(tempT)) show() #title("logQwAB") plot(temps,valqab) tempT=[] for t in masterTemp: tempT.append(valueFromFit(QwABFit,t)) scatter(masterTemp,(tempT)) show() #title("nmrtrlogQwAB") plot(temps,valnmrtrqwb) tempT=[] for t in masterTemp: tempT.append(valueFromFit(nmrtrQwABFit,t)) scatter(masterTemp,(tempT)) show() """ #JB# return logMolFrac #//end method stagePops
mit
3,178,557,225,868,192,000
39.977511
287
0.575238
false
mhvk/baseband
docs/conf.py
1
7827
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
#
# Astropy documentation build configuration file.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this file.
#
# All configuration values have a default. Some values are defined in
# the global Astropy configuration which is loaded here before anything else.
# See astropy.sphinx.conf for which values are set there.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('..'))
# IMPORTANT: the above commented section was generated by sphinx-quickstart, but
# is *NOT* appropriate for astropy or Astropy affiliated packages. It is left
# commented out with this explanation to make it clear why this should not be
# done. If the sys.path entry above is added, when the astropy.sphinx.conf
# import occurs, it will import the *source* version of astropy instead of the
# version installed (if invoked as "make html" or directly with sphinx), or the
# version in the build directory (if "python setup.py build_sphinx" is used).
# Thus, any C-extensions that are needed to build the documentation will *not*
# be accessible, and the documentation will not build correctly.

import os
import sys
import datetime
from importlib import import_module

import baseband

try:
    from sphinx_astropy.conf.v1 import *  # noqa
except ImportError:
    print('ERROR: the documentation requires the sphinx-astropy package to be installed')
    sys.exit(1)

# Get configuration information from setup.cfg
from configparser import ConfigParser
conf = ConfigParser()

conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
setup_cfg = dict(conf.items('metadata'))

# -- General configuration ----------------------------------------------------

# By default, highlight as Python 3.
highlight_language = 'python3'

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.2'

# To perform a Sphinx version check that needs to be more specific than
# major.minor, call `check_sphinx_version("x.y.z")` here.
# check_sphinx_version("1.2.1")

# add any custom intersphinx mappings
intersphinx_mapping['baseband_tasks'] = (
    'https://baseband.readthedocs.io/projects/baseband-tasks/en/stable/', None)

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns.append('_templates')

# This is added to the end of RST files - a good place to put substitutions to
# be used globally.
rst_epilog += """
.. _Python: https://www.python.org/
.. _Astropy: https://www.astropy.org
.. _NumPy: https://numpy.org
.. _baseband-tasks: https://baseband.readthedocs.io/projects/baseband-tasks/
.. |minimum_python_version| replace:: {0.__minimum_python_version__}
.. |minimum_astropy_version| replace:: {0.__minimum_astropy_version__}
.. |minimum_numpy_version| replace:: {0.__minimum_numpy_version__}
""".format(baseband)

# -- Project information ------------------------------------------------------

# This does not *have* to match the package name, but typically does
project = setup_cfg['name']
author = setup_cfg['author']
copyright = '{0}, {1}'.format(
    datetime.datetime.now().year, setup_cfg['author'])

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.

import_module(setup_cfg['name'])
package = sys.modules[setup_cfg['name']]

# The short X.Y version.
version = package.__version__.split('-', 1)[0]
# The full version, including alpha/beta/rc tags.
release = package.__version__

# -- Options for HTML output --------------------------------------------------

# A NOTE ON HTML THEMES
# The global astropy configuration uses a custom theme, 'bootstrap-astropy',
# which is installed along with astropy. A different theme can be used or
# the options for this theme can be modified by overriding some of the
# variables set in the global configuration. The variables set in the
# global configuration are listed below, commented out.

# Add any paths that contain custom themes here, relative to this directory.
# To use a different custom theme, add the directory containing the theme.
#html_theme_path = []

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes. To override the custom theme, set this to the
# name of a builtin theme or the name of a custom theme in html_theme_path.
#html_theme = None

html_theme_options = {
    'logotext1': 'base',  # white,  semi-bold
    'logotext2': 'band',  # orange, light
    'logotext3': ':docs'  # white,  light
    }

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = ''

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = ''

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = ''

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
html_title = '{0} v{1}'.format(project, release)

# Output file base name for HTML help builder.
htmlhelp_basename = project + 'doc'

# -- Options for LaTeX output -------------------------------------------------

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [('index', project + '.tex', project + u' Documentation',
                    author, 'manual')]

# -- Options for manual page output -------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [('index', project.lower(), project + u' Documentation',
              [author], 1)]

# -- Options for the edit_on_github extension ---------------------------------

if eval(setup_cfg.get('edit_on_github')):
    extensions += ['sphinx_astropy.ext.edit_on_github']

    edit_on_github_project = setup_cfg['github_project']
    edit_on_github_branch = "master"

    edit_on_github_source_root = ""
    edit_on_github_doc_root = "docs"

# -- Resolving issue number to links in changelog -----------------------------
github_issues_url = 'https://github.com/{0}/issues/'.format(setup_cfg['github_project'])

# -- Turn on nitpicky mode for sphinx (to warn about references not found) ----

nitpicky = True
nitpick_ignore = []

# Some warnings are impossible to suppress, and you can list specific references
# that should be ignored in a nitpick-exceptions file which should be inside
# the docs/ directory. The format of the file should be:
#
# <type> <class>
#
# for example:
#
# py:class astropy.io.votable.tree.Element
# py:class astropy.io.votable.tree.SimpleElement
# py:class astropy.io.votable.tree.SimpleElementWithContent
#
# The following lines read those exceptions from the nitpick-exceptions file:
#
for line in open('nitpick-exceptions'):
    if line.strip() == "" or line.startswith("#"):
        continue
    dtype, target = line.split(None, 1)
    target = target.strip()
    nitpick_ignore.append((dtype, target))

# -- Include inherited members in class documentation -------------------------
automodsumm_inherited_members = True
gpl-3.0
674,455,686,497,305,500
36.629808
89
0.693625
false
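The nitpick-exceptions loop in the conf.py above is a useful pattern on its own. Here is the same parsing restated with a context manager so the file handle is closed deterministically; it assumes a docs/nitpick-exceptions file in the format documented above.

nitpick_ignore = []
with open('nitpick-exceptions') as fh:
    for line in fh:
        # Skip blank lines and comment lines.
        if line.strip() == "" or line.startswith("#"):
            continue
        # Each entry is "<type> <target>",
        # e.g. "py:class astropy.io.votable.tree.Element".
        dtype, target = line.split(None, 1)
        nitpick_ignore.append((dtype, target.strip()))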
codelieche/codelieche.com
apps/article/views/pages/article.py
1
11474
# -*- coding:utf-8 -*-
import json

from django.contrib.auth.decorators import login_required
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse, HttpResponseRedirect, Http404, JsonResponse
from django.shortcuts import render, get_object_or_404, redirect
from django.views.generic import View
from django.core.paginator import Paginator

from article.utils import get_article_id, get_page_num_list
from article.forms import PostForm, ImageForm
from article.models import Category, Post, Tag, Image, UserData

# Create your views here.


class IndexPageView(View):
    """
    Article index PageView
    """

    def get(self, request, page=None):
        # Only superusers may see all articles
        if request.user.is_superuser:
            all_posts = Post.objects.all()
        else:
            all_posts = Post.published.all()

        if page:
            page_num = int(page)
        else:
            page_num = 1

        p = Paginator(all_posts, 10)
        posts = p.page(page_num)
        page_count = p.num_pages
        # Get the paginator's page-number list: the 7 page numbers
        # closest to the current page
        page_num_list = get_page_num_list(page_count, page_num, 7)

        content = {
            'posts': posts,
            'last_page': page_count,
            'page_num_list': page_num_list
        }
        return render(request, 'article/page.html', content)


class ArticleTagListView(View):
    """
    Tag article-list page View
    """

    def get(self, request, tag_name, page=0):
        # Fetch the tag first
        tag = get_object_or_404(Tag, slug=tag_name)
        # print(tag)
        # Only superusers may see all articles
        if request.user.is_superuser:
            all_posts = tag.articles.all()
        else:
            all_posts = tag.articles.all().filter(status='published')

        if page:
            page_num = int(page)
        else:
            page_num = 1

        # Pagination
        p = Paginator(all_posts, 10)
        posts = p.page(page_num)
        # Total number of pages
        page_count = p.num_pages
        # Get the paginator's page-number list: the 7 page numbers
        # closest to the current page
        page_num_list = get_page_num_list(page_count, page_num, 7)

        # Render the main content
        content = {
            'tag': tag,
            'posts': posts,
            'last_page': page_count,
            'page_num_list': page_num_list
        }
        # posts = Post.published.filter(tags__name__in=[tag_name])
        return render(request, "article/list_tag.html", content)


class PostDetailView(View):
    """
    Article detail page View
    Every visit must increase the read count, visit_count.
    When updating the read count, pass update_fields=['visit_count'].
    Model.save defaults to update_fields=None; if it is not passed,
    the data in the `updated` field would be modified as well.
    """

    def get(self, request, pk):
        # Get the article by pk
        post = get_object_or_404(Post, pk=pk)
        # Read count + 1
        post.visit_count += 1
        post.save(update_fields=['visit_count'])
        # Use the status to decide whether this is a draft or published
        if post.author == request.user:
            if post.status == 'draft':
                pre_title = "[Draft]"
                post.title = pre_title + post.title
            elif post.is_deleted:
                pre_title = "[Deleted]"
                post.title = pre_title + post.title
        else:
            if post.status == 'draft' or post.is_deleted:
                # If the post is a draft, or the article has been deleted,
                # raise a 404
                raise Http404

        return render(request, 'article/detail.html', {"post": post})


@login_required
def create(request):
    """
    Create an article
    :param request:
    :return:
    """
    ud = None
    # Get the data whose type is "article" from UserData
    try:
        # ud = UserData.objects.get(type="article",user=request.user)
        ud = request.user.userdatas.get(type="article")
    except ObjectDoesNotExist:
        # The article data does not exist yet, so create it
        ud = UserData(user=request.user, type="article", content="")

    # The false/true inside ud.content are not wrapped in double quotes,
    # so define false and true here
    false = False
    true = True
    if ud.content:
        post = json.loads(ud.content)
    else:
        # Once the post has been created, UserData.content becomes empty
        post = {}
    # Could also use post = dict(eval(ud.content)), but avoid eval whenever possible

    categories = Category.objects.all()

    if request.method == "POST":
        # print(request.POST)
        form = PostForm(request.POST)
        # After receiving the submitted form data, validate it and get cleaned_data
        # print(form)
        if form.is_valid():
            category = Category.objects.get(pk=form.cleaned_data['category'])
            title = form.cleaned_data['title']
            content = form.cleaned_data['content']
            status = form.cleaned_data['status']
            tags = form.cleaned_data['tags']
            # Normalize fullwidth commas to ASCII commas
            # (>= 0 also catches a comma at position 0)
            if tags.find(',') >= 0:
                tags = tags.replace(",", ",")
            is_top = form.cleaned_data['is_top']
            is_good = form.cleaned_data['is_good']
            is_deleted = form.cleaned_data['is_deleted']
            time_added = form.cleaned_data['time_added']
            post_pk = get_article_id(time_added)
            # print(category,title,content,status,tags)
            post = Post(pk=post_pk, category=category, title=title, content=content,
                        status=status, author=request.user,
                        is_top=is_top, is_good=is_good, is_deleted=is_deleted)
            # print(post.tags)
            post.save()
            post.time_added = time_added
            post.save(update_fields=['time_added'])

            # After the article is saved, clear the article content in UserData
            ud.content = ""
            ud.save()

            # If there are tag values, add them
            if tags:
                for tag in tags.split(','):
                    if not tag:
                        # Skip empty tags
                        continue
                    # get_or_create comes from Tag's default manager (Tag.objects)
                    # strip() is required to remove leading/trailing spaces
                    tag, created = Tag.objects.get_or_create(name=tag.strip())
                    post.tags.add(tag)

            if post.is_deleted:
                return HttpResponseRedirect(redirect_to="/")
            else:
                # return HttpResponseRedirect(redirect_to="/article/%s" % post.pk)
                return redirect(post)
        # If the form did not validate, go back to the edit page
        return HttpResponseRedirect(redirect_to="/article/create")
    else:
        content = {
            "post": post,
            "categories": categories
        }
        return render(request, "article/create.html", content)


@login_required
def editor(request, pk=None):
    """Article edit view"""
    categories = Category.objects.all()
    # Get the article object being edited
    post = Post.objects.get(pk=pk)
    # print(post.author == request.user)
    # print(post.author)

    if request.method == "POST":
        # print(request.POST)
        form = PostForm(request.POST)
        # After receiving the submitted form data, validate it and get cleaned_data
        # print(form)
        if form.is_valid() and post.author == request.user:
            form = form.cleaned_data
            category = Category.objects.get(pk=form['category'])
            title = form['title']
            content = form['content']
            status = form['status']
            tags = form['tags']
            # Normalize fullwidth commas to ASCII commas
            if tags.find(',') >= 0:
                tags = tags.replace(",", ",")
            top = form['is_top']
            good = form['is_good']
            is_deleted = form['is_deleted']
            # print(category,title,content,status,tags,deleted)

            # Update the post object's category, title, content, status,
            # author, top and good information
            post.category = category
            post.title = title
            post.content = content
            post.status = status
            post.is_top = top
            post.is_good = good
            post.is_deleted = is_deleted
            # print(post.tags.all())

            tags_list = []
            if tags:
                # If there are tag values, add them
                for tag in tags.split(','):
                    if tag:
                        # get_or_create comes from Tag's default manager (Tag.objects)
                        # strip() is required to remove leading/trailing spaces
                        tag, created = Tag.objects.get_or_create(name=tag.strip())
                        tags_list.append(tag)
                    else:
                        continue
            # Re-assign the post's tags
            post.tags.set(tags_list)
            post.save()
            if is_deleted:
                return HttpResponseRedirect("/")
            # return HttpResponseRedirect(redirect_to="/article/%s" % post.pk)
            return redirect(post)
    else:
        form = PostForm()
    return render(request, "article/editor.html", {"post": post, "categories": categories})


@login_required
def save(request):
    """Save an article part-way through creation"""
    # Get the data whose type is "article" from UserData
    ud = UserData.objects.filter(type="article", user=request.user).first()
    # ud = request.user.userdatas.get(type="article")
    if not ud:
        # The article data does not exist yet, so create it
        ud = UserData()
    post = Post()
    if request.method == "POST":
        # print(request.POST)
        form = PostForm(request.POST)
        # After receiving the submitted form data, validate it and get cleaned_data
        # print(form)  # before cleaned_data this is still raw HTML markup
        if form.is_valid():
            form = form.cleaned_data
            # Turn the form into a dict, then json.dumps it into a string,
            # so create() can easily turn it back into a dictionary
            form["time_added"] = form["time_added"].strftime("%F %T")
            ud.content = json.dumps(dict(form))
            # print(json.dumps(dict(form)))
            # print(form)
            ud.user = request.user
            ud.save()
            return HttpResponse(json.dumps({"sucess": True}), content_type="application/json")
        return HttpResponse(json.dumps({"sucess": False}), content_type="application/json")
    else:
        # /article/save may only be accessed via POST
        raise Http404


@login_required
def upload_image(request):
    """Upload an image"""
    if request.method == "GET":
        form = ImageForm()
    else:
        form = ImageForm(request.POST, request.FILES)
        # file = request.FILES['filename']
        # print(dir(request.FILES['filename']))
        # print(file.size)
        if form.is_valid():
            filename = form.cleaned_data['filename']
            image = Image(filename=filename, url=filename, user=request.user)
            image.save()
            response_data = {
                'success': 'true',
                'url': '/media/%s' % image.url,
            }
            return JsonResponse(response_data)
        else:
            return JsonResponse({'success': 'false'}, status=400)
    return render(request, "article/upload_image.html", {'form': form})
mit
-1,462,004,923,378,391,000
32.350482
101
0.54811
false
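The PostDetailView docstring above explains why the view passes update_fields when bumping the read counter. A short sketch of that idiom, assuming the Post model from the views above, plus a race-safe single-UPDATE alternative; the pk value is illustrative:

from django.db.models import F
from article.models import Post

post = Post.objects.get(pk=42)  # pk is illustrative
post.visit_count += 1
# Write only visit_count: save() updates just the named fields, so an
# auto_now column such as `updated` is left untouched in the database.
post.save(update_fields=['visit_count'])

# Race-safe alternative: a single UPDATE in the database,
# with no read-modify-write window between concurrent requests.
Post.objects.filter(pk=42).update(visit_count=F('visit_count') + 1)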
DirectXMan12/nova-hacking
nova/tests/virt/hyperv/test_hypervapi.py
1
51735
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 Cloudbase Solutions Srl # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Test suite for the Hyper-V driver and related APIs. """ import io import mox import os import platform import shutil import time import uuid from oslo.config import cfg from nova.api.metadata import base as instance_metadata from nova.compute import power_state from nova.compute import task_states from nova import context from nova import db from nova.image import glance from nova import test from nova.tests import fake_network from nova.tests.image import fake as fake_image from nova.tests import matchers from nova.tests.virt.hyperv import db_fakes from nova.tests.virt.hyperv import fake from nova import utils from nova.virt import configdrive from nova.virt import driver from nova.virt.hyperv import basevolumeutils from nova.virt.hyperv import constants from nova.virt.hyperv import driver as driver_hyperv from nova.virt.hyperv import hostutils from nova.virt.hyperv import livemigrationutils from nova.virt.hyperv import networkutils from nova.virt.hyperv import pathutils from nova.virt.hyperv import vhdutils from nova.virt.hyperv import vmutils from nova.virt.hyperv import volumeops from nova.virt.hyperv import volumeutils from nova.virt.hyperv import volumeutilsv2 from nova.virt import images CONF = cfg.CONF CONF.import_opt('vswitch_name', 'nova.virt.hyperv.vif', 'hyperv') class HyperVAPITestCase(test.TestCase): """Unit tests for Hyper-V driver calls.""" def __init__(self, test_case_name): self._mox = mox.Mox() super(HyperVAPITestCase, self).__init__(test_case_name) def setUp(self): super(HyperVAPITestCase, self).setUp() self._user_id = 'fake' self._project_id = 'fake' self._instance_data = None self._image_metadata = None self._fetched_image = None self._update_image_raise_exception = False self._volume_target_portal = 'testtargetportal:3260' self._volume_id = '0ef5d708-45ab-4129-8c59-d774d2837eb7' self._context = context.RequestContext(self._user_id, self._project_id) self._instance_ide_disks = [] self._instance_ide_dvds = [] self._instance_volume_disks = [] self._test_vm_name = None self._test_instance_dir = 'C:\\FakeInstancesPath\\instance-0000001' self._setup_stubs() self.flags(instances_path=r'C:\Hyper-V\test\instances', network_api_class='nova.network.quantumv2.api.API') self._conn = driver_hyperv.HyperVDriver(None) def _setup_stubs(self): db_fakes.stub_out_db_instance_api(self.stubs) fake_image.stub_out_image_service(self.stubs) fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs) def fake_fetch(context, image_id, target, user, project): self._fetched_image = target self.stubs.Set(images, 'fetch', fake_fetch) def fake_get_remote_image_service(context, name): class FakeGlanceImageService(object): def update(self_fake, context, image_id, image_metadata, f): if self._update_image_raise_exception: raise vmutils.HyperVException( "Simulated update failure") self._image_metadata = image_metadata return (FakeGlanceImageService(), 1) self.stubs.Set(glance, 
'get_remote_image_service', fake_get_remote_image_service) def fake_sleep(ms): pass self.stubs.Set(time, 'sleep', fake_sleep) def fake_vmutils__init__(self, host='.'): pass vmutils.VMUtils.__init__ = fake_vmutils__init__ def fake_get_volume_utils(self): return volumeutils.VolumeUtils() volumeops.VolumeOps._get_volume_utils = fake_get_volume_utils self.stubs.Set(pathutils, 'PathUtils', fake.PathUtils) self._mox.StubOutWithMock(fake.PathUtils, 'open') self._mox.StubOutWithMock(fake.PathUtils, 'copyfile') self._mox.StubOutWithMock(fake.PathUtils, 'rmtree') self._mox.StubOutWithMock(fake.PathUtils, 'copy') self._mox.StubOutWithMock(fake.PathUtils, 'remove') self._mox.StubOutWithMock(fake.PathUtils, 'rename') self._mox.StubOutWithMock(fake.PathUtils, 'makedirs') self._mox.StubOutWithMock(fake.PathUtils, 'get_instance_migr_revert_dir') self._mox.StubOutWithMock(fake.PathUtils, 'get_instance_dir') self._mox.StubOutWithMock(vmutils.VMUtils, 'vm_exists') self._mox.StubOutWithMock(vmutils.VMUtils, 'create_vm') self._mox.StubOutWithMock(vmutils.VMUtils, 'destroy_vm') self._mox.StubOutWithMock(vmutils.VMUtils, 'attach_ide_drive') self._mox.StubOutWithMock(vmutils.VMUtils, 'create_scsi_controller') self._mox.StubOutWithMock(vmutils.VMUtils, 'create_nic') self._mox.StubOutWithMock(vmutils.VMUtils, 'set_vm_state') self._mox.StubOutWithMock(vmutils.VMUtils, 'list_instances') self._mox.StubOutWithMock(vmutils.VMUtils, 'get_vm_summary_info') self._mox.StubOutWithMock(vmutils.VMUtils, 'take_vm_snapshot') self._mox.StubOutWithMock(vmutils.VMUtils, 'remove_vm_snapshot') self._mox.StubOutWithMock(vmutils.VMUtils, 'set_nic_connection') self._mox.StubOutWithMock(vmutils.VMUtils, 'get_vm_scsi_controller') self._mox.StubOutWithMock(vmutils.VMUtils, 'get_vm_ide_controller') self._mox.StubOutWithMock(vmutils.VMUtils, 'get_attached_disks_count') self._mox.StubOutWithMock(vmutils.VMUtils, 'attach_volume_to_controller') self._mox.StubOutWithMock(vmutils.VMUtils, 'get_mounted_disk_by_drive_number') self._mox.StubOutWithMock(vmutils.VMUtils, 'detach_vm_disk') self._mox.StubOutWithMock(vmutils.VMUtils, 'get_vm_storage_paths') self._mox.StubOutWithMock(vmutils.VMUtils, 'get_controller_volume_paths') self._mox.StubOutWithMock(vhdutils.VHDUtils, 'create_differencing_vhd') self._mox.StubOutWithMock(vhdutils.VHDUtils, 'reconnect_parent_vhd') self._mox.StubOutWithMock(vhdutils.VHDUtils, 'merge_vhd') self._mox.StubOutWithMock(vhdutils.VHDUtils, 'get_vhd_parent_path') self._mox.StubOutWithMock(vhdutils.VHDUtils, 'get_vhd_info') self._mox.StubOutWithMock(vhdutils.VHDUtils, 'resize_vhd') self._mox.StubOutWithMock(vhdutils.VHDUtils, 'validate_vhd') self._mox.StubOutWithMock(hostutils.HostUtils, 'get_cpus_info') self._mox.StubOutWithMock(hostutils.HostUtils, 'is_cpu_feature_present') self._mox.StubOutWithMock(hostutils.HostUtils, 'get_memory_info') self._mox.StubOutWithMock(hostutils.HostUtils, 'get_volume_info') self._mox.StubOutWithMock(hostutils.HostUtils, 'get_windows_version') self._mox.StubOutWithMock(hostutils.HostUtils, 'get_local_ips') self._mox.StubOutWithMock(networkutils.NetworkUtils, 'get_external_vswitch') self._mox.StubOutWithMock(networkutils.NetworkUtils, 'create_vswitch_port') self._mox.StubOutWithMock(livemigrationutils.LiveMigrationUtils, 'live_migrate_vm') self._mox.StubOutWithMock(livemigrationutils.LiveMigrationUtils, 'check_live_migration_config') self._mox.StubOutWithMock(basevolumeutils.BaseVolumeUtils, 'volume_in_mapping') self._mox.StubOutWithMock(basevolumeutils.BaseVolumeUtils, 
'get_session_id_from_mounted_disk') self._mox.StubOutWithMock(basevolumeutils.BaseVolumeUtils, 'get_device_number_for_target') self._mox.StubOutWithMock(basevolumeutils.BaseVolumeUtils, 'get_target_from_disk_path') self._mox.StubOutWithMock(volumeutils.VolumeUtils, 'login_storage_target') self._mox.StubOutWithMock(volumeutils.VolumeUtils, 'logout_storage_target') self._mox.StubOutWithMock(volumeutils.VolumeUtils, 'execute_log_out') self._mox.StubOutWithMock(volumeutilsv2.VolumeUtilsV2, 'login_storage_target') self._mox.StubOutWithMock(volumeutilsv2.VolumeUtilsV2, 'logout_storage_target') self._mox.StubOutWithMock(volumeutilsv2.VolumeUtilsV2, 'execute_log_out') self._mox.StubOutClassWithMocks(instance_metadata, 'InstanceMetadata') self._mox.StubOutWithMock(instance_metadata.InstanceMetadata, 'metadata_for_config_drive') # Can't use StubOutClassWithMocks due to __exit__ and __enter__ self._mox.StubOutWithMock(configdrive, 'ConfigDriveBuilder') self._mox.StubOutWithMock(configdrive.ConfigDriveBuilder, 'make_drive') self._mox.StubOutWithMock(utils, 'execute') def tearDown(self): self._mox.UnsetStubs() super(HyperVAPITestCase, self).tearDown() def test_get_available_resource(self): cpu_info = {'Architecture': 'fake', 'Name': 'fake', 'Manufacturer': 'ACME, Inc.', 'NumberOfCores': 2, 'NumberOfLogicalProcessors': 4} tot_mem_kb = 2000000L free_mem_kb = 1000000L tot_hdd_b = 4L * 1024 ** 3 free_hdd_b = 3L * 1024 ** 3 windows_version = '6.2.9200' hostutils.HostUtils.get_memory_info().AndReturn((tot_mem_kb, free_mem_kb)) m = hostutils.HostUtils.get_volume_info(mox.IsA(str)) m.AndReturn((tot_hdd_b, free_hdd_b)) hostutils.HostUtils.get_cpus_info().AndReturn([cpu_info]) m = hostutils.HostUtils.is_cpu_feature_present(mox.IsA(int)) m.MultipleTimes() m = hostutils.HostUtils.get_windows_version() m.AndReturn(windows_version) self._mox.ReplayAll() dic = self._conn.get_available_resource(None) self._mox.VerifyAll() self.assertEquals(dic['vcpus'], cpu_info['NumberOfLogicalProcessors']) self.assertEquals(dic['hypervisor_hostname'], platform.node()) self.assertEquals(dic['memory_mb'], tot_mem_kb / 1024) self.assertEquals(dic['memory_mb_used'], tot_mem_kb / 1024 - free_mem_kb / 1024) self.assertEquals(dic['local_gb'], tot_hdd_b / 1024 ** 3) self.assertEquals(dic['local_gb_used'], tot_hdd_b / 1024 ** 3 - free_hdd_b / 1024 ** 3) self.assertEquals(dic['hypervisor_version'], windows_version.replace('.', '')) def test_get_host_stats(self): tot_mem_kb = 2000000L free_mem_kb = 1000000L tot_hdd_b = 4L * 1024 ** 3 free_hdd_b = 3L * 1024 ** 3 hostutils.HostUtils.get_memory_info().AndReturn((tot_mem_kb, free_mem_kb)) m = hostutils.HostUtils.get_volume_info(mox.IsA(str)) m.AndReturn((tot_hdd_b, free_hdd_b)) self._mox.ReplayAll() dic = self._conn.get_host_stats(True) self._mox.VerifyAll() self.assertEquals(dic['disk_total'], tot_hdd_b / 1024 ** 3) self.assertEquals(dic['disk_available'], free_hdd_b / 1024 ** 3) self.assertEquals(dic['host_memory_total'], tot_mem_kb / 1024) self.assertEquals(dic['host_memory_free'], free_mem_kb / 1024) self.assertEquals(dic['disk_total'], dic['disk_used'] + dic['disk_available']) self.assertEquals(dic['host_memory_total'], dic['host_memory_overhead'] + dic['host_memory_free']) def test_list_instances(self): fake_instances = ['fake1', 'fake2'] vmutils.VMUtils.list_instances().AndReturn(fake_instances) self._mox.ReplayAll() instances = self._conn.list_instances() self._mox.VerifyAll() self.assertEquals(instances, fake_instances) def test_get_info(self): self._instance_data = 
self._get_instance_data() summary_info = {'NumberOfProcessors': 2, 'EnabledState': constants.HYPERV_VM_STATE_ENABLED, 'MemoryUsage': 1000, 'UpTime': 1} m = vmutils.VMUtils.vm_exists(mox.Func(self._check_instance_name)) m.AndReturn(True) func = mox.Func(self._check_instance_name) m = vmutils.VMUtils.get_vm_summary_info(func) m.AndReturn(summary_info) self._mox.ReplayAll() info = self._conn.get_info(self._instance_data) self._mox.VerifyAll() self.assertEquals(info["state"], power_state.RUNNING) def test_spawn_cow_image(self): self._test_spawn_instance(True) def test_spawn_no_cow_image(self): self._test_spawn_instance(False) def _setup_spawn_config_drive_mocks(self, use_cdrom): im = instance_metadata.InstanceMetadata(mox.IgnoreArg(), content=mox.IsA(list), extra_md=mox.IsA(dict)) m = fake.PathUtils.get_instance_dir(mox.IsA(str)) m.AndReturn(self._test_instance_dir) cdb = self._mox.CreateMockAnything() m = configdrive.ConfigDriveBuilder(instance_md=mox.IgnoreArg()) m.AndReturn(cdb) # __enter__ and __exit__ are required by "with" cdb.__enter__().AndReturn(cdb) cdb.make_drive(mox.IsA(str)) cdb.__exit__(None, None, None).AndReturn(None) if not use_cdrom: utils.execute(CONF.hyperv.qemu_img_cmd, 'convert', '-f', 'raw', '-O', 'vpc', mox.IsA(str), mox.IsA(str), attempts=1) fake.PathUtils.remove(mox.IsA(str)) m = vmutils.VMUtils.attach_ide_drive(mox.IsA(str), mox.IsA(str), mox.IsA(int), mox.IsA(int), mox.IsA(str)) m.WithSideEffects(self._add_ide_disk) def _test_spawn_config_drive(self, use_cdrom): self.flags(force_config_drive=True) self.flags(config_drive_cdrom=use_cdrom, group='hyperv') self.flags(mkisofs_cmd='mkisofs.exe') if use_cdrom: expected_ide_disks = 1 expected_ide_dvds = 1 else: expected_ide_disks = 2 expected_ide_dvds = 0 self._test_spawn_instance(expected_ide_disks=expected_ide_disks, expected_ide_dvds=expected_ide_dvds, config_drive=True, use_cdrom=use_cdrom) def test_spawn_config_drive(self): self._test_spawn_config_drive(False) def test_spawn_config_drive_cdrom(self): self._test_spawn_config_drive(True) def test_spawn_no_config_drive(self): self.flags(force_config_drive=False) expected_ide_disks = 1 expected_ide_dvds = 0 self._test_spawn_instance(expected_ide_disks=expected_ide_disks, expected_ide_dvds=expected_ide_dvds) def test_spawn_nova_net_vif(self): self.flags(network_api_class='nova.network.api.API') # Reinstantiate driver, as the VIF plugin is loaded during __init__ self._conn = driver_hyperv.HyperVDriver(None) def setup_vif_mocks(): fake_vswitch_path = 'fake vswitch path' fake_vswitch_port = 'fake port' m = networkutils.NetworkUtils.get_external_vswitch( CONF.hyperv.vswitch_name) m.AndReturn(fake_vswitch_path) m = networkutils.NetworkUtils.create_vswitch_port( fake_vswitch_path, mox.IsA(str)) m.AndReturn(fake_vswitch_port) vmutils.VMUtils.set_nic_connection(mox.IsA(str), mox.IsA(str), fake_vswitch_port) self._test_spawn_instance(setup_vif_mocks_func=setup_vif_mocks) def test_spawn_nova_net_vif_no_vswitch_exception(self): self.flags(network_api_class='nova.network.api.API') # Reinstantiate driver, as the VIF plugin is loaded during __init__ self._conn = driver_hyperv.HyperVDriver(None) def setup_vif_mocks(): m = networkutils.NetworkUtils.get_external_vswitch( CONF.hyperv.vswitch_name) m.AndRaise(vmutils.HyperVException(_('fake vswitch not found'))) self.assertRaises(vmutils.HyperVException, self._test_spawn_instance, setup_vif_mocks_func=setup_vif_mocks, with_exception=True) def _check_instance_name(self, vm_name): return vm_name == self._instance_data['name'] def 
_test_vm_state_change(self, action, from_state, to_state): self._instance_data = self._get_instance_data() vmutils.VMUtils.set_vm_state(mox.Func(self._check_instance_name), to_state) self._mox.ReplayAll() action(self._instance_data) self._mox.VerifyAll() def test_pause(self): self._test_vm_state_change(self._conn.pause, None, constants.HYPERV_VM_STATE_PAUSED) def test_pause_already_paused(self): self._test_vm_state_change(self._conn.pause, constants.HYPERV_VM_STATE_PAUSED, constants.HYPERV_VM_STATE_PAUSED) def test_unpause(self): self._test_vm_state_change(self._conn.unpause, constants.HYPERV_VM_STATE_PAUSED, constants.HYPERV_VM_STATE_ENABLED) def test_unpause_already_running(self): self._test_vm_state_change(self._conn.unpause, None, constants.HYPERV_VM_STATE_ENABLED) def test_suspend(self): self._test_vm_state_change(self._conn.suspend, None, constants.HYPERV_VM_STATE_SUSPENDED) def test_suspend_already_suspended(self): self._test_vm_state_change(self._conn.suspend, constants.HYPERV_VM_STATE_SUSPENDED, constants.HYPERV_VM_STATE_SUSPENDED) def test_resume(self): self._test_vm_state_change(lambda i: self._conn.resume(i, None), constants.HYPERV_VM_STATE_SUSPENDED, constants.HYPERV_VM_STATE_ENABLED) def test_resume_already_running(self): self._test_vm_state_change(lambda i: self._conn.resume(i, None), None, constants.HYPERV_VM_STATE_ENABLED) def test_power_off(self): self._test_vm_state_change(self._conn.power_off, None, constants.HYPERV_VM_STATE_DISABLED) def test_power_off_already_powered_off(self): self._test_vm_state_change(self._conn.power_off, constants.HYPERV_VM_STATE_DISABLED, constants.HYPERV_VM_STATE_DISABLED) def test_power_on(self): self._test_vm_state_change(self._conn.power_on, constants.HYPERV_VM_STATE_DISABLED, constants.HYPERV_VM_STATE_ENABLED) def test_power_on_already_running(self): self._test_vm_state_change(self._conn.power_on, None, constants.HYPERV_VM_STATE_ENABLED) def test_reboot(self): network_info = fake_network.fake_get_instance_nw_info(self.stubs, spectacular=True) self._instance_data = self._get_instance_data() vmutils.VMUtils.set_vm_state(mox.Func(self._check_instance_name), constants.HYPERV_VM_STATE_REBOOT) self._mox.ReplayAll() self._conn.reboot(self._context, self._instance_data, network_info, None) self._mox.VerifyAll() def _setup_destroy_mocks(self, destroy_disks=True): m = vmutils.VMUtils.vm_exists(mox.Func(self._check_instance_name)) m.AndReturn(True) func = mox.Func(self._check_instance_name) vmutils.VMUtils.set_vm_state(func, constants.HYPERV_VM_STATE_DISABLED) m = vmutils.VMUtils.get_vm_storage_paths(func) m.AndReturn(([], [])) vmutils.VMUtils.destroy_vm(func) if destroy_disks: m = fake.PathUtils.get_instance_dir(mox.IsA(str), create_dir=False, remove_dir=True) m.AndReturn(self._test_instance_dir) def test_destroy(self): self._instance_data = self._get_instance_data() self._setup_destroy_mocks() self._mox.ReplayAll() self._conn.destroy(self._instance_data, None) self._mox.VerifyAll() def test_live_migration_without_volumes(self): self._test_live_migration() def test_live_migration_with_volumes(self): self._test_live_migration(with_volumes=True) def test_live_migration_with_target_failure(self): self._test_live_migration(test_failure=True) def _test_live_migration(self, test_failure=False, with_volumes=False): dest_server = 'fake_server' instance_data = self._get_instance_data() instance_name = instance_data['name'] fake_post_method = self._mox.CreateMockAnything() if not test_failure: fake_post_method(self._context, instance_data, dest_server, False) 
fake_recover_method = self._mox.CreateMockAnything() if test_failure: fake_recover_method(self._context, instance_data, dest_server, False) fake_ide_controller_path = 'fakeide' fake_scsi_controller_path = 'fakescsi' if with_volumes: fake_scsi_disk_path = 'fake_scsi_disk_path' fake_target_iqn = 'fake_target_iqn' fake_target_lun = 1 fake_scsi_paths = {0: fake_scsi_disk_path} else: fake_scsi_paths = {} m = livemigrationutils.LiveMigrationUtils.live_migrate_vm( instance_data['name'], dest_server) if test_failure: m.AndRaise(vmutils.HyperVException('Simulated failure')) if with_volumes: m.AndReturn([(fake_target_iqn, fake_target_lun)]) volumeutils.VolumeUtils.logout_storage_target(fake_target_iqn) else: m.AndReturn([]) self._mox.ReplayAll() try: self._conn.live_migration(self._context, instance_data, dest_server, fake_post_method, fake_recover_method) exception_raised = False except vmutils.HyperVException: exception_raised = True self.assertTrue(not test_failure ^ exception_raised) self._mox.VerifyAll() def test_pre_live_migration_cow_image(self): self._test_pre_live_migration(True, False) def test_pre_live_migration_no_cow_image(self): self._test_pre_live_migration(False, False) def test_pre_live_migration_with_volumes(self): self._test_pre_live_migration(False, True) def _test_pre_live_migration(self, cow, with_volumes): self.flags(use_cow_images=cow) instance_data = self._get_instance_data() instance = db.instance_create(self._context, instance_data) instance['system_metadata'] = {} network_info = fake_network.fake_get_instance_nw_info(self.stubs, spectacular=True) m = livemigrationutils.LiveMigrationUtils.check_live_migration_config() m.AndReturn(True) if cow: m = basevolumeutils.BaseVolumeUtils.volume_in_mapping(mox.IsA(str), None) m.AndReturn(False) m = vhdutils.VHDUtils.get_vhd_info(mox.Func(self._check_img_path)) m.AndReturn({'MaxInternalSize': 1024}) fake.PathUtils.copyfile(mox.IsA(str), mox.IsA(str)) vhdutils.VHDUtils.resize_vhd(mox.IsA(str), mox.IsA(object)) if with_volumes: block_device_info = db_fakes.get_fake_block_device_info( self._volume_target_portal, self._volume_id) mapping = driver.block_device_info_get_mapping(block_device_info) data = mapping[0]['connection_info']['data'] target_lun = data['target_lun'] target_iqn = data['target_iqn'] target_portal = data['target_portal'] fake_mounted_disk = "fake_mounted_disk" fake_device_number = 0 self._mock_login_storage_target(target_iqn, target_lun, target_portal, fake_mounted_disk, fake_device_number) else: block_device_info = None self._mox.ReplayAll() self._conn.pre_live_migration(self._context, instance, block_device_info, network_info) self._mox.VerifyAll() if cow: self.assertTrue(self._fetched_image is not None) else: self.assertTrue(self._fetched_image is None) def test_snapshot_with_update_failure(self): (snapshot_name, func_call_matcher) = self._setup_snapshot_mocks() self._update_image_raise_exception = True self._mox.ReplayAll() self.assertRaises(vmutils.HyperVException, self._conn.snapshot, self._context, self._instance_data, snapshot_name, func_call_matcher.call) self._mox.VerifyAll() # Assert states changed in correct order self.assertIsNone(func_call_matcher.match()) def _setup_snapshot_mocks(self): expected_calls = [ {'args': (), 'kwargs': {'task_state': task_states.IMAGE_PENDING_UPLOAD}}, {'args': (), 'kwargs': {'task_state': task_states.IMAGE_UPLOADING, 'expected_state': task_states.IMAGE_PENDING_UPLOAD}} ] func_call_matcher = matchers.FunctionCallMatcher(expected_calls) snapshot_name = 'test_snapshot_' + 
str(uuid.uuid4()) fake_hv_snapshot_path = 'fake_snapshot_path' fake_parent_vhd_path = 'C:\\fake_vhd_path\\parent.vhd' self._instance_data = self._get_instance_data() func = mox.Func(self._check_instance_name) m = vmutils.VMUtils.take_vm_snapshot(func) m.AndReturn(fake_hv_snapshot_path) m = fake.PathUtils.get_instance_dir(mox.IsA(str)) m.AndReturn(self._test_instance_dir) m = vhdutils.VHDUtils.get_vhd_parent_path(mox.IsA(str)) m.AndReturn(fake_parent_vhd_path) self._fake_dest_disk_path = None def copy_dest_disk_path(src, dest): self._fake_dest_disk_path = dest m = fake.PathUtils.copyfile(mox.IsA(str), mox.IsA(str)) m.WithSideEffects(copy_dest_disk_path) self._fake_dest_base_disk_path = None def copy_dest_base_disk_path(src, dest): self._fake_dest_base_disk_path = dest m = fake.PathUtils.copyfile(fake_parent_vhd_path, mox.IsA(str)) m.WithSideEffects(copy_dest_base_disk_path) def check_dest_disk_path(path): return path == self._fake_dest_disk_path def check_dest_base_disk_path(path): return path == self._fake_dest_base_disk_path func1 = mox.Func(check_dest_disk_path) func2 = mox.Func(check_dest_base_disk_path) # Make sure that the hyper-v base and differential VHDs are merged vhdutils.VHDUtils.reconnect_parent_vhd(func1, func2) vhdutils.VHDUtils.merge_vhd(func1, func2) def check_snapshot_path(snapshot_path): return snapshot_path == fake_hv_snapshot_path # Make sure that the Hyper-V snapshot is removed func = mox.Func(check_snapshot_path) vmutils.VMUtils.remove_vm_snapshot(func) fake.PathUtils.rmtree(mox.IsA(str)) m = fake.PathUtils.open(func2, 'rb') m.AndReturn(io.BytesIO(b'fake content')) return (snapshot_name, func_call_matcher) def test_snapshot(self): (snapshot_name, func_call_matcher) = self._setup_snapshot_mocks() self._mox.ReplayAll() self._conn.snapshot(self._context, self._instance_data, snapshot_name, func_call_matcher.call) self._mox.VerifyAll() self.assertTrue(self._image_metadata and "disk_format" in self._image_metadata and self._image_metadata["disk_format"] == "vhd") # Assert states changed in correct order self.assertIsNone(func_call_matcher.match()) def _get_instance_data(self): instance_name = 'openstack_unit_test_vm_' + str(uuid.uuid4()) return db_fakes.get_fake_instance_data(instance_name, self._project_id, self._user_id) def _spawn_instance(self, cow, block_device_info=None): self.flags(use_cow_images=cow) self._instance_data = self._get_instance_data() instance = db.instance_create(self._context, self._instance_data) instance['system_metadata'] = {} image = db_fakes.get_fake_image_data(self._project_id, self._user_id) network_info = fake_network.fake_get_instance_nw_info(self.stubs, spectacular=True) self._conn.spawn(self._context, instance, image, injected_files=[], admin_password=None, network_info=network_info, block_device_info=block_device_info) def _add_ide_disk(self, vm_name, path, ctrller_addr, drive_addr, drive_type): if drive_type == constants.IDE_DISK: self._instance_ide_disks.append(path) elif drive_type == constants.IDE_DVD: self._instance_ide_dvds.append(path) def _add_volume_disk(self, vm_name, controller_path, address, mounted_disk_path): self._instance_volume_disks.append(mounted_disk_path) def _check_img_path(self, image_path): return image_path == self._fetched_image def _setup_create_instance_mocks(self, setup_vif_mocks_func=None, boot_from_volume=False, block_device_info=None): vmutils.VMUtils.create_vm(mox.Func(self._check_vm_name), mox.IsA(int), mox.IsA(int), mox.IsA(bool)) if not boot_from_volume: m = 
vmutils.VMUtils.attach_ide_drive(mox.Func(self._check_vm_name), mox.IsA(str), mox.IsA(int), mox.IsA(int), mox.IsA(str)) m.WithSideEffects(self._add_ide_disk).InAnyOrder() func = mox.Func(self._check_vm_name) m = vmutils.VMUtils.create_scsi_controller(func) m.InAnyOrder() if boot_from_volume: mapping = driver.block_device_info_get_mapping(block_device_info) data = mapping[0]['connection_info']['data'] target_lun = data['target_lun'] target_iqn = data['target_iqn'] target_portal = data['target_portal'] self._mock_attach_volume(mox.Func(self._check_vm_name), target_iqn, target_lun, target_portal, True) vmutils.VMUtils.create_nic(mox.Func(self._check_vm_name), mox.IsA(str), mox.IsA(str)).InAnyOrder() if setup_vif_mocks_func: setup_vif_mocks_func() def _set_vm_name(self, vm_name): self._test_vm_name = vm_name def _check_vm_name(self, vm_name): return vm_name == self._test_vm_name def _setup_spawn_instance_mocks(self, cow, setup_vif_mocks_func=None, with_exception=False, block_device_info=None, boot_from_volume=False, config_drive=False, use_cdrom=False): m = vmutils.VMUtils.vm_exists(mox.IsA(str)) m.WithSideEffects(self._set_vm_name).AndReturn(False) m = fake.PathUtils.get_instance_dir(mox.IsA(str), create_dir=False, remove_dir=True) m.AndReturn(self._test_instance_dir) m = basevolumeutils.BaseVolumeUtils.volume_in_mapping( mox.IsA(str), block_device_info) m.AndReturn(boot_from_volume) if not boot_from_volume: m = fake.PathUtils.get_instance_dir(mox.Func(self._check_vm_name)) m.AndReturn(self._test_instance_dir) m = vhdutils.VHDUtils.get_vhd_info(mox.Func(self._check_img_path)) m.AndReturn({'MaxInternalSize': 1024}) if cow: fake.PathUtils.copyfile(mox.IsA(str), mox.IsA(str)) vhdutils.VHDUtils.resize_vhd(mox.IsA(str), mox.IsA(object)) vhdutils.VHDUtils.create_differencing_vhd(mox.IsA(str), mox.IsA(str)) else: vhdutils.VHDUtils.resize_vhd(mox.IsA(str), mox.IsA(object)) fake.PathUtils.copyfile(mox.IsA(str), mox.IsA(str)) self._setup_create_instance_mocks(setup_vif_mocks_func, boot_from_volume, block_device_info) if config_drive: self._setup_spawn_config_drive_mocks(use_cdrom) # TODO(alexpilotti) Based on where the exception is thrown # some of the above mock calls need to be skipped if with_exception: self._setup_destroy_mocks() else: vmutils.VMUtils.set_vm_state(mox.Func(self._check_vm_name), constants.HYPERV_VM_STATE_ENABLED) def _test_spawn_instance(self, cow=True, expected_ide_disks=1, expected_ide_dvds=0, setup_vif_mocks_func=None, with_exception=False, config_drive=False, use_cdrom=False): self._setup_spawn_instance_mocks(cow, setup_vif_mocks_func, with_exception, config_drive=config_drive, use_cdrom=use_cdrom) self._mox.ReplayAll() self._spawn_instance(cow) self._mox.VerifyAll() self.assertEquals(len(self._instance_ide_disks), expected_ide_disks) self.assertEquals(len(self._instance_ide_dvds), expected_ide_dvds) vhd_path = os.path.join(self._test_instance_dir, 'root.vhd') self.assertEquals(vhd_path, self._instance_ide_disks[0]) def _mock_get_mounted_disk_from_lun(self, target_iqn, target_lun, fake_mounted_disk, fake_device_number): m = volumeutils.VolumeUtils.get_device_number_for_target(target_iqn, target_lun) m.AndReturn(fake_device_number) m = vmutils.VMUtils.get_mounted_disk_by_drive_number( fake_device_number) m.AndReturn(fake_mounted_disk) def _mock_login_storage_target(self, target_iqn, target_lun, target_portal, fake_mounted_disk, fake_device_number): m = volumeutils.VolumeUtils.get_device_number_for_target(target_iqn, target_lun) m.AndReturn(fake_device_number) 
volumeutils.VolumeUtils.login_storage_target(target_lun, target_iqn, target_portal) self._mock_get_mounted_disk_from_lun(target_iqn, target_lun, fake_mounted_disk, fake_device_number) def _mock_attach_volume(self, instance_name, target_iqn, target_lun, target_portal=None, boot_from_volume=False): fake_mounted_disk = "fake_mounted_disk" fake_device_number = 0 fake_controller_path = 'fake_scsi_controller_path' self._mock_login_storage_target(target_iqn, target_lun, target_portal, fake_mounted_disk, fake_device_number) self._mock_get_mounted_disk_from_lun(target_iqn, target_lun, fake_mounted_disk, fake_device_number) if boot_from_volume: m = vmutils.VMUtils.get_vm_ide_controller(instance_name, 0) m.AndReturn(fake_controller_path) fake_free_slot = 0 else: m = vmutils.VMUtils.get_vm_scsi_controller(instance_name) m.AndReturn(fake_controller_path) fake_free_slot = 1 m = vmutils.VMUtils.get_attached_disks_count(fake_controller_path) m.AndReturn(fake_free_slot) m = vmutils.VMUtils.attach_volume_to_controller(instance_name, fake_controller_path, fake_free_slot, fake_mounted_disk) m.WithSideEffects(self._add_volume_disk) def test_attach_volume(self): instance_data = self._get_instance_data() connection_info = db_fakes.get_fake_volume_info_data( self._volume_target_portal, self._volume_id) data = connection_info['data'] target_lun = data['target_lun'] target_iqn = data['target_iqn'] target_portal = data['target_portal'] mount_point = '/dev/sdc' self._mock_attach_volume(instance_data['name'], target_iqn, target_lun, target_portal) self._mox.ReplayAll() self._conn.attach_volume(connection_info, instance_data, mount_point) self._mox.VerifyAll() self.assertEquals(len(self._instance_volume_disks), 1) def _mock_detach_volume(self, target_iqn, target_lun): mount_point = '/dev/sdc' fake_mounted_disk = "fake_mounted_disk" fake_device_number = 0 m = volumeutils.VolumeUtils.get_device_number_for_target(target_iqn, target_lun) m.AndReturn(fake_device_number) m = vmutils.VMUtils.get_mounted_disk_by_drive_number( fake_device_number) m.AndReturn(fake_mounted_disk) vmutils.VMUtils.detach_vm_disk(mox.IsA(str), fake_mounted_disk) volumeutils.VolumeUtils.logout_storage_target(mox.IsA(str)) def test_detach_volume(self): instance_data = self._get_instance_data() instance_name = instance_data['name'] connection_info = db_fakes.get_fake_volume_info_data( self._volume_target_portal, self._volume_id) data = connection_info['data'] target_lun = data['target_lun'] target_iqn = data['target_iqn'] target_portal = data['target_portal'] mount_point = '/dev/sdc' self._mock_detach_volume(target_iqn, target_lun) self._mox.ReplayAll() self._conn.detach_volume(connection_info, instance_data, mount_point) self._mox.VerifyAll() def test_boot_from_volume(self): block_device_info = db_fakes.get_fake_block_device_info( self._volume_target_portal, self._volume_id) self._setup_spawn_instance_mocks(cow=False, block_device_info=block_device_info, boot_from_volume=True) self._mox.ReplayAll() self._spawn_instance(False, block_device_info) self._mox.VerifyAll() self.assertEquals(len(self._instance_volume_disks), 1) def _setup_test_migrate_disk_and_power_off_mocks(self, same_host=False, copy_exception=False, size_exception=False): self._instance_data = self._get_instance_data() instance = db.instance_create(self._context, self._instance_data) network_info = fake_network.fake_get_instance_nw_info( self.stubs, spectacular=True) instance['root_gb'] = 10 fake_local_ip = '10.0.0.1' if same_host: fake_dest_ip = fake_local_ip else: fake_dest_ip = 
'10.0.0.2' if size_exception: flavor = 'm1.tiny' else: flavor = 'm1.small' instance_type = db.instance_type_get_by_name(self._context, flavor) if not size_exception: fake_root_vhd_path = 'C:\\FakePath\\root.vhd' fake_revert_path = os.path.join(self._test_instance_dir, '_revert') func = mox.Func(self._check_instance_name) vmutils.VMUtils.set_vm_state(func, constants.HYPERV_VM_STATE_DISABLED) m = vmutils.VMUtils.get_vm_storage_paths(func) m.AndReturn(([fake_root_vhd_path], [])) m = hostutils.HostUtils.get_local_ips() m.AndReturn([fake_local_ip]) m = fake.PathUtils.get_instance_dir(mox.IsA(str)) m.AndReturn(self._test_instance_dir) m = pathutils.PathUtils.get_instance_migr_revert_dir( instance['name'], remove_dir=True) m.AndReturn(fake_revert_path) if same_host: fake.PathUtils.makedirs(mox.IsA(str)) m = fake.PathUtils.copy(fake_root_vhd_path, mox.IsA(str)) if copy_exception: m.AndRaise(shutil.Error('Simulated copy error')) m = fake.PathUtils.get_instance_dir(mox.IsA(str), mox.IsA(str), remove_dir=True) m.AndReturn(self._test_instance_dir) else: fake.PathUtils.rename(mox.IsA(str), mox.IsA(str)) destroy_disks = True if same_host: fake.PathUtils.rename(mox.IsA(str), mox.IsA(str)) destroy_disks = False self._setup_destroy_mocks(False) if destroy_disks: m = fake.PathUtils.get_instance_dir(mox.IsA(str), mox.IsA(str), remove_dir=True) m.AndReturn(self._test_instance_dir) return (instance, fake_dest_ip, network_info, instance_type) def test_migrate_disk_and_power_off(self): (instance, fake_dest_ip, network_info, instance_type) = self._setup_test_migrate_disk_and_power_off_mocks() self._mox.ReplayAll() self._conn.migrate_disk_and_power_off(self._context, instance, fake_dest_ip, instance_type, network_info) self._mox.VerifyAll() def test_migrate_disk_and_power_off_same_host(self): args = self._setup_test_migrate_disk_and_power_off_mocks( same_host=True) (instance, fake_dest_ip, network_info, instance_type) = args self._mox.ReplayAll() self._conn.migrate_disk_and_power_off(self._context, instance, fake_dest_ip, instance_type, network_info) self._mox.VerifyAll() def test_migrate_disk_and_power_off_copy_exception(self): args = self._setup_test_migrate_disk_and_power_off_mocks( copy_exception=True) (instance, fake_dest_ip, network_info, instance_type) = args self._mox.ReplayAll() self.assertRaises(shutil.Error, self._conn.migrate_disk_and_power_off, self._context, instance, fake_dest_ip, instance_type, network_info) self._mox.VerifyAll() def test_migrate_disk_and_power_off_smaller_root_vhd_size_exception(self): args = self._setup_test_migrate_disk_and_power_off_mocks( size_exception=True) (instance, fake_dest_ip, network_info, instance_type) = args self._mox.ReplayAll() self.assertRaises(vmutils.VHDResizeException, self._conn.migrate_disk_and_power_off, self._context, instance, fake_dest_ip, instance_type, network_info) self._mox.VerifyAll() def _test_finish_migration(self, power_on): self._instance_data = self._get_instance_data() instance = db.instance_create(self._context, self._instance_data) instance['system_metadata'] = {} network_info = fake_network.fake_get_instance_nw_info( self.stubs, spectacular=True) m = basevolumeutils.BaseVolumeUtils.volume_in_mapping(mox.IsA(str), None) m.AndReturn(False) m = fake.PathUtils.get_instance_dir(mox.IsA(str)) m.AndReturn(self._test_instance_dir) self._mox.StubOutWithMock(fake.PathUtils, 'exists') m = fake.PathUtils.exists(mox.IsA(str)) m.AndReturn(True) fake_parent_vhd_path = (os.path.join('FakeParentPath', '%s.vhd' % instance["image_ref"])) m = 
vhdutils.VHDUtils.get_vhd_info(mox.IsA(str)) m.AndReturn({'ParentPath': fake_parent_vhd_path, 'MaxInternalSize': 1}) m = fake.PathUtils.exists(mox.IsA(str)) m.AndReturn(True) vhdutils.VHDUtils.reconnect_parent_vhd(mox.IsA(str), mox.IsA(str)) m = vhdutils.VHDUtils.get_vhd_info(mox.IsA(str)) m.AndReturn({'MaxInternalSize': 1024}) m = fake.PathUtils.exists(mox.IsA(str)) m.AndReturn(True) self._set_vm_name(instance['name']) self._setup_create_instance_mocks(None, False) if power_on: vmutils.VMUtils.set_vm_state(mox.Func(self._check_instance_name), constants.HYPERV_VM_STATE_ENABLED) self._mox.ReplayAll() self._conn.finish_migration(self._context, None, instance, "", network_info, None, False, None, power_on) self._mox.VerifyAll() def test_finish_migration_power_on(self): self._test_finish_migration(True) def test_finish_migration_power_off(self): self._test_finish_migration(False) def test_confirm_migration(self): self._instance_data = self._get_instance_data() instance = db.instance_create(self._context, self._instance_data) network_info = fake_network.fake_get_instance_nw_info( self.stubs, spectacular=True) pathutils.PathUtils.get_instance_migr_revert_dir(instance['name'], remove_dir=True) self._mox.ReplayAll() self._conn.confirm_migration(None, instance, network_info) self._mox.VerifyAll() def _test_finish_revert_migration(self, power_on): self._instance_data = self._get_instance_data() instance = db.instance_create(self._context, self._instance_data) network_info = fake_network.fake_get_instance_nw_info( self.stubs, spectacular=True) fake_revert_path = ('C:\\FakeInstancesPath\\%s\\_revert' % instance['name']) m = basevolumeutils.BaseVolumeUtils.volume_in_mapping(mox.IsA(str), None) m.AndReturn(False) m = fake.PathUtils.get_instance_dir(mox.IsA(str), create_dir=False, remove_dir=True) m.AndReturn(self._test_instance_dir) m = pathutils.PathUtils.get_instance_migr_revert_dir(instance['name']) m.AndReturn(fake_revert_path) fake.PathUtils.rename(fake_revert_path, mox.IsA(str)) m = fake.PathUtils.get_instance_dir(mox.IsA(str)) m.AndReturn(self._test_instance_dir) self._set_vm_name(instance['name']) self._setup_create_instance_mocks(None, False) if power_on: vmutils.VMUtils.set_vm_state(mox.Func(self._check_instance_name), constants.HYPERV_VM_STATE_ENABLED) self._mox.ReplayAll() self._conn.finish_revert_migration(instance, network_info, None, power_on) self._mox.VerifyAll() def test_finish_revert_migration_power_on(self): self._test_finish_revert_migration(True) def test_finish_revert_migration_power_off(self): self._test_finish_revert_migration(False)
apache-2.0
-5,168,143,782,321,951,000
39.768322
79
0.562984
false
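The Hyper-V test case above leans heavily on mox's record/replay/verify lifecycle (StubOutWithMock, ReplayAll, VerifyAll, UnsetStubs). A minimal sketch of that lifecycle with a throwaway class, so the moving parts are visible outside the large test; Greeter is invented here purely for illustration:

import mox

class Greeter(object):
    def greet(self, name):
        return 'hello %s' % name

m = mox.Mox()
m.StubOutWithMock(Greeter, 'greet')

# Record phase: declare the expected call and its canned return value,
# just like the vmutils.VMUtils.* expectations in the test above.
Greeter.greet(mox.IsA(str)).AndReturn('hi fake')

m.ReplayAll()                     # switch from recording to replaying
print(Greeter().greet('world'))   # satisfies the expectation -> 'hi fake'
m.VerifyAll()                     # fail if any recorded call never happened
m.UnsetStubs()                    # restore the real Greeter.greet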
updownlife/multipleK
dependencies/biopython-1.65/Tests/test_SearchIO_hmmer3_tab.py
1
20424
# Copyright 2012 by Wibowo Arindrarto. All rights reserved. # This code is part of the Biopython distribution and governed by its # license. Please see the LICENSE file that should have been included # as part of this package. """Tests for SearchIO HmmerIO hmmer3-tab parser.""" import os import unittest from Bio import BiopythonExperimentalWarning import warnings with warnings.catch_warnings(): warnings.simplefilter('ignore', BiopythonExperimentalWarning) from Bio.SearchIO import parse # test case files are in the Blast directory TEST_DIR = 'Hmmer' FMT = 'hmmer3-tab' def get_file(filename): """Returns the path of a test file.""" return os.path.join(TEST_DIR, filename) class HmmscanCases(unittest.TestCase): def test_31b1_hmmscan_001(self): """Test parsing hmmer3-tab, hmmscan 3.1b1, multiple queries (tab_31b1_hmmscan_001)""" tab_file = get_file('tab_31b1_hmmscan_001.out') qresults = list(parse(tab_file, FMT)) self.assertEqual(4, len(qresults)) # first qresult, first hit, first hsp qresult = qresults[0] self.assertEqual(1, len(qresult)) self.assertEqual('gi|4885477|ref|NP_005359.1|', qresult.id) self.assertEqual('-', qresult.accession) hit = qresult[0] self.assertEqual(1, len(hit)) self.assertEqual('Globin', hit.id) self.assertEqual('PF00042.17', hit.accession) self.assertEqual(1e-22, hit.evalue) self.assertEqual(80.5, hit.bitscore) self.assertEqual(0.3, hit.bias) self.assertEqual(1.3, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(1, hit.domain_included_num) self.assertEqual('Globin', hit.description) hsp = hit.hsps[0] self.assertEqual(1.6e-22, hsp.evalue) self.assertEqual(79.8, hsp.bitscore) self.assertEqual(0.3, hsp.bias) # last qresult, last hit, last hsp qresult = qresults[-1] self.assertEqual(5, len(qresult)) self.assertEqual('gi|125490392|ref|NP_038661.2|', qresult.id) self.assertEqual('-', qresult.accession) hit = qresult[-1] self.assertEqual(1, len(hit)) self.assertEqual('DUF521', hit.id) self.assertEqual('PF04412.8', hit.accession) self.assertEqual(0.15, hit.evalue) self.assertEqual(10.5, hit.bitscore) self.assertEqual(0.1, hit.bias) self.assertEqual(1.4, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(0, hit.domain_included_num) self.assertEqual('Protein of unknown function (DUF521)', hit.description) hsp = hit.hsps[0] self.assertEqual(0.28, hsp.evalue) self.assertEqual(9.6, hsp.bitscore) self.assertEqual(0.1, hsp.bias) def test_30_hmmscan_001(self): "Test parsing hmmer3-tab, hmmscan 3.0, multiple queries (tab_30_hmmscan_001)" tab_file = get_file('tab_30_hmmscan_001.out') qresults = parse(tab_file, FMT) counter = 0 # first qresult qresult = next(qresults) counter += 1 self.assertEqual(1, len(qresult)) self.assertEqual('gi|4885477|ref|NP_005359.1|', qresult.id) self.assertEqual('-', qresult.accession) hit = qresult[0] self.assertEqual(1, len(hit)) self.assertEqual('Globin', hit.id) self.assertEqual('PF00042.17', hit.accession) self.assertEqual(6e-21, hit.evalue) self.assertEqual(74.6, hit.bitscore) self.assertEqual(0.3, hit.bias) self.assertEqual(1.3, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) 
self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(1, hit.domain_included_num) self.assertEqual('Globin', hit.description) hsp = hit.hsps[0] self.assertEqual(9.2e-21, hsp.evalue) self.assertEqual(74.0, hsp.bitscore) self.assertEqual(0.2, hsp.bias) # second qresult qresult = next(qresults) counter += 1 self.assertEqual(2, len(qresult)) self.assertEqual('gi|126362951:116-221', qresult.id) self.assertEqual('-', qresult.accession) hit = qresult[0] self.assertEqual(1, len(hit)) self.assertEqual('Ig_3', hit.id) self.assertEqual('PF13927.1', hit.accession) self.assertEqual(1.4e-09, hit.evalue) self.assertEqual(38.2, hit.bitscore) self.assertEqual(0.4, hit.bias) self.assertEqual(1.3, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(1, hit.domain_included_num) self.assertEqual('Immunoglobulin domain', hit.description) hsp = hit.hsps[0] self.assertEqual(2.1e-09, hsp.evalue) self.assertEqual(37.6, hsp.bitscore) self.assertEqual(0.3, hsp.bias) hit = qresult[1] self.assertEqual(1, len(hit)) self.assertEqual('Ig_2', hit.id) self.assertEqual('PF13895.1', hit.accession) self.assertEqual(3.5e-05, hit.evalue) self.assertEqual(23.7, hit.bitscore) self.assertEqual(0.1, hit.bias) self.assertEqual(1.1, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(1, hit.domain_included_num) self.assertEqual('Immunoglobulin domain', hit.description) hsp = hit.hsps[0] self.assertEqual(4.3e-05, hsp.evalue) self.assertEqual(23.4, hsp.bitscore) self.assertEqual(0.1, hsp.bias) # third qresult qresult = next(qresults) counter += 1 self.assertEqual(2, len(qresult)) self.assertEqual('gi|22748937|ref|NP_065801.1|', qresult.id) self.assertEqual('-', qresult.accession) hit = qresult[0] self.assertEqual(1, len(hit)) self.assertEqual('Xpo1', hit.id) self.assertEqual('PF08389.7', hit.accession) self.assertEqual(7.8e-34, hit.evalue) self.assertEqual(116.6, hit.bitscore) self.assertEqual(7.8, hit.bias) self.assertEqual(2.8, hit.domain_exp_num) self.assertEqual(2, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(2, hit.env_num) self.assertEqual(2, hit.domain_obs_num) self.assertEqual(2, hit.domain_reported_num) self.assertEqual(1, hit.domain_included_num) self.assertEqual('Exportin 1-like protein', hit.description) hsp = hit.hsps[0] self.assertEqual(1.1e-33, hsp.evalue) self.assertEqual(116.1, hsp.bitscore) self.assertEqual(3.4, hsp.bias) hit = qresult[1] self.assertEqual(1, len(hit)) self.assertEqual('IBN_N', hit.id) self.assertEqual('PF03810.14', hit.accession) self.assertEqual(0.0039, hit.evalue) self.assertEqual(16.9, hit.bitscore) self.assertEqual(0.0, hit.bias) self.assertEqual(2.7, hit.domain_exp_num) self.assertEqual(2, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(2, hit.env_num) self.assertEqual(2, hit.domain_obs_num) self.assertEqual(2, hit.domain_reported_num) self.assertEqual(1, hit.domain_included_num) self.assertEqual('Importin-beta N-terminal domain', 
hit.description) hsp = hit.hsps[0] self.assertEqual(0.033, hsp.evalue) self.assertEqual(14.0, hsp.bitscore) self.assertEqual(0.0, hsp.bias) # last qresult qresult = next(qresults) counter += 1 self.assertEqual(5, len(qresult)) self.assertEqual('gi|125490392|ref|NP_038661.2|', qresult.id) self.assertEqual('-', qresult.accession) # first hit hit = qresult[0] self.assertEqual(1, len(hit)) self.assertEqual('Pou', hit.id) self.assertEqual('PF00157.12', hit.accession) self.assertEqual(7e-37, hit.evalue) self.assertEqual(124.8, hit.bitscore) self.assertEqual(0.5, hit.bias) self.assertEqual(1.5, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(1, hit.domain_included_num) self.assertEqual('Pou domain - N-terminal to homeobox domain', hit.description) hsp = hit.hsps[0] self.assertEqual(1.4e-36, hsp.evalue) self.assertEqual(123.9, hsp.bitscore) self.assertEqual(0.3, hsp.bias) # second hit hit = qresult[1] self.assertEqual(1, len(hit)) self.assertEqual('Homeobox', hit.id) self.assertEqual('PF00046.24', hit.accession) self.assertEqual(2.1e-18, hit.evalue) self.assertEqual(65.5, hit.bitscore) self.assertEqual(1.1, hit.bias) self.assertEqual(1.5, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(1, hit.domain_included_num) self.assertEqual('Homeobox domain', hit.description) hsp = hit.hsps[0] self.assertEqual(4.1e-18, hsp.evalue) self.assertEqual(64.6, hsp.bitscore) self.assertEqual(0.7, hsp.bias) # third hit hit = qresult[2] self.assertEqual(1, len(hit)) self.assertEqual('HTH_31', hit.id) self.assertEqual('PF13560.1', hit.accession) self.assertEqual(0.012, hit.evalue) self.assertEqual(15.6, hit.bitscore) self.assertEqual(0.0, hit.bias) self.assertEqual(2.2, hit.domain_exp_num) self.assertEqual(2, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(2, hit.env_num) self.assertEqual(2, hit.domain_obs_num) self.assertEqual(2, hit.domain_reported_num) self.assertEqual(0, hit.domain_included_num) self.assertEqual('Helix-turn-helix domain', hit.description) hsp = hit.hsps[0] self.assertEqual(0.16, hsp.evalue) self.assertEqual(12.0, hsp.bitscore) self.assertEqual(0.0, hsp.bias) # fourth hit hit = qresult[3] self.assertEqual(1, len(hit)) self.assertEqual('Homeobox_KN', hit.id) self.assertEqual('PF05920.6', hit.accession) self.assertEqual(0.039, hit.evalue) self.assertEqual(13.5, hit.bitscore) self.assertEqual(0.0, hit.bias) self.assertEqual(1.6, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(0, hit.domain_included_num) self.assertEqual('Homeobox KN domain', hit.description) hsp = hit.hsps[0] self.assertEqual(0.095, hsp.evalue) self.assertEqual(12.3, hsp.bitscore) self.assertEqual(0.0, hsp.bias) # fifth hit hit = qresult[4] self.assertEqual(1, len(hit)) self.assertEqual('DUF521', hit.id) self.assertEqual('PF04412.8', hit.accession) self.assertEqual(0.14, hit.evalue) self.assertEqual(10.5, hit.bitscore) 
self.assertEqual(0.1, hit.bias) self.assertEqual(1.4, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(0, hit.domain_included_num) self.assertEqual('Protein of unknown function (DUF521)', hit.description) hsp = hit.hsps[0] self.assertEqual(0.26, hsp.evalue) self.assertEqual(9.6, hsp.bitscore) self.assertEqual(0.1, hsp.bias) # test if we've properly finished iteration self.assertRaises(StopIteration, next, qresults) self.assertEqual(4, counter) def test_30_hmmscan_002(self): "Test parsing hmmer3-tab, hmmscan 3.0, single query, no hits (tab_30_hmmscan_002)" tab_file = get_file('tab_30_hmmscan_002.out') qresults = parse(tab_file, FMT) self.assertRaises(StopIteration, next, qresults) def test_30_hmmscan_003(self): "Test parsing hmmer3-tab, hmmscan 3.0, single query, single hit, single hsp (tab_30_hmmscan_003)" tab_file = get_file('tab_30_hmmscan_003.out') qresults = parse(tab_file, FMT) counter = 0 qresult = next(qresults) counter += 1 self.assertEqual(1, len(qresult)) self.assertEqual('gi|4885477|ref|NP_005359.1|', qresult.id) self.assertEqual('-', qresult.accession) hit = qresult[0] self.assertEqual(1, len(hit)) self.assertEqual('Globin', hit.id) self.assertEqual('PF00042.17', hit.accession) self.assertEqual(6e-21, hit.evalue) self.assertEqual(74.6, hit.bitscore) self.assertEqual(0.3, hit.bias) self.assertEqual(1.3, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(1, hit.domain_included_num) self.assertEqual('Globin', hit.description) hsp = hit.hsps[0] self.assertEqual(9.2e-21, hsp.evalue) self.assertEqual(74.0, hsp.bitscore) self.assertEqual(0.2, hsp.bias) # test if we've properly finished iteration self.assertRaises(StopIteration, next, qresults) self.assertEqual(1, counter) def test_30_hmmscan_004(self): "Test parsing hmmer3-tab, hmmscan 3.0, single query, multiple hits (tab_30_hmmscan_004)" tab_file = get_file('tab_30_hmmscan_004.out') qresults = parse(tab_file, FMT) counter = 0 qresult = next(qresults) counter += 1 self.assertEqual(2, len(qresult)) self.assertEqual('gi|126362951:116-221', qresult.id) self.assertEqual('-', qresult.accession) hit = qresult[0] self.assertEqual(1, len(hit)) self.assertEqual('Ig_3', hit.id) self.assertEqual('PF13927.1', hit.accession) self.assertEqual(1.4e-09, hit.evalue) self.assertEqual(38.2, hit.bitscore) self.assertEqual(0.4, hit.bias) self.assertEqual(1.3, hit.domain_exp_num) self.assertEqual(1, hit.region_num) self.assertEqual(0, hit.cluster_num) self.assertEqual(0, hit.overlap_num) self.assertEqual(1, hit.env_num) self.assertEqual(1, hit.domain_obs_num) self.assertEqual(1, hit.domain_reported_num) self.assertEqual(1, hit.domain_included_num) self.assertEqual('Immunoglobulin domain', hit.description) hsp = hit.hsps[0] self.assertEqual(2.1e-09, hsp.evalue) self.assertEqual(37.6, hsp.bitscore) self.assertEqual(0.3, hsp.bias) hit = qresult[1] self.assertEqual(1, len(hit)) self.assertEqual('Ig_2', hit.id) self.assertEqual('PF13895.1', hit.accession) self.assertEqual(3.5e-05, hit.evalue) self.assertEqual(23.7, hit.bitscore) self.assertEqual(0.1, hit.bias) self.assertEqual(1.1, hit.domain_exp_num) self.assertEqual(1, hit.region_num) 
self.assertEqual(0, hit.cluster_num)
        self.assertEqual(0, hit.overlap_num)
        self.assertEqual(1, hit.env_num)
        self.assertEqual(1, hit.domain_obs_num)
        self.assertEqual(1, hit.domain_reported_num)
        self.assertEqual(1, hit.domain_included_num)
        self.assertEqual('Immunoglobulin domain', hit.description)
        hsp = hit.hsps[0]
        self.assertEqual(4.3e-05, hsp.evalue)
        self.assertEqual(23.4, hsp.bitscore)
        self.assertEqual(0.1, hsp.bias)

        # test if we've properly finished iteration
        self.assertRaises(StopIteration, next, qresults)
        self.assertEqual(1, counter)


class HmmsearchCases(unittest.TestCase):

    def test_31b1_hmmsearch_001(self):
        """Test parsing hmmer3-tab, hmmsearch 3.1b1, multiple queries (tab_31b1_hmmsearch_001)"""
        tab_file = get_file('tab_31b1_hmmsearch_001.out')
        qresults = list(parse(tab_file, FMT))
        self.assertEqual(1, len(qresults))

        # first qresult
        qresult = qresults[0]
        self.assertEqual(4, len(qresult))
        self.assertEqual('Pkinase', qresult.id)
        self.assertEqual('PF00069.17', qresult.accession)

        # first hit, first hsp
        hit = qresult[0]
        self.assertEqual(1, len(hit))
        self.assertEqual('sp|Q9WUT3|KS6A2_MOUSE', hit.id)
        self.assertEqual('-', hit.accession)
        self.assertEqual(8.5e-147, hit.evalue)
        self.assertEqual(492.3, hit.bitscore)
        self.assertEqual(0.0, hit.bias)
        self.assertEqual(2.1, hit.domain_exp_num)
        self.assertEqual(2, hit.region_num)
        self.assertEqual(0, hit.cluster_num)
        self.assertEqual(0, hit.overlap_num)
        self.assertEqual(2, hit.env_num)
        self.assertEqual(2, hit.domain_obs_num)
        self.assertEqual(2, hit.domain_reported_num)
        self.assertEqual(2, hit.domain_included_num)
        self.assertEqual('Ribosomal protein S6 kinase alpha-2 OS=Mus musculus GN=Rps6ka2 PE=1 SV=1', hit.description)
        hsp = hit.hsps[0]
        self.assertEqual(1.2e-72, hsp.evalue)
        self.assertEqual(249.3, hsp.bitscore)
        self.assertEqual(0.0, hsp.bias)

        # last hit, last hsp
        hit = qresult[-1]
        self.assertEqual(1, len(hit))
        self.assertEqual('sp|P18652|KS6AA_CHICK', hit.id)
        self.assertEqual('-', hit.accession)
        self.assertEqual(2.6e-145, hit.evalue)
        self.assertEqual(487.5, hit.bitscore)
        self.assertEqual(0.0, hit.bias)
        self.assertEqual(2.1, hit.domain_exp_num)
        self.assertEqual(2, hit.region_num)
        self.assertEqual(0, hit.cluster_num)
        self.assertEqual(0, hit.overlap_num)
        self.assertEqual(2, hit.env_num)
        self.assertEqual(2, hit.domain_obs_num)
        self.assertEqual(2, hit.domain_reported_num)
        self.assertEqual(2, hit.domain_included_num)
        self.assertEqual('Ribosomal protein S6 kinase 2 alpha OS=Gallus gallus GN=RPS6KA PE=2 SV=1', hit.description)
        hsp = hit.hsps[-1]
        self.assertEqual(7.6e-72, hsp.evalue)
        self.assertEqual(246.7, hsp.bitscore)
        self.assertEqual(0.0, hsp.bias)


if __name__ == "__main__":
    runner = unittest.TextTestRunner(verbosity=2)
    unittest.main(testRunner=runner)
gpl-2.0
-5,258,293,306,572,943,000
39.766467
117
0.632393
false
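A minimal usage sketch for the parser the tests above exercise; the file path reuses a fixture name from the suite and the loop body is illustrative. On the Biopython releases targeted here, importing Bio.SearchIO emits BiopythonExperimentalWarning, which the test module suppresses:

from Bio import SearchIO

for qresult in SearchIO.parse('Hmmer/tab_31b1_hmmscan_001.out', 'hmmer3-tab'):
    for hit in qresult:
        # the same attributes the assertions above check
        print(qresult.id, hit.id, hit.evalue, hit.bitscore, hit.description)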
lehmannro/pootle
setup.py
1
11428
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008 Zuza Software Foundation
#
# This file is part of Pootle.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.

import glob
import os
import os.path as path
import re

from distutils import util
from distutils.command.build import build as DistutilsBuild
from distutils.command.install import install as DistutilsInstall
from distutils.core import setup

from pootle.__version__ import sver as pootle_version

###############################################################################
# CONSTANTS
###############################################################################

classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Environment :: Web Environment",
    "Intended Audience :: Developers",
    "Intended Audience :: End Users/Desktop",
    "Intended Audience :: Information Technology",
    "License :: OSI Approved :: GNU General Public License (GPL)",
    "Programming Language :: Python",
    "Topic :: Software Development :: Localization",
    "Topic :: Text Processing :: Linguistic",
    "Operating System :: OS Independent",
    "Operating System :: Microsoft :: Windows",
    "Operating System :: Unix"
]
pootle_description="An online collaborative localization tool."
pootle_description_long="""Pootle is used to create program translations.

It uses the Translate Toolkit to get access to translation files and therefore
can edit a variety of files (including PO and XLIFF files)."""

INSTALL_CONFIG_DIR = '/etc/pootle'
INSTALL_DATA_DIR = 'share/pootle'
INSTALL_DOC_DIR = 'share/doc/pootle'
INSTALL_WORKING_DIR = '/var/lib/pootle'

###############################################################################
# HELPER FUNCTIONS
###############################################################################

def collect_options():
    data_files = [
        (INSTALL_CONFIG_DIR, ['localsettings.py']),
        (INSTALL_DOC_DIR, ['wsgi.py', 'ChangeLog', 'COPYING', 'README', 'INSTALL']),
        (INSTALL_WORKING_DIR + '/dbs', [])  # Create the empty "dbs" dir
    ] + list_tree(INSTALL_DATA_DIR, 'templates') + list_tree(INSTALL_DATA_DIR, 'html') + \
        list_tree(INSTALL_WORKING_DIR, 'po') + list_tree(INSTALL_DATA_DIR, 'mo')

    packages = ['pootle'] + ['pootle.' + pkg for pkg in find_packages('pootle')] + \
        find_packages('local_apps') + find_packages('external_apps')

    package_data = {
        '': ['*.html', '*.txt', '*.xml', '*.css', '*.js'],
        'pootle_app': expand_tree_globs('local_apps/pootle_app', ['templates'], ['*.html']),
        'pootle_language': expand_tree_globs('local_apps/pootle_language', ['templates'], ['*.html']),
        'pootle_notifications': expand_tree_globs('local_apps/pootle_notifications', ['templates'], ['*.html']),
        'pootle_project': expand_tree_globs('local_apps/pootle_project', ['templates'], ['*.html']),
        'pootle_store': expand_tree_globs('local_apps/pootle_store', ['templates'], ['*.html']),
        'pootle_terminology': expand_tree_globs('local_apps/pootle_terminology', ['templates'], ['*.html']),
        'pootle_translationproject': expand_tree_globs('local_apps/pootle_translationproject', ['templates'], ['*.html']),
        'djblets': expand_tree_globs('external_apps/djblets', ['siteconfig', 'util'], ['*.html']),
    }

    package_dir = {
        'pootle_app': 'local_apps/pootle_app',
        'pootle_autonotices': 'local_apps/pootle_autonotices',
        'pootle_language': 'local_apps/pootle_language',
        'pootle_misc': 'local_apps/pootle_misc',
        'pootle_notifications': 'local_apps/pootle_notifications',
        'pootle_profile': 'local_apps/pootle_profile',
        'pootle_project': 'local_apps/pootle_project',
        'pootle_statistics': 'local_apps/pootle_statistics',
        'pootle_store': 'local_apps/pootle_store',
        'pootle_terminology': 'local_apps/pootle_terminology',
        'pootle_translationproject': 'local_apps/pootle_translationproject',
        'registration': 'external_apps/registration',
        'profiles': 'external_apps/profiles',
        'djblets': 'external_apps/djblets',
    }

    scripts = ['import_pootle_prefs', 'updatetm', 'PootleServer']

    options = {
        'data_files': data_files,
        'packages': packages,
        'package_data': package_data,
        'package_dir': package_dir,
        'scripts': scripts,
    }

    return options

def expand_tree_globs(root, subdirs, globs):
    if root.endswith('/'):
        root = root[:-1]
    dirglobs = []
    for subdir in subdirs:
        for g in globs:
            if glob.glob(path.join(root, subdir, g)):
                dirglobs.append(path.join(subdir, g))
        for dirpath, dirs, files in os.walk(path.join(root, subdir)):
            curdir = dirpath[len(root)+1:]
            for d in dirs:
                for g in globs:
                    if glob.glob(path.join(root, curdir, d, g)):
                        dirglobs.append(path.join(curdir, d, g))
    return dirglobs

# The function below was shamelessly copied from setuptools
def find_packages(where='.', exclude=()):
    """Return a list of all Python packages found within directory 'where'

    'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
    will be converted to the appropriate local path syntax. 'exclude' is a
    sequence of package names to exclude; '*' can be used as a wildcard in the
    names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
    'foo' itself).
    """
    from distutils.util import convert_path
    out = []
    stack=[(convert_path(where), '')]
    while stack:
        where,prefix = stack.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where,name)
            if ('.'
not in name and os.path.isdir(fn) and os.path.isfile(os.path.join(fn,'__init__.py')) ): out.append(prefix+name); stack.append((fn,prefix+name+'.')) for pat in list(exclude)+['ez_setup']: from fnmatch import fnmatchcase out = [item for item in out if not fnmatchcase(item,pat)] return out def list_tree(target_base, root): tree = [] headlen = -1 for dirpath, dirs, files in os.walk(root): if headlen < 0: headlen = len(dirpath) - len(root) dirpath = dirpath[headlen:] tree.append(( path.join(target_base, dirpath), [path.join(dirpath, f) for f in files] )) return tree ############################################################################### # CLASSES ############################################################################### class PootleBuildMo(DistutilsBuild): def build_mo(self): """Compile .mo files from available .po files""" import subprocess import gettext from translate.storage import factory print "Preparing localization files" for po_filename in glob.glob(path.join('po', 'pootle', '*', 'pootle.po')): lang = path.split(path.split(po_filename)[0])[1] lang_dir = path.join('mo', lang, 'LC_MESSAGES') mo_filename = path.join(lang_dir, 'django.mo') try: store = factory.getobject(po_filename) gettext.c2py(store.getheaderplural()[1]) if not path.exists(lang_dir): os.makedirs(lang_dir) print "compiling %s language" % lang subprocess.Popen(['msgfmt', '-c', '--strict', '-o', mo_filename, po_filename]) except Exception, e: print "skipping %s, probably invalid header: %s" % (lang, e) def run(self): self.build_mo() class PootleBuild(DistutilsBuild): """make sure build_mo is run when build is run""" def run(self): DistutilsBuild.run(self) class PootleInstall(DistutilsInstall): def run(self): DistutilsInstall.run(self) self.update_install_dirs_py() def update_install_dirs_py(self): # Get the right target location of install_dirs.py, depending on # whether --root or --prefix was specified install_dirs_py_path = path.abspath(path.join(self.install_lib, 'pootle', 'install_dirs.py')) if not path.isfile(install_dirs_py_path): raise Exception('install_dirs.py file should exist, but does not. o_O (%s)' % (install_dirs_py_path)) conf_dir = path.abspath(path.join(self.install_base, INSTALL_CONFIG_DIR)) data_dir = path.abspath(path.join(self.install_base, INSTALL_DATA_DIR)) work_dir = path.abspath(path.join(self.install_base, INSTALL_WORKING_DIR)) #if self.root: # # We use distutils.util.change_root, because INSTALL_CONFIG_DIR # # and INSTALL_WORKING_DIR are absolute paths and stays that way when # # used with os.path.join() as above. This also means that data_dir # # should be changed here if the value # of INSTALL_DATA_DIR becomes # # an absolute path. 
# conf_dir = util.change_root(self.root, INSTALL_CONFIG_DIR) # work_dir = util.change_root(self.root, INSTALL_WORKING_DIR) # Replace directory variables in settings.py to reflect the current installation lines = open(install_dirs_py_path).readlines() config_re = re.compile(r'^CONFIG_DIR\s*=') datadir_re = re.compile(r'^DATA_DIR\s*=') workdir_re = re.compile(r'^WORKING_DIR\s*=') for i in range(len(lines)): if config_re.match(lines[i]): lines[i] = "CONFIG_DIR = '%s'\n" % (conf_dir) elif datadir_re.match(lines[i]): lines[i] = "DATA_DIR = '%s'\n" % (data_dir) elif workdir_re.match(lines[i]): lines[i] = "WORKING_DIR = '%s'\n" % (work_dir) open(install_dirs_py_path, 'w').write(''.join(lines)) ############################################################################### # MAIN ############################################################################### if __name__ == '__main__': setup( name="Pootle", version=pootle_version, license="GNU General Public License (GPL)", description=pootle_description, long_description=pootle_description_long, author="Translate.org.za", author_email="translate-devel@lists.sourceforge.net", url="http://translate.sourceforge.net/wiki/pootle/index", download_url="http://sourceforge.net/projects/translate/files/Pootle/", install_requires=["translate-toolkit>=1.5.0", "Django>=1.0"], platforms=["any"], classifiers=classifiers, cmdclass={'install': PootleInstall, 'build': PootleBuild, 'build_mo': PootleBuildMo}, **collect_options() )
gpl-2.0
-1,998,137,997,287,100,200
41.483271
122
0.588467
false
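The setup script's list_tree helper pairs each install target directory with the files found under a source tree. A standalone sketch of the same walk, with the function body copied from the script and a made-up temporary layout:

import os
import os.path as path
import tempfile

def list_tree(target_base, root):
    tree = []
    headlen = -1
    for dirpath, dirs, files in os.walk(root):
        if headlen < 0:
            headlen = len(dirpath) - len(root)
        dirpath = dirpath[headlen:]
        tree.append((path.join(target_base, dirpath),
                     [path.join(dirpath, f) for f in files]))
    return tree

base = tempfile.mkdtemp()
os.makedirs(path.join(base, 'templates', 'sub'))
open(path.join(base, 'templates', 'index.html'), 'w').close()
os.chdir(base)
print(list_tree('share/pootle', 'templates'))
# [('share/pootle/templates', ['templates/index.html']),
#  ('share/pootle/templates/sub', [])]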
SathyaBhat/spotify-dl
spotify_dl/youtube.py
1
3746
import urllib.request
from os import path

import mutagen
import youtube_dl
from mutagen.easyid3 import EasyID3
from mutagen.id3 import APIC, ID3
from mutagen.mp3 import MP3

from spotify_dl.scaffold import log
from spotify_dl.utils import sanitize


def default_filename(song):
    return sanitize(f"{song.get('artist')} - {song.get('name')}", '#')  # youtube-dl automatically replaces with #


def playlist_num_filename(song):
    return f"{song.get('playlist_num')} - {default_filename(song)}"


def download_songs(songs, download_directory, format_string, skip_mp3,
                   keep_playlist_order=False, file_name_f=default_filename):
    """
    Downloads songs from YouTube to either the current directory or
    download_directory, if it is passed.
    :param songs: List of dictionaries, each describing a song and its artist
    :param download_directory: Location where to save
    :param format_string: format string for the file conversion
    :param skip_mp3: Whether to skip conversion to MP3
    :param keep_playlist_order: Whether to keep the original playlist ordering. Also prefixes song files with the playlist number
    :param file_name_f: optional func(song) -> str that returns a filename for the download (without extension)
    """
    log.debug(f"Downloading to {download_directory}")
    for song in songs:
        query = f"{song.get('artist')} - {song.get('name')} Lyrics".replace(":", "").replace("\"", "")
        download_archive = path.join(download_directory, 'downloaded_songs.txt')
        file_name = file_name_f(song)
        file_path = path.join(download_directory, file_name)

        outtmpl = f"{file_path}.%(ext)s"
        ydl_opts = {
            'format': format_string,
            'download_archive': download_archive,
            'outtmpl': outtmpl,
            'default_search': 'ytsearch',
            'noplaylist': True,
            'postprocessor_args': ['-metadata', 'title=' + song.get('name'),
                                   '-metadata', 'artist=' + song.get('artist'),
                                   '-metadata', 'album=' + song.get('album')]
        }
        if not skip_mp3:
            mp3_postprocess_opts = {
                'key': 'FFmpegExtractAudio',
                'preferredcodec': 'mp3',
                'preferredquality': '192',
            }
            ydl_opts['postprocessors'] = [mp3_postprocess_opts.copy()]

        with youtube_dl.YoutubeDL(ydl_opts) as ydl:
            try:
                ydl.download([query])
            except Exception as e:
                log.debug(e)
                print('Failed to download: {}, please ensure YouTubeDL is up-to-date. '.format(query))
                continue

        if not skip_mp3:
            try:
                song_file = MP3(path.join(f"{file_path}.mp3"), ID3=EasyID3)
            except mutagen.MutagenError as e:
                log.debug(e)
                print('Failed to download: {}, please ensure YouTubeDL is up-to-date. '.format(query))
                continue
            song_file['date'] = song.get('year')
            if keep_playlist_order:
                song_file['tracknumber'] = str(song.get('playlist_num'))
            else:
                song_file['tracknumber'] = str(song.get('num')) + '/' + str(song.get('num_tracks'))
            song_file['genre'] = song.get('genre')
            song_file.save()

            song_file = MP3(f"{file_path}.mp3", ID3=ID3)
            if song.get('cover') is not None:
                song_file.tags['APIC'] = APIC(
                    encoding=3,
                    mime='image/jpeg',
                    type=3,
                    desc=u'Cover',
                    data=urllib.request.urlopen(song.get('cover')).read()
                )
            song_file.save()
mit
-4,171,133,412,178,890,000
40.622222
120
0.571543
false
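download_songs above expects each song as a dict with at least artist, name and album plus the numbering fields it tags with. A sketch of the two filename strategies with a stand-in sanitize (the real one lives in spotify_dl.utils and is not shown in this file):

def sanitize(name, replacement='#'):
    # stand-in: assumes the real helper replaces filesystem-hostile characters
    for ch in '/\\:*?"<>|':
        name = name.replace(ch, replacement)
    return name

song = {'artist': 'AC/DC', 'name': 'T.N.T.', 'album': 'High Voltage', 'playlist_num': 4}
default_name = sanitize(f"{song.get('artist')} - {song.get('name')}", '#')
print(default_name)                                    # AC#DC - T.N.T.
print(f"{song.get('playlist_num')} - {default_name}")  # 4 - AC#DC - T.N.T.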
1200wd/1200wd_addons
account_bank_match/__openerp__.py
1
2272
# -*- coding: utf-8 -*- ############################################################################## # # Account Bank Match # Copyright (C) 2016 May # 1200 Web Development # http://1200wd.com/ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': "Account Bank Match", 'summary': """Match bank statement lines to sale orders or invoices""", 'description': """ Match a bank statement line with an existing sales or purchase invoice or sale order. Based on extraction of a Sale Order or Invoice reference, payment amount, bank account number, date or any other rule you can define yourself. If one correct match is found automatically link and reconcile the statement line. If more matches are found you can select the best option from a list with probabilities. """, 'author': "1200 Web Development", 'website': "http://1200wd.com", 'category': 'Accounting & Finance', 'version': '8.0.1.4', 'depends': [ 'account_bank_statement_advanced', 'account_bank_statement_import', 'sale', ], 'data': [ 'security/ir.model.access.csv', 'views/res_config_view.xml', 'views/account_bank_match.xml', 'views/account_bank_statement_view.xml', 'views/account_journal.xml', 'data/account_bank_match_rule.xml', 'data/account_bank_match_cron.xml', 'data/account_journal.xml', ], 'price': 0.00, 'currency': 'EUR', 'demo': [], 'installable': True, 'auto_install': False, 'application': False, }
agpl-3.0
-5,681,885,919,968,691,000
38.172414
171
0.617958
false
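The matching strategy the description sketches — extract a sale order or invoice reference from the statement line and look it up among candidate records — reduces to a few lines in its simplest form; the reference pattern and the candidate records below are invented for illustration only:

import re

statement_line = "SEPA CREDIT 125.00 EUR ref SO1042 thank you"
candidates = {'SO1042': 'sale.order', 'INV/2016/0007': 'account.invoice'}

# pull out anything that looks like an order or invoice reference
match = re.search(r'\b(SO\d+|INV/\d{4}/\d{4})\b', statement_line)
if match and match.group(1) in candidates:
    print('matched %s %s' % (candidates[match.group(1)], match.group(1)))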
Debian/dput-ng
dput/uploaders/sftp.py
1
10474
# -*- coding: utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4

# Copyright (c) 2012 dput authors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""
SFTP Uploader implementation
"""

import paramiko
import socket
import os
import errno
import pwd
import os.path
from binascii import hexlify

from dput.core import logger
from dput.uploader import AbstractUploader
from dput.exceptions import UploadException


class SftpUploadException(UploadException):
    """
    Thrown in the event of a problem connecting, uploading to or
    terminating the connection with the remote server. This is
    a subclass of :class:`dput.exceptions.UploadException`.
    """
    pass


def check_paramiko_version(req):
    """
    Return whether paramiko satisfies the given version requirement (``req``).
    """
    try:
        # Prefer __version_info__ over parsing __version__ on our own although
        # it is missing in some releases of paramiko.
        version_info = paramiko.__version_info__
    except AttributeError:
        version_info = tuple(int(value)
                             for value in paramiko.__version__.split('.'))
    return version_info >= req


def find_username(conf):
    """
    Given a profile (``conf``), return the preferred username to login
    with. It falls back to getting the logged in user's name.
    """
    user = pwd.getpwuid(os.getuid()).pw_name
    if 'login' in conf:
        new_user = conf['login']
        if new_user != "*":
            user = new_user
    if not user:
        raise SftpUploadException(
            "No user to upload could be retrieved. "
            "Please set 'login' explicitly in your profile"
        )
    return user


class AskToAccept(paramiko.AutoAddPolicy):
    """
    Paramiko policy to automatically add the hostname, but only after asking.
    """
    def __init__(self, uploader):
        super(AskToAccept, self).__init__()
        self.uploader = uploader

    def missing_host_key(self, client, hostname, key):
        accept = self.uploader.interface.boolean(
            title='please login',
            message='To accept %s hostkey %s for %s type "yes":' % (
                key.get_name(),
                hexlify(key.get_fingerprint()),
                hostname
            )
        )
        if accept:
            super(AskToAccept, self).missing_host_key(client, hostname, key)
        else:
            raise paramiko.SSHException('Unknown server %s' % hostname)


class SFTPUploader(AbstractUploader):
    """
    Provides an interface to upload files through SFTP.

    This is a subclass of :class:`dput.uploader.AbstractUploader`
    """

    def initialize(self, **kwargs):
        """
        See :meth:`dput.uploader.AbstractUploader.initialize`
        """
        fqdn = self._config['fqdn']
        incoming = self._config['incoming']

        self.sftp_config = {}
        if "sftp" in self._config:
            self.sftp_config = self._config['sftp']

        self.putargs = {'confirm': False}
        if "confirm_upload" in self.sftp_config:
            self.putargs['confirm'] = self.sftp_config['confirm_upload']

        if incoming.startswith('~/'):
            logger.warning("SFTP does not support ~/path, continuing with "
                           "relative directory name instead.")
            incoming = incoming[2:]
        # elif incoming.startswith('~') and not self.host_is_launchpad:
        #    raise SftpUploadException("SFTP doesn't support ~path. "
        #                              "if you need $HOME paths, use SCP.")
        # XXX: What to do here?? - PRT

        ssh_kwargs = {
            "port": 22,
            "compress": True
        }  # XXX: Timeout override

        if 'port' in self._config:
            ssh_kwargs['port'] = self._config['port']

        if 'scp_compress' in self._config:
            ssh_kwargs['compress'] = self._config['scp_compress']

        config = paramiko.SSHConfig()
        if os.path.exists('/etc/ssh/ssh_config'):
            config.parse(open('/etc/ssh/ssh_config'))
        if os.path.exists(os.path.expanduser('~/.ssh/config')):
            config.parse(open(os.path.expanduser('~/.ssh/config')))
        o = config.lookup(fqdn)

        user = find_username(self._config)
        if "user" in o:
            user = o['user']
        ssh_kwargs['username'] = user

        if 'identityfile' in o:
            if check_paramiko_version((1, 10)):
                # Starting with paramiko 1.10 identityfile is always a list.
                pkey = [os.path.expanduser(path) for path in o['identityfile']]
            else:
                pkey = os.path.expanduser(o['identityfile'])
            ssh_kwargs['key_filename'] = pkey

        logger.info("Logging into host %s as %s" % (fqdn, user))
        self._sshclient = paramiko.SSHClient()
        if 'globalknownhostsfile' in o:
            for gkhf in o['globalknownhostsfile'].split():
                if os.path.isfile(gkhf):
                    self._sshclient.load_system_host_keys(gkhf)
        else:
            files = [
                "/etc/ssh/ssh_known_hosts",
                "/etc/ssh/ssh_known_hosts2"
            ]
            for fpath in files:
                if os.path.isfile(fpath):
                    self._sshclient.load_system_host_keys(fpath)
        if 'userknownhostsfile' in o:
            for u in o['userknownhostsfile'].split():
                # actually, ssh supports a bit more than ~/,
                # but that would be a task for paramiko...
                ukhf = os.path.expanduser(u)
                if os.path.isfile(ukhf):
                    self._sshclient.load_host_keys(ukhf)
        else:
            for u in ['~/.ssh/known_hosts2', '~/.ssh/known_hosts']:
                ukhf = os.path.expanduser(u)
                if os.path.isfile(ukhf):
                    # Ideally, that should be load_host_keys,
                    # so that the known_hosts file can be written
                    # again. But paramiko can destroy the contents
                    # or parts of it, so no writing by using
                    # load_system_host_keys here, too:
                    self._sshclient.load_system_host_keys(ukhf)
        self._sshclient.set_missing_host_key_policy(AskToAccept(self))

        self._auth(fqdn, ssh_kwargs)
        try:
            self._sftp = self._sshclient.open_sftp()
        except paramiko.SSHException as e:
            raise SftpUploadException(
                "Error opening SFTP channel to %s (perhaps sftp is "
                "disabled there?): %s" % (
                    fqdn,
                    repr(e)
                )
            )
        # logger.debug("Changing directory to %s" % (incoming))
        # self._sftp.chdir(incoming)
        try:
            self._sftp.stat(incoming)
        except IOError as e:
            # launchpad does not support any operations to check if a directory
            # exists. stat will fail with an IOError with errno equal to None.
            if e.errno is None:
                logger.debug(
                    "Failed to stat incoming directory %s on %s. This should "
                    "only happen on launchpad."
% ( incoming, fqdn ) ) else: raise SftpUploadException( "Failed to stat incoming directory %s on %s: %s" % ( incoming, fqdn, e.strerror ) ) except paramiko.SSHException as e: raise SftpUploadException("SFTP error uploading to %s: %s" % ( fqdn, repr(e) )) self.incoming = incoming def _auth(self, fqdn, ssh_kwargs, _first=0): if _first == 3: raise SftpUploadException("Failed to authenticate") try: self._sshclient.connect(fqdn, **ssh_kwargs) logger.debug("Logged in!") except socket.error as e: raise SftpUploadException("SFTP error uploading to %s: %s" % ( fqdn, repr(e) )) except paramiko.AuthenticationException: logger.warning("Failed to auth. Prompting for a login pair.") # XXX: Ask for pw only user = self.interface.question('please login', 'Username') # 4 first error pw = self.interface.password(None, "Password") if user is not None: ssh_kwargs['username'] = user ssh_kwargs['password'] = pw self._auth(fqdn, ssh_kwargs, _first=_first + 1) except paramiko.SSHException as e: raise SftpUploadException("SFTP error uploading to %s: %s" % ( fqdn, repr(e) )) def upload_file(self, filename, upload_filename=None): """ See :meth:`dput.uploader.AbstractUploader.upload_file` """ if not upload_filename: upload_filename = os.path.basename(filename) upload_filename = os.path.join(self.incoming, upload_filename) logger.debug("Writing to: %s" % (upload_filename)) try: self._sftp.put(filename, upload_filename, **self.putargs) except IOError as e: if e.errno == errno.EACCES: self.upload_write_error(e) else: raise SftpUploadException("Could not upload file %s: %s" % ( filename, e )) def shutdown(self): """ See :meth:`dput.uploader.AbstractUploader.shutdown` """ self._sshclient.close() self._sftp.close()
gpl-2.0
-8,902,492,800,630,426,000
34.0301
80
0.555184
false
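Stripped of profile lookup and host-key handling, the upload path in SFTPUploader comes down to a handful of paramiko calls; the host, user and file names below are placeholders:

import paramiko

client = paramiko.SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())  # the uploader above asks first instead
client.connect('upload.example.org', port=22, username='uploader', compress=True)
sftp = client.open_sftp()
sftp.put('foo_1.0.dsc', 'incoming/foo_1.0.dsc', confirm=False)  # confirm mirrors confirm_upload
sftp.close()
client.close()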
artkolev/QuizBot
src/bot.py
1
16410
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import Skype4Py
import datetime
import time
import random
import logging
import sqlite3
from hashlib import sha1
from collections import deque

FORMAT=u'%(name)s %(thread)d %(levelname)s: %(message)s'
logging.basicConfig(format=FORMAT, level=logging.INFO)
logging.getLogger('').setLevel(logging.INFO)
logging.getLogger('app').setLevel(logging.INFO)
logging.getLogger('handlers').setLevel(logging.INFO)
logging.getLogger('SkypeMessenger').setLevel(logging.INFO)
log = logging.getLogger('app')


class QuizBot:
    # Bot initialization
    def __init__(self):
        log.info("Starting application...")
        # Default values for the variables
        # Start date
        self.start = datetime.datetime.now()
        # Questions database file
        self.bot_db = "QuizBot.db"
        # Allowed chats
        self.listen_chats = [u'рабочие вопросы', u'семейная викторина']
        # Period during which a question is not repeated
        self.quest_between = "-2 hours"
        # Delay before the second hint (seconds)
        self.hint_timeout = 15
        # Delay before the answer is revealed
        self.answer_timeout = 25
        # The bot is not stopped
        self.running = False
        # Quiz activity
        self.listen = []
        # Database connection
        self.db_conn = None
        # Database cursor
        self.db_cur = None
        # Current question
        self.current_question = {}
        # Current hint
        self.current_hint = {}
        # Current answer
        self.current_answer = {}
        # Guard against repeated messages
        self.last_message = {}
        # Received context
        self.context = ""
        # Task queue
        self.stack = deque([])
        # Skype instance
        self.skype = Skype4Py.Skype()
        # Is the client running?
        if not self.skype.Client.IsRunning:
            self.skype.Client.Start()
        # Attach to the running Skype instance
        self.skype.Attach()
        # Message receive event
        self.skype.OnMessageStatus = self.run_action

    def run(self):
        self.running = True
        log.info("Now run!")
        while self.running:
            try:
                if len(self.stack):
                    action = self.stack.popleft()
                    if action["time"]<=datetime.datetime.now():
                        chat = self.skype.Chat(action["chat"])
                        if chat.Name in self.listen\
                                and chat.Name in self.current_question:
                            hash_key = sha1(u"hash:{0}:{1}".format(
                                self.current_question[chat.Name],
                                self.current_answer[chat.Name])\
                                .encode('utf-8')).hexdigest()
                            if hash_key == action["hash"]:
                                if action["action"] == "answer":
                                    chat.SendMessage(u"Правильный ответ: {0}"\
                                        .format(self.current_answer[chat.Name]))
                                    del(self.current_question[chat.Name])
                                    del(self.current_hint[chat.Name])
                                    del(self.current_answer[chat.Name])
                                    self.stack.append({"time": datetime.datetime.now() +
                                            datetime.timedelta(seconds=2),
                                        "action": 'new_question',
                                        "chat": self.context.Chat.Name})
                                elif action["action"] == "hint":
                                    len_answer = \
                                        int(len(self.current_answer[chat.Name]))
                                    len_hint = int(1.5 * len_answer / 4)
                                    hint = u'{}{}{}'.format(
                                        self.current_answer[chat.Name][:len_hint],
                                        '*'*(len_answer-len_hint*2),
                                        self.current_answer[chat.Name][-len_hint:])
                                    chat.SendMessage(u"Подсказка: {0}"\
                                        .format(hint))
                        else:
                            if action["action"] == "new_question":
                                self.new_question()
                    else:
                        self.stack.append(action)
                time.sleep(0.5)
            except KeyboardInterrupt:
                self.shutdown()

    def shutdown(self):
        log.debug("Disconnecting...")
        self.stop_quiz()
        log.info("Now shutdown...")
        if self.db_conn and self.db_cur:
            self.db_disconnect()
        self.running = False
        work_time = datetime.datetime.now() - self.start
        log.info("Work time: %s" % work_time)
        return True

    def db_connect(self):
        if not self.db_conn and not self.db_cur:
            log.debug("Connect to database...")
            self.db_conn = sqlite3.connect(self.bot_db)
            log.debug("Get cursor")
            self.db_cur = self.db_conn.cursor()
        else:
            log.error("Database connect already exists...")

    def db_disconnect(self):
        if self.db_conn and self.db_cur:
            log.debug("Commit to database...")
            self.db_conn.commit()
            log.debug("Close connection...")
            self.db_conn.close()
            self.db_cur = None
            self.db_conn = None
        else:
            log.error("No database connect...")

    def run_action(self, message, status):
        if message.Chat.FriendlyName.lower() in self.listen_chats and \
                not (message.Sender.Handle in self.last_message and\
                self.last_message[message.Sender.Handle] == message.Body) and \
                (status == 'SENT' or status == 'RECEIVED'):
            log.info(u"Action: '{0}' Message: '{1} ({2}): {3}'".format(status,
                message.Sender.Handle, message.Chat.FriendlyName, message.Body))
            self.last_message[message.Sender.Handle] = message.Body
            command = message.Body.split(' ')[0].lower()
            if command in self.functions:
                self.context = message
                self.functions[command](self)
            elif self.context.Chat.Name in self.listen:
                self.parse_answer(message)
        else:
            log.debug(u"Action: '{0}' Message: '{1} ({2}): {3}'".format(status,
                message.Sender.Handle, message.Chat.FriendlyName, message.Body))

    def new_question(self):
        self.db_connect()
        new_quest = self.db_cur.execute("""SELECT question, answer FROM questions
            WHERE last_show < strftime('%s','now','{0}')
            ORDER BY RANDOM() LIMIT 1""".format(self.quest_between)).fetchone()
        if new_quest:
            self.current_question[self.context.Chat.Name] = new_quest[0]
            self.current_answer[self.context.Chat.Name] = new_quest[1]
            hint = u'{}{}{}'.format(new_quest[1][0],
                '*'*(len(new_quest[1])-2), new_quest[1][-1])
            self.current_hint[self.context.Chat.Name] = hint
            self.context.Chat.SendMessage(u'Новый вопрос: {}'.format(new_quest[0]))
            time.sleep(0.5)
            self.context.Chat.SendMessage(u'/me Подсказка: {}'.format(hint))
            self.db_cur.execute(u"""UPDATE questions SET last_show = strftime('%s','now')
                WHERE question = '{0}' AND answer = '{1}'""".format(
                new_quest[0], new_quest[1]))
            hash_key = sha1(u"hash:{0}:{1}".format(new_quest[0],new_quest[1])\
                .encode('utf-8')).hexdigest()
            self.stack.append({"time": datetime.datetime.now() +
                    datetime.timedelta(seconds=self.hint_timeout),
                "action": 'hint',
                "chat": self.context.Chat.Name,
                "hash": hash_key})
            self.stack.append({"time": datetime.datetime.now() +
                    datetime.timedelta(seconds=self.answer_timeout),
                "action": 'answer',
                "chat": self.context.Chat.Name,
                "hash": hash_key})
        else:
            self.context.Chat.SendMessage(u'Вопросы кончились.')
            self.stop_quiz()
        self.db_disconnect()

    def start_quiz(self):
        if not self.context.Chat.Name in self.listen:
            log.info("Starting quiz...")
            self.context.Chat.SendMessage(u'/me Запускаем викторину!')
            self.db_connect()
            count_quest = self.db_cur.execute("SELECT COUNT(*) FROM questions")\
                .fetchone()
            self.db_disconnect()
            if count_quest:
                self.listen.append(self.context.Chat.Name)
                self.context.Chat.SendMessage(u'/me Вопросы загружены. В базе \
%s вопросов' % count_quest)
                self.stack.append({"time": datetime.datetime.now(),
                    "action": 'new_question',
                    "chat": self.context.Chat.Name})
            else:
                self.context.Chat.SendMessage(u'/me Вопросов в базе не найдено')
        else:
            self.context.Chat.SendMessage(u'Викторина уже запущена!!! \
Не стоит паниковать.')

    def stop_quiz(self):
        if self.listen:
            log.info("Stopping quiz...")
            self.listen = []
            self.context.Chat.SendMessage(u'/me Викторина остановлена!')

    def parse_answer(self, message):
        if self.context.Chat.Name in self.listen\
                and message.Chat.Name in self.current_answer \
                and message.Body.lower() == self.current_answer[message.Chat.Name].lower():
            self.listen.remove(self.context.Chat.Name)
            del(self.current_question[self.context.Chat.Name])
            del(self.current_hint[self.context.Chat.Name])
            log.info(u"Correct answer '{0}' from user {1}'".format(message.Body,
                message.Sender.Handle))
            self.db_connect()
            user_points = self.db_cur.execute("""SELECT points FROM leaders
                WHERE name = '{0}' AND chat = '{1}'"""\
                .format(message.Sender.Handle, message.Chat.Name)).fetchone()
            if user_points and user_points[0]:
                user_points = int(user_points[0]) + 1
                message.Chat.SendMessage(u"/me {0}, правильно!!! Ответ '{1}'. У тебя {2} очков."\
                    .format(message.Sender.Handle,
                        self.current_answer[message.Chat.Name], user_points))
                del(self.current_answer[self.context.Chat.Name])
                self.db_cur.execute("""UPDATE leaders SET points = {0}
                    WHERE name = '{1}' AND chat = '{2}'""".format(user_points,
                    message.Sender.Handle, message.Chat.Name))
            else:
                user_points = 1
                message.Chat.SendMessage(u"/me {0}, правильно!!! Ответ '{1}'. У тебя первое очко."\
                    .format(message.Sender.Handle,
                        self.current_answer[message.Chat.Name]))
                self.db_cur.execute("""INSERT INTO leaders(name, points, chat)
                    VALUES ('{0}', 1, '{1}')"""\
                    .format(message.Sender.Handle, message.Chat.Name))
            self.db_disconnect()
            self.listen.append(self.context.Chat.Name)
            self.stack.append({"time": datetime.datetime.now(),
                "action": 'new_question',
                "chat": self.context.Chat.Name})

    def next_answer(self):
        if self.context.Chat.Name in self.listen\
                and self.context.Chat.Name in self.current_answer:
            log.info(u"Next answer from user {0}'".format(self.context.Sender\
                .Handle))
            self.context.Chat.SendMessage(
                u"/me Пользователь {0} пропустил вопрос. Правильный ответ был '{1}'"\
                .format(self.context.Sender.Handle,
                    self.current_answer[self.context.Chat.Name]))
            del(self.current_question[self.context.Chat.Name])
            del(self.current_hint[self.context.Chat.Name])
            del(self.current_answer[self.context.Chat.Name])
            self.stack.append({"time": datetime.datetime.now() +
                    datetime.timedelta(seconds=2),
                "action": 'new_question',
                "chat": self.context.Chat.Name})

    def show_hint(self):
        if self.context.Chat.Name in self.listen:
            self.context.Chat.SendMessage(u'/me Подсказка: {0}'.format(
                self.current_hint[self.context.Chat.Name]))

    def show_top10(self):
        self.db_connect()
        leaderboard = self.db_cur.execute("""SELECT name, points FROM leaders
            WHERE chat = '{0}' ORDER BY points DESC LIMIT 0,10"""\
            .format(self.context.Chat.Name)).fetchall()
        if len(leaderboard):
            self.context.Chat.SendMessage(u"Топ-10 лидеров:")
            time.sleep(1)
            i = 1
            for name, points in leaderboard:
                self.context.Chat.SendMessage(u"/me {0}. {1} - {2}"\
                    .format(i, name, points))
                i+=1
                time.sleep(0.5)
            i = None
        else:
            self.context.Chat.SendMessage(u"/me Лидеров еще нет")
        self.db_disconnect()

    # Supported commands
    functions = {"!start": start_quiz, u"!старт": start_quiz,
                 "!stop": stop_quiz, u"!стоп": stop_quiz,
                 "!next": next_answer, u"!далее": next_answer,
                 "!hint": show_hint, u"!подсказка": show_hint,
                 "!top": show_top10, u"!топ": show_top10,
                 }


if __name__ == "__main__":
    quiz_cis = QuizBot()
    quiz_cis.run()
gpl-2.0
7,940,548,997,084,515,000
44.736842
99
0.461194
false
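The bot's two hint formats (first and last character when the question is asked, then int(1.5 * len / 4) characters on each end when the timed hint fires) are easier to read in isolation; the answer string is arbitrary:

def first_hint(answer):
    # first and last characters, stars in between
    return '{}{}{}'.format(answer[0], '*' * (len(answer) - 2), answer[-1])

def second_hint(answer):
    # reveal int(1.5 * len / 4) characters on each end, as the run loop does
    len_hint = int(1.5 * len(answer) / 4)
    return '{}{}{}'.format(answer[:len_hint],
                           '*' * (len(answer) - 2 * len_hint),
                           answer[-len_hint:])

print(first_hint('guitar'))   # g****r
print(second_hint('guitar'))  # gu**ar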
yugangw-msft/azure-cli
src/azure-cli/azure/cli/command_modules/netappfiles/tests/latest/test_account_backup_commands.py
1
7184
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer import time LOCATION = "southcentralusstage" VNET_LOCATION = "southcentralus" class AzureNetAppFilesAccountBackupServiceScenarioTest(ScenarioTest): def setup_vnet(self, vnet_name, subnet_name): self.cmd("az network vnet create -n %s -g {rg} -l %s --address-prefix 10.5.0.0/16" % (vnet_name, VNET_LOCATION)) self.cmd("az network vnet subnet create -n %s --vnet-name %s --address-prefixes '10.5.0.0/24' " "--delegations 'Microsoft.Netapp/volumes' -g {rg}" % (subnet_name, vnet_name)) def create_volume(self, account_name, pool_name, volume_name, volume_only=False, backup_id=None): vnet_name = self.create_random_name(prefix='cli-vnet-backup', length=24) subnet_name = "default" if not volume_only: # create vnet, account and pool self.setup_vnet(vnet_name, subnet_name) self.cmd("netappfiles account create -g {rg} -a '%s' -l %s" % (account_name, LOCATION)) self.cmd("netappfiles pool create -g {rg} -a %s -p %s -l %s --service-level 'Premium' --size 4" % (account_name, pool_name, LOCATION)) # create volume if backup_id is None: return self.cmd("netappfiles volume create -g {rg} -a %s -p %s -v %s -l %s --vnet %s --subnet %s " "--file-path %s --usage-threshold 100" % (account_name, pool_name, volume_name, LOCATION, vnet_name, subnet_name, volume_name) ).get_output_in_json() else: return self.cmd("netappfiles volume create -g {rg} -a %s -p %s -v %s -l %s --vnet %s --subnet %s " "--file-path %s --usage-threshold 100 --backup-id %s" % (account_name, pool_name, volume_name, LOCATION, vnet_name, subnet_name, volume_name, backup_id)).get_output_in_json() def create_backup(self, account_name, pool_name, volume_name, backup_name, backup_only=False): if not backup_only: # create account, pool and volume self.create_volume(account_name, pool_name, volume_name) # get vault vaults = self.get_vaults(account_name) # volume update with backup policy self.cmd("az netappfiles volume update -g {rg} -a %s -p %s -v %s --vault-id %s --backup-enabled %s " % (account_name, pool_name, volume_name, vaults[0]['id'], True)) # create backup return self.cmd("az netappfiles volume backup create -g {rg} -a %s -p %s -v %s -l %s --backup-name %s" % (account_name, pool_name, volume_name, LOCATION, backup_name)).get_output_in_json() def delete_backup(self, account_name, pool_name, volume_name): vaults = self.get_vaults(account_name) # Delete self.cmd("az netappfiles volume update -g {rg} -a %s -p %s -v %s --vault-id %s --backup-enabled %s " % (account_name, pool_name, volume_name, vaults[0]['id'], False)) def get_vaults(self, account_name): return self.cmd("az netappfiles vault list -g {rg} -a %s" % account_name).get_output_in_json() def wait_for_backup_created(self, account_name, pool_name, volume_name, backup_name): attempts = 0 while attempts < 40: attempts += 1 backup = self.cmd("netappfiles volume backup show -g {rg} -a %s -p %s -v %s -b %s" % (account_name, pool_name, volume_name, backup_name)).get_output_in_json() if backup['provisioningState'] != "Creating": break if self.is_live or self.in_recording: time.sleep(60) @ResourceGroupPreparer(name_prefix='cli_netappfiles_test_account_backup_') def test_list_account_backups(self): # 
create backup account_name = self.create_random_name(prefix='cli-acc-', length=24) pool_name = self.create_random_name(prefix='cli-pool-', length=24) volume_name = self.create_random_name(prefix='cli-vol-', length=24) backup_name = self.create_random_name(prefix='cli-backup-', length=24) self.create_backup(account_name, pool_name, volume_name, backup_name) backup_list = self.cmd("az netappfiles account backup list -g {rg} -a %s" % account_name).get_output_in_json() assert len(backup_list) == 1 self.wait_for_backup_created(account_name, pool_name, volume_name, backup_name) self.delete_backup(account_name, pool_name, volume_name) @ResourceGroupPreparer(name_prefix='cli_netappfiles_test_account_backup_') def test_get_account_backup(self): # create backup account_name = self.create_random_name(prefix='cli-acc-', length=24) pool_name = self.create_random_name(prefix='cli-pool-', length=24) volume_name = self.create_random_name(prefix='cli-vol-', length=24) backup_name = self.create_random_name(prefix='cli-backup-', length=24) self.create_backup(account_name, pool_name, volume_name, backup_name) backup = self.cmd("az netappfiles account backup show -g {rg} -a %s --backup-name %s" % (account_name, backup_name)).get_output_in_json() assert backup['name'] == account_name + "/" + backup_name self.wait_for_backup_created(account_name, pool_name, volume_name, backup_name) self.delete_backup(account_name, pool_name, volume_name) @ResourceGroupPreparer(name_prefix='cli_netappfiles_test_account_backup_') def test_delete_account_backup(self): # create backup account_name = self.create_random_name(prefix='cli-acc-', length=24) pool_name = self.create_random_name(prefix='cli-pool-', length=24) volume_name = self.create_random_name(prefix='cli-vol-', length=24) backup_name = self.create_random_name(prefix='cli-backup-', length=24) self.create_backup(account_name, pool_name, volume_name, backup_name) self.wait_for_backup_created(account_name, pool_name, volume_name, backup_name) backup_list = self.cmd("az netappfiles account backup list -g {rg} -a %s" % account_name).get_output_in_json() assert len(backup_list) == 1 self.cmd("az netappfiles volume delete -g {rg} -a %s -p %s -v %s" % (account_name, pool_name, volume_name)) self.cmd("az netappfiles account backup delete -g {rg} -a %s --backup-name %s -y" % (account_name, backup_name)) backup_list = self.cmd("az netappfiles account backup list -g {rg} -a %s" % account_name).get_output_in_json() for backup in backup_list['value']: assert backup['name'] != account_name + "/" + backup_name
mit
8,064,734,338,689,275,000
53.424242
120
0.596882
false
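The pattern used throughout the scenario test above, in its smallest form: ResourceGroupPreparer creates a resource group and substitutes {rg} into commands run through self.cmd, and get_output_in_json() parses the CLI output. This skeleton is illustrative, not part of the suite:

from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer


class MinimalNetAppCase(ScenarioTest):

    @ResourceGroupPreparer(name_prefix='cli_netappfiles_demo_')
    def test_account_roundtrip(self):
        account_name = self.create_random_name(prefix='cli-acc-', length=24)
        self.cmd("az netappfiles account create -g {rg} -a '%s' -l southcentralusstage" % account_name)
        accounts = self.cmd("az netappfiles account list -g {rg}").get_output_in_json()
        assert any(a['name'] == account_name for a in accounts)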
salceson/PJN
lab8/main.py
1
6484
# coding: utf-8 import pickle import sys from heapq import nlargest, nsmallest from operator import itemgetter from pprint import pprint from gensim import corpora, models from utils import calculate_lda, calculate_lsi, cosine_metric, preprocess_data, create_tfidf __author__ = "Michał Ciołczyk" _DATA_FILE = 'data/pap.txt' _ENCODING = 'utf-8' _ACTIONS = ['preprocess', 'preparetfidf', 'preparelsa', 'preparelda', 'notes', 'tfidf', 'topicslsa', 'topicslda', 'similarlsa', 'similarlda'] _SIMILAR_THRESHOLD = 0.4 def _usage(argv): print("Usage: python %s <action>" % argv[0]) print("\tWhere action is one of: %s" % repr(_ACTIONS)) exit(1) if __name__ == '__main__': if len(sys.argv) != 2: _usage(sys.argv) action = sys.argv[1] if action not in _ACTIONS: _usage(sys.argv) if action == 'preprocess': preprocess_data(_DATA_FILE, _ENCODING) if action == 'preparetfidf': create_tfidf() if action == 'preparelsa': calculate_lsi() if action == 'preparelda': calculate_lda() if action == 'notes': print('Reading notes...') with open('data/notes.dat', 'rb') as f: data = pickle.loads(f.read()) while True: try: index = int(input('Enter note number (ctrl+d to end program): ')) print(data[index]) print() except (ValueError, KeyError): continue except (KeyboardInterrupt, EOFError): print() exit(0) if action == 'tfidf': print('Reading tf-idf and dictionary...') with open('data/tf-idf.dat', 'rb') as f: tfidf = pickle.loads(f.read()) dictionary = corpora.Dictionary.load('data/dictionary.dat') print('Done') while True: try: index = int(input('Enter note number (ctrl+d to end program): ')) doc_tf_idf = [(dictionary[id], freq) for id, freq in tfidf[index]] print(doc_tf_idf) print() except (ValueError, KeyError): continue except (KeyboardInterrupt, EOFError): print() exit(0) if action == 'topicslsa': print('Reading LSA model and tf-idf...') lsi_model = models.LsiModel.load('data/lsi.dat') with open('data/tf-idf.dat', 'rb') as f: tfidf = pickle.loads(f.read()) print('Done') while True: try: index = int(input('Enter note number (ctrl+d to end program): ')) doc_tf_idf = tfidf[index] doc_projection = lsi_model[doc_tf_idf] topics = [(lsi_model.show_topic(x), weight) for x, weight in nlargest(10, doc_projection, key=itemgetter(1))] pprint(topics) print() except (ValueError, KeyError): continue except (KeyboardInterrupt, EOFError): print() exit(0) if action == 'topicslda': print('Reading LDA model and tf-idf...') lda_model = models.LdaModel.load('data/lda.dat') with open('data/tf-idf.dat', 'rb') as f: tfidf = pickle.loads(f.read()) print('Done') while True: try: index = int(input('Enter note number (ctrl+d to end program): ')) doc_tf_idf = tfidf[index] doc_projection = lda_model[doc_tf_idf] topics = [(lda_model.show_topic(x), weight) for x, weight in nlargest(10, doc_projection, key=itemgetter(1))] pprint(topics) print() except (ValueError, KeyError): continue except (KeyboardInterrupt, EOFError): print() exit(0) if action == 'similarlsa': print('Reading LSA model and tf-idf...') lsa_model = models.LsiModel.load('data/lsi.dat') with open('data/tf-idf.dat', 'rb') as f: tfidf = pickle.loads(f.read()) print('Done') print('Projecting tf-idf onto LSA...') lsa_projections = lsa_model[tfidf] print('Done') while True: try: index = int(input('Enter note number (ctrl+d to end program): ')) doc_tf_idf = tfidf[index] doc_projection = lsa_projections[index] docs_similarities = [(i, cosine_metric(doc_projection, p)) for i, p in enumerate(lsa_projections) if i != index and cosine_metric(doc_projection, p) < _SIMILAR_THRESHOLD] max_similarities = 
nsmallest(10, docs_similarities, key=itemgetter(1)) print('10 most similar notes:') print(', '.join(['%d: %.2f%%' % (i, s * 100) for i, s in max_similarities])) print() except (ValueError, KeyError): continue except (KeyboardInterrupt, EOFError): print() exit(0) if action == 'similarlda': print('Reading LDA model and tf-idf...') lda_model = models.LdaModel.load('data/lda.dat') with open('data/tf-idf.dat', 'rb') as f: tfidf = pickle.loads(f.read()) print('Done') print('Projecting tf-idf onto LDA...') lda_projections = lda_model[tfidf] print('Done') while True: try: index = int(input('Enter note number (ctrl+d to end program): ')) doc_tf_idf = tfidf[index] doc_projection = lda_projections[index] docs_similarities = [(i, cosine_metric(doc_projection, p)) for i, p in enumerate(lda_projections) if i != index and cosine_metric(doc_projection, p) < _SIMILAR_THRESHOLD] max_similarities = nsmallest(10, docs_similarities, key=itemgetter(1)) print('10 most similar notes:') print(', '.join(['%d: %.2f%%' % (i, s * 100) for i, s in max_similarities])) print() except (ValueError, KeyError): continue except (KeyboardInterrupt, EOFError): print() exit(0)
mit
-4,748,124,683,531,162,000
38.284848
109
0.51265
false
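cosine_metric is imported from a utils module that is not shown; given how it is used above (nsmallest picks the most similar notes and values below 0.4 count as similar), it behaves as a cosine distance over gensim's sparse (term_id, weight) vectors. One possible implementation under that assumption:

import math

def cosine_metric(u, v):
    # u, v: gensim-style sparse vectors, i.e. lists of (term_id, weight)
    du, dv = dict(u), dict(v)
    dot = sum(w * dv.get(i, 0.0) for i, w in du.items())
    norm_u = math.sqrt(sum(w * w for w in du.values()))
    norm_v = math.sqrt(sum(w * w for w in dv.values()))
    if norm_u == 0.0 or norm_v == 0.0:
        return 1.0
    return 1.0 - dot / (norm_u * norm_v)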
waliens/sldc
sldc/merger.py
1
10691
# -*- coding: utf-8 -*- import numpy as np from shapely.geometry import JOIN_STYLE, Polygon from shapely.ops import cascaded_union __author__ = "Begon Jean-Michel <jm.begon@gmail.com>" __contributor__ = ["Romain Mormont <romainmormont@hotmail.com>"] __version = "0.1" class Graph(object): """A class for representing a graph """ def __init__(self): self.nodes = [] self.node2idx = {} self.edges = {} def add_node(self, value): """Add a node to the graph Parameters ---------- value: int Node value Returns ------- index: int Return the node index """ self.nodes.append(value) self.node2idx[value] = len(self.nodes) - 1 return len(self.nodes) - 1 def add_edge(self, source, destination): """Add an edge to the graph Parameters ---------- source: int Id of the source node destination: int Id of the destination node """ ls = self.edges.get(source, []) if len(ls) == 0: self.edges[source] = ls ls.append(destination) def connex_components(self): """Find the connex components of the graph Returns ------- components: iterable (subtype: iterable of int) An iterable containing connex components. A connex component is an iterable of node indexes """ visited = [False]*len(self.nodes) components = [] stack = [] # store index of reachable nodes for node in self.node2idx.keys(): current_comp = [] stack.append(node) while len(stack) > 0: current_node = stack.pop() curr_idx = self.node2idx[current_node] if visited[curr_idx]: continue visited[curr_idx] = True current_comp.append(current_node) stack.extend(self.edges.get(current_node, [])) if len(current_comp) > 0: components.append(current_comp) return components def __getitem__(self, node_index): return self.nodes[node_index] def aggr_max_area_label(areas, labels): unique_labels = np.unique(labels) max_area, max_label = -1, -1 for l in unique_labels: area = np.sum(areas[labels == l]) if area > max_area: max_area = area max_label = l return max_label class SemanticMergingPolicy(object): """Merging policy for semantic merger. Specify the strategy to apply with regard to close polygons that have different labels. """ POLICY_NO_MERGE = "no_merge" # TODO implement other policies class SemanticMerger(object): """A class for merging labelled polygons. Close polygons having the same label are merged. """ def __init__(self, tolerance, policy=SemanticMergingPolicy.POLICY_NO_MERGE): """Constructor for Merger objects Parameters: ----------- tolerance: int Maximal distance between two polygons so that they are considered from the same object policy: str A merging policy to apply for overlapping polygons with different classes """ self._tolerance = tolerance self._policy = policy def merge(self, tiles, polygons, tile_topology, labels=None): """Merge the polygons passed in a per-tile fashion according to the tile topology Parameters ---------- tiles: iterable of tile identifiers (size: n, subtype: int) The identifiers of the tiles containing the polygons to merge polygons: iterable (size: n, subtype: iterable of shapely.geometry.Polygon) The polygons to merge provided as an iterable of iterables. The iterable i in polygons contains all the polygons detected in the tile tiles[i]. tile_topology: TileTopology The tile topology that was used to generate the tiles passed in polygons_tiles labels: iterable (size: n, subtype: iterable of int, default: None) The labels associated with the polygons. If None, all polygons are considered to have the same label. 
        Returns
        -------
        polygons: iterable (size: m, subtype: shapely.geometry.Polygon)
            An iterable of polygons objects containing the merged polygons
        out_labels: iterable (size: m, subtype: int)
            The labels of the merged polygons. If labels was None, this return
            value is omitted.
        """
        tiles_dict, polygons_dict, labels_dict = SemanticMerger._build_dicts(tiles, polygons, labels=labels)
        # no polygons
        if len(polygons_dict) <= 0:
            return np.array([]) if labels is None else (np.array([]), np.array([]))
        # stores the polygons indexes as nodes
        geom_graph = Graph()
        # add polygons
        for index in polygons_dict.keys():
            geom_graph.add_node(index)
        # add edges between polygons that should be merged
        for tile_identifier in tiles_dict.keys():
            # check whether polygons in neighbour tiles must be merged
            neighbour_tiles = tile_topology.tile_neighbours(tile_identifier)
            for neighbour in neighbour_tiles:
                if neighbour is not None:
                    self._register_merge(tiles_dict[tile_identifier], tiles_dict[neighbour],
                                         polygons_dict, labels_dict, geom_graph)
        merged_polygons, merged_labels = self._do_merge(geom_graph, polygons_dict, labels_dict)
        if labels is None:
            return np.array(merged_polygons)
        else:
            return np.array(merged_polygons), np.array(merged_labels)

    def _register_merge(self, polygons1, polygons2, polygons_dict, labels_dict, geom_graph):
        """Compare 2-by-2 the polygons in the two arrays. If they are very close (using
        `self._tolerance` as distance threshold) and can be merged regarding their labels and the
        merging policy, they are registered as polygons to be merged in the geometry graph (the
        registration being an edge between the nodes corresponding to the polygons in geom_graph).

        Parameters
        ----------
        polygons1: iterable
            Iterable of integers containing polygons indexes
        polygons2: iterable
            Iterable of integers containing polygons indexes
        polygons_dict: dict
            Dictionary mapping polygon identifiers with actual shapely polygons objects
        labels_dict: dict
            Dictionary mapping polygon ids with their labels
        geom_graph: Graph
            The graph in which the polygons to be merged must be registered
        """
        for poly_id1 in polygons1:
            for poly_id2 in polygons2:
                poly1, poly2 = polygons_dict[poly_id1], polygons_dict[poly_id2]
                label1, label2 = labels_dict[poly_id1], labels_dict[poly_id2]
                if poly1.distance(poly2) < self._tolerance and label1 == label2:
                    geom_graph.add_edge(poly_id1, poly_id2)

    def _do_merge(self, geom_graph, polygons_dict, labels_dict):
        """Effectively merges the polygons that were registered to be merged in the geom_graph
        Graph and return the resulting polygons in a list.

        Parameters
        ----------
        geom_graph: Graph
            The graph in which the polygons to be merged were registered
        polygons_dict: dict
            Dictionary mapping polygon identifiers with actual shapely polygons objects
        labels_dict: dict
            Dictionary mapping polygon ids with their labels

        Returns
        -------
        polygons: iterable
            An iterable of polygons objects containing the merged polygons
        """
        components = geom_graph.connex_components()
        dilation_dist = self._tolerance
        join = JOIN_STYLE.mitre
        merged_polygons = []
        merged_labels = []
        for component in components:
            if len(component) == 1:
                polygon = polygons_dict[component[0]]
                label = labels_dict[component[0]]
            else:
                polygons = [polygons_dict[poly_id].buffer(dilation_dist, join_style=join) for poly_id in component]
                polygon = cascaded_union(polygons).buffer(-dilation_dist, join_style=join)
                # determine label (take label representing the largest area)
                areas = np.array([polygons_dict[poly_id].area for poly_id in component])
                labels = np.array([labels_dict[poly_id] for poly_id in component])
                label = aggr_max_area_label(areas, labels)
            merged_polygons.append(polygon)
            merged_labels.append(label)
        return merged_polygons, merged_labels

    @classmethod
    def _build_dicts(cls, tiles, polygons, labels=None):
        """Given an array of tuples (polygons, tile), return dictionaries for executing the merging.

        Parameters
        ----------
        tiles: iterable of tile identifiers (size: n, subtype: int)
            The identifiers of the tiles containing the polygons to merge
        polygons: iterable (size: n, subtype: iterable of shapely.geometry.Polygon)
            The polygons to merge provided as an iterable of iterables. The iterable i in polygons
            contains all the polygons detected in the tile tiles[i].
        labels: iterable (size: n, subtype: iterable of int, default: None)
            The labels associated with the polygons. If None, all polygons are considered to have
            the same label.

        Returns
        -------
        polygons_dict: dict
            Maps a unique integer identifier with a polygon. All the polygons passed to the
            functions are given an identifier and are stored in this dictionary
        tiles_dict: dict
            Maps a tile identifier with an array containing the ids of the polygons located in
            this tile.
        labels_dict: dict
            Maps a polygon identifier with the label of that polygon.
        """
        tiles_dict = dict()
        polygons_dict = dict()
        labels_dict = dict()
        polygon_cnt = 1
        for i, (tile_id, polygons) in enumerate(zip(tiles, polygons)):
            polygons_ids = []
            for j, polygon in enumerate(polygons):
                polygons_dict[polygon_cnt] = polygon
                labels_dict[polygon_cnt] = 1 if labels is None else labels[i][j]
                polygons_ids.append(polygon_cnt)
                polygon_cnt += 1
            tiles_dict[tile_id] = polygons_ids
        return tiles_dict, polygons_dict, labels_dict
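
# A minimal usage sketch (illustrative, not part of the original module): the
# tile topology is stubbed with the one method SemanticMerger actually calls,
# tile_neighbours(); real code would pass an sldc TileTopology instead.
if __name__ == "__main__":
    from shapely.geometry import box

    class _TwoTileTopology(object):
        def tile_neighbours(self, tile_identifier):
            # hypothetical stand-in: tiles 1 and 2 are mutual neighbours
            return (2,) if tile_identifier == 1 else (1,)

    merger = SemanticMerger(tolerance=5)
    merged = merger.merge(tiles=[1, 2],
                          polygons=[[box(0, 0, 10, 10)], [box(12, 0, 22, 10)]],
                          tile_topology=_TwoTileTopology())
    # the two squares are 2 units apart (< tolerance), so one polygon remains
    print("merged into %d polygon(s), area %.1f" % (len(merged), merged[0].area))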
mit
-6,005,729,276,697,479,000
40.277992
132
0.609391
false
almann/ion-python
tests/test_core_multimap.py
1
1665
from amazon.ion.core import Multimap, record import six from tests import parametrize class _P(record('pairs', 'expected_all_values', 'expected_single_value', 'expected_total_len')): def __str__(self): return '{name}'.format(name=self.pairs) ALL_DATA = _P( pairs=[('a', 1), ('a', 2), ('a', 3), ('a', [4, 5, 6]), ('b', 0), ('c', {'x': 'z', 'r': 's'})], expected_all_values=[('a', [1, 2, 3, [4, 5, 6]]), ('b', [0]), ('c', [{'x': 'z', 'r': 's'}])], expected_single_value=[('a', [4, 5, 6]), ('b', 0), ('c', {'x': 'z', 'r': 's'})], expected_total_len=6 ) def _create_multimap_with_items(pairs): m = Multimap() for pair in pairs: m.add_item(pair[0], pair[1]) return m @parametrize( ALL_DATA ) def test_add_item(p): m = _create_multimap_with_items(p.pairs) for expected in p.expected_all_values: assert list([x for x in m.get_all_values(expected[0])]) == expected[1] for expected in p.expected_single_value: assert m[expected[0]] == expected[1] assert p.expected_total_len == len(m) @parametrize( (ALL_DATA, ["a"], 2), (ALL_DATA, ["b"], 5), (ALL_DATA, ["c"], 5), (ALL_DATA, ["a", "b"], 1), (ALL_DATA, ["a", "b", "c"], 0) ) def test_delete_item(item): m = _create_multimap_with_items(item[0].pairs) p, items_to_remove, len_after_removal = item for to_remove in items_to_remove: del m[to_remove] assert len(m) == item[2] @parametrize( {}, {"a": 1}, {"a": 1, "b": 2, "c": [1, 2, {3: 4}]} ) def test_constructor(d): m = Multimap(d) for k, v in six.iteritems(d): assert m[k] == v assert len(m) == len(d)
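
# A small, hedged usage sketch mirroring the behaviour the tests above assert:
# get_all_values preserves insertion order, plain indexing returns the value
# added last, and deletion drops every value stored under the key.
def _demo_multimap():
    m = _create_multimap_with_items([('k', 1), ('k', 2), ('q', 'x')])
    assert list(m.get_all_values('k')) == [1, 2]
    assert m['k'] == 2
    del m['k']
    assert len(m) == 1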
apache-2.0
-5,591,384,137,282,426,000
25.428571
98
0.533333
false
TobiasLundby/UAST
Module1/exercise41/utm.py
1
7035
#!/usr/bin/env python #***************************************************************************** # Universal Transverse Mercator (UTM) conversion # Copyright (c) 2013-2015, Kjeld Jensen <kjeld@frobomind.org> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #***************************************************************************** """ This class implements conversion between geodetic coordinates and the Universal Transverse Mercator (UTM) projection. The class utilizes the tranmerc class located in transverse_mercator.py The functions do not check for out of range or errors in input. set_zone_override (zone) use to override the default zone by one of its neighbouring zones. If more distant zone is chosen, the inaccuracies will be significant. geodetic_to_utm (latitude, longitude) latitude: Accepted range is [-90;90] [deg] longitude: Accepted range is [-180;180] [deg] Returns: hemisphere, zone, letter, easting [m], northing [m] utm_to_geodetic (hemisphere, zone, easting, northing) hemisphere: 'N' or 'S' accepted zone: Valid UTM zone accepted Returns: geodetic latitude [deg], geodetic longitude [deg] Revision 2013-04-05 KJ Library created 2015-03-09 KJ Minor update of the license text. 
""" # imports from math import pi from transverse_mercator import tranmerc # WGS-84 defines wgs84_a = 6378137.0 # WGS84 semi-major axis of ellipsoid [m] wgs84_f = 1/298.257223563 # WGS84 flattening of ellipsoid # UTM defines utm_false_easting = 500000.0 utm_scale_factor = 0.9996 utm_origin_latitude = 0.0 #***************************************************************************** class utmconv(): def __init__(self): self.false_e = 500000.0 self.false_n = 0.0 self.scale = 0.9996 self.zone_override = 0 self.deg_to_rad = pi/180.0 self.rad_to_deg = 180.0/pi self.tm = tranmerc() def set_zone_override (self, zone): # allow manual override of the utm zone self.zone_override = zone def geodetic_to_utm (self, latitude, longitude): lat = latitude*self.deg_to_rad lon = longitude*self.deg_to_rad lat_deg_int = int(latitude) lon_deg_int = int(longitude) # if manually override to a neighbouring zone if self.zone_override > 0: zone = self.zone_override else: # calculate the zone based on the longitude zone = int((longitude + 180)/6) + 1 # handle areas with special conventions (Denmark & South West Norway) if (lat_deg_int>55) and (lat_deg_int<64) and (lon_deg_int> -1) and (lon_deg_int< 3): zone = 31 if (lat_deg_int>55) and (lat_deg_int< 64) and (lon_deg_int> 2) and (lon_deg_int< 12): zone = 32 # handle areas with special conventions (Svalbard) if lat_deg_int > 71: if (lon_deg_int>-1) and (lon_deg_int<9): zone = 31 if (lon_deg_int>8) and (lon_deg_int<21): zone = 33 if (lon_deg_int>20) and (lon_deg_int<33): zone = 35 if (lon_deg_int>32) and (lon_deg_int<42): zone = 37 # calculate central meridian for this zone central_meridian = ((zone - 1)*6 - 180 + 3)*self.deg_to_rad # set false northing based on hemishpere if latitude >= 0.0: false_northing = 0 hemisphere = 'N' # determine the UTM zone letter if latitude >= 72.0: zlet = 'X' elif latitude >= 64.0: zlet = 'W' elif latitude >= 56.0: zlet = 'V' elif latitude >= 48.0: zlet = 'U' elif latitude >= 40.0: zlet = 'T' elif latitude >= 32.0: zlet = 'S' elif latitude >= 24.0: let = 'R' elif latitude >= 16.0: zlet = 'Q' elif latitude >= 8.0: zlet = 'P' else: zlet = 'N' else: false_northing = 10000000 hemisphere = 'S' # determine the UTM zone letter if latitude >= -8.0: zlet = 'M' elif latitude >= -16.0: zlet = 'L' elif latitude >= -24.0: zlet = 'K' elif latitude >= -32.0: zlet = 'J' elif latitude >= -40.0: zlet = 'H' elif latitude >= -48.0: zlet = 'G' elif latitude >= -56.0: zlet = 'F' elif latitude >= -64.0: zlet = 'E' elif latitude >= -72.0: zlet = 'D' else: zlet = 'C' # set parameters for WGS-84, UTM, the false northing and the zone central meridian self.tm.set_params (wgs84_a, wgs84_f, utm_origin_latitude, central_meridian, utm_false_easting, false_northing, utm_scale_factor) # perform conversion (easting, northing) = self.tm.geodetic_to_tranmerc (lat, lon) # return hemisphere, utm zone, utm letter, easting and northing return (hemisphere, zone, zlet, easting, northing) def utm_to_geodetic (self, hemisphere, zone, easting, northing): # calculate the central meridian for the zone central_meridian = ((zone - 1)*6 - 180 + 3)*self.deg_to_rad # determine the false northing based on the hemisphere if hemisphere == 'N': false_northing = 0 else: false_northing = 10000000 # set parameters for WGS-84, UTM, the false northing and the zone central meridian self.tm.set_params (wgs84_a, wgs84_f, utm_origin_latitude, central_meridian, utm_false_easting, false_northing, utm_scale_factor) # perform conversion (lat,lon) = self.tm.tranmerc_to_geodetic (easting, northing) # return 
geodetic latitude and longitude in degrees return (lat*self.rad_to_deg, lon*self.rad_to_deg) #*****************************************************************************
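
# A brief usage sketch (illustrative values): assumes transverse_mercator.py
# from the same exercise is importable. The sample coordinate lies in Denmark,
# which falls in grid zone 32U.
if __name__ == "__main__":
    uc = utmconv()
    (hemisphere, zone, letter, easting, northing) = uc.geodetic_to_utm(55.37, 10.43)
    print ('UTM: %s zone %d%s E %.1f m N %.1f m' % (hemisphere, zone, letter, easting, northing))

    # round-trip back to geodetic coordinates
    (lat, lon) = uc.utm_to_geodetic(hemisphere, zone, easting, northing)
    print ('Geodetic: lat %.6f lon %.6f' % (lat, lon))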
bsd-3-clause
4,569,679,686,985,486,000
39.431034
130
0.632694
false
harrysocool/ear_recognition
ear_recognition/generate_files.py
1
6623
import csv import os import random import numpy as np import pandas as pd import matlab_wrapper from lib.utils.timer import Timer from tools.ear_recog import get_gt, ROI_boxes import scipy.io as sio def listdir_no_hidden(path): list1 = [] for f in sorted(os.listdir(path)): if not f.startswith('.'): p = os.path.abspath(path) list1.append(os.path.join(p, f)) return list1 def write_list_to_csv(list1, path_out, header=False): temp = pd.DataFrame(list1) temp.to_csv(path_out, index=False, header=header) def save_gt_roidb_csv(data_path, csv_path, image_index_output_path, gt_output_path, test_image_path, test_gt): box_list = pd.read_csv(csv_path, header=0).get_values() image_path_list = listdir_no_hidden(data_path) assert len(box_list) == len(image_path_list), 'the length of box list must equal to image list' new_list = [] new_list1 = [] for idx, entry in enumerate(image_path_list): s1 = str(entry) temp = box_list[idx] # change the x y coordination to correct [X1 Y1 X2 Y2] x1 = str(temp[-2]) y1 = str(temp[-4]) x2 = str(temp[-1]) y2 = str(temp[-3]) s2 = x1+' '+ y1+' '+x2+' '+y2 new_list.append(s1 + ' 1 ' + s2) new_list1.append(s1) # shuffle the idx of training set shuffle_idx = range(len(image_path_list)) random.seed(641) # make it can be reproduce random.shuffle(shuffle_idx) train_idx = shuffle_idx[0:437] test_idx = shuffle_idx[437:] train_image_path = [new_list1[idx] for idx in train_idx] train_gt = [new_list[idx] for idx in train_idx] test_image_path_data = [new_list1[idx] for idx in test_idx] test_gt_data = [new_list[idx] for idx in test_idx] write_list_to_csv(train_gt, gt_output_path) write_list_to_csv(train_image_path, image_index_output_path) write_list_to_csv(test_gt_data, test_gt) write_list_to_csv(test_image_path_data, test_image_path) def initialize_matlab(): matlab = matlab_wrapper.MatlabSession() # edge_detector OP_method matlab.eval("cd('/home/harrysocool/Github/fast-rcnn/OP_methods/edges')") matlab.eval("addpath(genpath('/home/harrysocool/Github/fast-rcnn/OP_methods/edges'))") matlab.eval("toolboxCompile") # # selective_search OP_method # matlab.eval("cd('/home/harrysocool/Github/fast-rcnn/OP_methods/selective_search_ijcv_with_python')") # matlab.eval("addpath(genpath('/home/harrysocool/Github/fast-rcnn/OP_methods/selective_search_ijcv_with_python'))") return matlab def time_analyse(matlab, cmd, image_filepath, par1, par2): timer = Timer() timer.tic() obj_proposals = ROI_boxes(matlab, image_filepath, cmd, par1, par2) timer.toc() time = timer.total_time box_numer = len(obj_proposals) return time, box_numer, obj_proposals def mean_IOU_ratio(image_index, dets): ratio = np.empty(0,dtype=np.float64) (x1, y1, x2, y2) = get_gt(image_index) if dets.size > 4: for box in dets: X1 = box[0] Y1 = box[1] X2 = box[2] Y2 = box[3] if ((np.float32(x1)-X1)<=15 and (X2- np.float32(x2))<=15 and (np.float32(y1)-Y1)<=15 and (Y2-np.float32(y2))<=15): ratio = np.append(ratio,1.0) else: SI = max(0, min(x2, X2) - max(x1, X1)) * \ max(0, min(y2, Y2) - max(y1, Y1)) SU = (x2 - x1) * (y2 - y1) + (X2 - X1) * (Y2 - Y1) - SI ratio = np.append(ratio, SI/SU) if ratio.size == 0: big_ratio = 0 else: big = np.where(ratio >= 0.1)[0].size total = float(len(dets)) big_ratio = float(big/total) return big_ratio if __name__ == '__main__': datasets_path = '/home/harrysocool/Github/fast-rcnn/DatabaseEars' csv_path = os.path.join(datasets_path, 'boundaries.csv') image_path = os.path.join(datasets_path, 'DatabaseEars/') gt_output_path = os.path.join(datasets_path, '../','ear_recognition/data_file/gt_roidb.csv') 
image_index_output_path = os.path.join(datasets_path, '../', 'ear_recognition/data_file/image_index_list.csv') mat_output_filename = os.path.join(datasets_path, '../','ear_recognition/data_file/all_boxes.mat') test_gt_output_path = os.path.join(datasets_path, '../','ear_recognition/data_file/test_gt_roidb.csv') test_image_index_output_path = os.path.join(datasets_path, '../', 'ear_recognition/data_file/test_image_index_list.csv') # save_gt_roidb_csv(image_path, csv_path, image_index_output_path, gt_output_path, test_image_index_output_path, # test_gt_output_path) matlab = initialize_matlab() timer = Timer() list1 = pd.read_csv(test_image_index_output_path, header=None).values.flatten().tolist() cmd = 'ss' # ks = [50 100 150 200 300]; par2_list = [8] # par2_list = [3] time_csv_out_path = os.path.join(os.path.dirname(datasets_path), 'result', cmd + '_' + 'OPtune_result_1.csv') if not os.path.exists(time_csv_out_path): write_list_to_csv(par2_list, time_csv_out_path) with open(time_csv_out_path, 'a') as csvfile: writer = csv.writer(csvfile) list2 = [] for par2 in [7]: for par1 in [7]: # par1 = float(par1)/100 # all_boxes = np.zeros((437,), dtype=np.object) for index, image_path in enumerate(list1): # if index>300: # break time, box_numer, obj_proposals = time_analyse(matlab, cmd, image_path, par1, par2) ratio = mean_IOU_ratio(index + 1, obj_proposals) # list2.append([time, box_numer]) # print('{} has processed in {:.3f} seconds with {} boxes'.format(len(list2), time, box_numer)) print('No. {} has processed with par {} {}, box {} IOU ratio {:.3f} in {:.2f} seconds'.format(index, par1, par2,box_numer ,ratio, time)) writer.writerow([par1, par2,ratio,box_numer, time]) # all_boxes[index] = obj_proposals # sio.savemat(mat_output_filename, {'all_boxes': all_boxes}) # write_list_to_csv(list2, time_csv_out_path) # fnames_cell = "{" + ",".join("'{}'".format(x) for x in list1) + "}" # command = "res = {}({}, '{}')".format('selective_search', fnames_cell, mat_output_filename) # print(command) # # # matlab.eval(command)
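
# Worked example (illustrative numbers) of the intersection-over-union
# arithmetic used inside mean_IOU_ratio, for two boxes in (x1, y1, x2, y2) form:
def _iou_example():
    a = (0, 0, 10, 10)
    b = (5, 0, 15, 10)
    SI = max(0, min(a[2], b[2]) - max(a[0], b[0])) * \
         max(0, min(a[3], b[3]) - max(a[1], b[1]))  # 5 * 10 = 50
    SU = (a[2] - a[0]) * (a[3] - a[1]) + \
         (b[2] - b[0]) * (b[3] - b[1]) - SI         # 100 + 100 - 50 = 150
    return float(SI) / SU                           # ~0.333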
mit
-4,927,959,362,908,432,000
39.390244
145
0.587649
false
mosra/m.css
plugins/m/test/test_alias.py
1
1775
# # This file is part of m.css. # # Copyright © 2017, 2018, 2019, 2020 Vladimír Vondruš <mosra@centrum.cz> # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. # from . import PelicanPluginTestCase class Alias(PelicanPluginTestCase): def __init__(self, *args, **kwargs): super().__init__(__file__, '', *args, **kwargs) def test(self): self.run_pelican({ 'PLUGINS': ['m.htmlsanity', 'm.alias'], 'SITEURL': 'http://my.site' }) self.assertEqual(*self.actual_expected_contents('old-page/index.html')) self.assertEqual(*self.actual_expected_contents('even-older-page.html')) self.assertEqual(*self.actual_expected_contents('blog/old-article/index.html'))
mit
4,064,174,233,653,767,000
44.435897
87
0.705982
false
yaoshanliang/markdown-preview.vim
pythonx/markdown_preview.py
1
3976
#!/usr/bin/env python # encoding: utf-8 import socket import vim import markdown_parser import webbrowser import os, platform import commands import markdown_server import markdown_lib def markdownPreviewWithDefaultCodeStyle(): cssName = vim.eval("a:args1") currentpath = commands.getstatusoutput("pwd")[1] content = getHead(False, cssName) content += getBuff() content += getBody() file = open(os.path.join(currentpath, 'tmp.html'), 'w') file.write(content) file.close() url = 'file:///' + currentpath + '/tmp.html' webbrowser.open(url) def markdownPreviewWithCustomCodeStyle(): cssName = vim.eval("a:args1") codeName = vim.eval("a:args2") currentpath = commands.getstatusoutput("pwd")[1] content = getHead(False, cssName, codeName) content += getBuff() content += getBody() file = open(os.path.join(currentpath, 'tmp.html'), 'w') file.write(content) file.close() url = 'file:///' + currentpath + '/tmp.html' webbrowser.open(url) def checkPort(): sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) try: sock.bind(('localhost', 20016)) except Exception: return False return True SERVER = None def liveMarkdownPreviewStart(): global SERVER if checkPort(): SERVER = markdown_server.Server(20016) if not SERVER.isOK(): print "Server is Wrong" else: SERVER.start() content = getHead(True) content += getBuff() content += getBody() currentpath = commands.getstatusoutput("pwd")[1] file = open(os.path.join(currentpath, 'tmp.html'), 'w') file.write(content) file.close() url = 'file:///' + currentpath + '/tmp.html' webbrowser.open(url) else: print "Don't use the command twice, or you may not close the previous vim" def liveMarkdownPreviewEnd(): global SERVER try: SERVER.endServer() except Exception: print "Server is DOWN" def getBuff(): lineNum, curLineNum = 0, vim.current.window.cursor[0] - 1 # markdown_lib._print(curLineNum) buff = '' for line in vim.current.buffer: if lineNum == curLineNum: if line.startswith("```") or line.startswith('===') or line.startswith("---") or line == "": buff += line + '\n{ANCHOR}\n' else: buff += line + '{ANCHOR}\n' else: buff += line + '\n' lineNum = lineNum + 1 buff = markdown_parser.markdown(buff) buff = buff.replace('{ANCHOR}', '<a id="anchor"></a>') return buff def getHead(isLive = False, cssstyle = 'Github', codesytle = 'default'): if vim.eval("exists('g:MarkDownResDir')") == '1': cssDir = vim.eval('g:MarkDownResDir') else: if platform.system() == 'Windows': cssDir = os.path.join(vim.eval('$HOME'), 'vimfiles', 'MarkDownRes') elif vim.eval("has('nvim')") == '1': cssDir = os.path.join(vim.eval('$HOME'),'.nvim', 'MarkDownRes') else: cssDir = os.path.join(vim.eval('$HOME'), '.vim', 'MarkDownRes') content = "<html>\n" content += '<meta charset="UTF-8" />\n' content += '<head>' content += '<link rel="stylesheet" href="' + cssDir + '/code-styles/' + codesytle + '.css">\n' content += '<link href="' + cssDir + '/' + cssstyle + '.css" media="all" rel="stylesheet"/>\n' content += '<script src="' + cssDir + '/js/highlight.min.js"></script>\n' content += '<script src="' + cssDir + '/js/highlight.pack.js"></script>\n' content += '<script src="' + cssDir + '/js/jquery-1.11.3.min.js"></script>\n' content += '<script>hljs.initHighlightingOnLoad();</script>\n' if isLive == True: content += '<script src="' + cssDir + '/js/autoload.js"></script>\n' content += '</head>\n<body id="content">' return content def getBody(): return "</body></html>\r\n\r\n\r\n\r\n"
mit
6,209,467,138,368,643,000
32.411765
104
0.589537
false
eduardosan/window-manager
gwmanager/utils.py
1
1379
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from glob import glob
from subprocess import check_output, CalledProcessError


def get_usb_devices():
    """
    List connected USB block devices

    :return:
    """
    sdb_devices = map(os.path.realpath, glob('/sys/block/sd*'))
    usb_devices = (dev for dev in sdb_devices if 'usb' in dev.split('/')[5])
    return dict((os.path.basename(dev), dev) for dev in usb_devices)


def get_mount_points(devices=None):
    """
    List mount points

    :param devices:
    :return: List of tuples [('/dev/sdb1', '/media/bisa/BACKUP')]
    """
    devices = devices or get_usb_devices()  # if devices is None: get_usb_devices
    output = check_output(['mount']).splitlines()
    is_usb = lambda path: any(dev in str(path) for dev in devices)
    usb_info = (line for line in output if is_usb(line.split()[0]))
    fullInfo = []
    for info in usb_info:
        mountURI = info.split()[0]
        usbURI = info.split()[2]
        # a mount point containing spaces is split across several fields,
        # so rebuild it by scanning forward until the 'type' keyword
        for x in range(3, len(info.split())):
            if info.split()[x] == "type":
                for m in range(3, x):
                    usbURI += " " + info.split()[m]
                break
        fullInfo.append([mountURI.decode('utf-8'), usbURI.decode('utf-8')])
    return fullInfo
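
# Hedged usage sketch: only meaningful on a Linux host where /sys/block and
# the `mount` command exist, typically with a USB drive attached.
if __name__ == '__main__':
    devices = get_usb_devices()
    print('USB block devices: %s' % (', '.join(devices) or 'none'))
    for mount_uri, usb_uri in get_mount_points(devices):
        print('%s mounted at %s' % (mount_uri, usb_uri))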
gpl-3.0
-8,723,302,183,774,859,000
28.978261
81
0.57723
false
kevinpt/symbolator
nucanvas/color/sinebow.py
1
1400
# -*- coding: utf-8 -*- # Copyright © 2017 Kevin Thibedeau # Distributed under the terms of the MIT license import math from math import sin, pi import colorsys def sinebow(hue): '''Adapted from http://basecase.org/env/on-rainbows''' hue = -(hue + 0.5) # Start at red rotating clockwise rgb = sin(pi * hue), sin(pi * (hue + 1.0/3.0)), sin(pi * (hue + 2.0/3.0)) return tuple(int(255 * c**2) for c in rgb) def distinct_color_sequence(hue=0.0): # Hue is normalized from 0-1.0 for one revolution phi = (1 + 5**0.5) / 2 golden_angle = phi #1.0 / phi**2 #print('# GA:', math.degrees(golden_angle), phi) while(True): yield sinebow(hue) hue += golden_angle def lighten(rgb, p): h,l,s = colorsys.rgb_to_hls(*(c / 255.0 for c in rgb)) l = p + l - p*l return tuple(int(c * 255) for c in colorsys.hls_to_rgb(h,l,s)) if __name__ == '__main__': import PIL from PIL import Image, ImageDraw cs = distinct_color_sequence() im = Image.new('RGB',(1024,10)) d = ImageDraw.Draw(im) for i in range(256): hue = i / 256 #r,g,b = sinebow(hue) r,g,b = next(cs) d.line([(i*4,0), (i*4,9)], (r,g,b), width=4) im.save('sinebow_rand.png') im = Image.new('RGB',(256,10)) d = ImageDraw.Draw(im) for i in range(256): hue = i / 256 r,g,b = sinebow(hue) d.line([(i,0), (i,9)], (r,g,b)) im.save('sinebow.png')
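
# Worked example of the channel arithmetic above: with hue = 0 the shifted hue
# is -0.5, so the channels are sin(-pi/2)**2 = 1.0, sin(-pi/6)**2 = 0.25 and
# sin(pi/6)**2 = 0.25, which scale to (255, 63, 63) -- a red-dominant start.
def _sinebow_example():
    return sinebow(0)  # -> (255, 63, 63)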
mit
-8,503,877,829,647,342,000
21.934426
75
0.580415
false
robbiev/pytelemeter
pytelemeter/config.py
1
3703
""" pytelemeter config file handler """ import os import sys import ConfigParser from parser import Account # exceptions class ConfigError(Exception): pass class MissingConfigFile(ConfigError): pass class MalformedConfigFile(ConfigError): pass class StockCredentials(ConfigError): pass class NoSuchAccount(ConfigError): pass class ConfigFile: def __init__(self, location): if not location: location = DEFAULT_LOCATION self.location = location if not os.path.isfile(self.location): print >> sys.stderr, ('Warning: configuration file not ' 'found, this may be fatal later on') def read(self, account=None): if not os.path.isfile(self.location): raise MissingConfigFile, ('unable to find config file, ' 'please supply your username and password') if os.stat(self.location).st_mode & 0777 != 0600: print >> sys.stderr, ('Warning: changing possibly ' 'insecure configuration file permissions') os.chmod(self.location, 0600) try: config = self._get_config() sections = config.sections() if account: if account in sections: id = account else: raise NoSuchAccount, ('unable to find the ' 'specified account in the config file') else: if 'default' in sections: id = 'default' elif 'user' in sections: id = 'user' elif len(sections): id = sections[0] else: raise MalformedConfigFile, ('no accounts specified ' 'in the configuration file') try: username = config.get(id, 'user') except ConfigParser.NoOptionError: username = config.get(id, 'username') try: password = config.get(id, 'passwd') except ConfigParser.NoOptionError: password = config.get(id, 'password') try: provider = config.get(id, 'provider') except ConfigParser.NoOptionError: provider = None # will use default = telenet self.account = Account(id, username, password, provider) except ConfigParser.Error: raise MalformedConfigFile, 'error parsing config file' def check(self): a = self.account if (a.username == 'foo' or a.password == 'bar'): raise StockCredentials, ('detected the example config ' 'file, please supply your username and password') def save(self, account='user'): config = self._get_config(false) config.remove_section(account) config.add_section(account) config.set(account, 'username', self.account.username) config.set(account, 'password', self.account.password) config.set(account, 'provider', self.account.provider) self._save_config(config) def delete(self, account): config = self._get_config() config.remove_section(account) self._save_config(config) def _get_config(self, fail=True): config = ConfigParser.ConfigParser() try: config.read(self.location) except ConfigParser.Error: if fail: raise else: pass return config def _save_config(self, config): file = open(self.location, 'w') config.write(file) file.close() os.chmod(self.location, 0600)
gpl-2.0
-8,811,499,867,530,199,000
32.972477
72
0.562247
false
SAVeselovskiy/KFU_Visual_Tracking
Tracking/detection.py
1
10566
__author__ = 'IVMIT KFU: Gataullin Ravil & Veselovkiy Sergei' from copy import copy import numpy as np from sklearn.ensemble import RandomForestClassifier from time import time import warnings warnings.filterwarnings("ignore") from sklearn.cross_validation import train_test_split from structure import Position class PatchVarianceClassifier: def __init__(self, init_patch): self.init_patch_variance = np.var(init_patch.content) def classify(self, patch): # return 1 if object is positive detected # return 0 if object is negative detected if np.var(patch.content) > 0.5 * self.init_patch_variance: return 1 else: return 0 def predict_patch(self, patch): return np.var(patch.content) / self.init_patch_variance def predict_position(self, position): return np.var(position.calculate_patch().content) / self.init_patch_variance class EnsembleClassifier: def __init__(self, learning_component): self.learning_component = learning_component self.classifier = RandomForestClassifier(max_depth=3) def classify(self, patch): # return 1 if object is positive detected # return 0 if object is negative detected feature = patch.calculate_feature(self.learning_component.descriptor) if self.classifier.predict_proba(feature)[0][self.positive_class_index] > 0.5: return 1 else: return 0 def predict_patch(self, patch): feature = patch.calculate_feature(self.learning_component.descriptor) return self.classifier.predict_proba(feature)[0][self.positive_class_index] def predict_position(self, position): feature = position.calculate_patch().calculate_feature(self.learning_component.descriptor) return self.classifier.predict_proba(feature)[0][self.positive_class_index] def relearn(self, test_size=0): samples, weights, targets = self.learning_component.get_training_set(const_weight=True) train_samples, test_samples, train_targets, test_targets = train_test_split(samples, targets, test_size=test_size, random_state=np.random.RandomState(0)) count_positives = 1.0*np.count_nonzero(train_targets) count_negatives = 1.0*(len(train_targets) - count_positives) positive_weight = count_negatives/len(train_targets) negative_weight = count_positives/len(train_targets) weights = np.array([positive_weight if target == 1 else negative_weight for target in train_targets]) self.classifier.fit(train_samples, train_targets, sample_weight=weights) self.learning_component.new_samples_count = 0 if len(test_samples) > 0: test_result = [self.classifier.predict(sample) for sample in test_samples] true_positives = 0.0 count_test_positives = 1.0*np.count_nonzero(test_targets) count_result_positives = 1.0*np.count_nonzero(test_result) for i in xrange(len(test_targets)): if test_targets[i] == test_result[i] and test_result[i] == 1: true_positives += 1 precision = true_positives / count_test_positives recall = true_positives / count_result_positives print "Precision:", precision print "Recall", recall if precision + recall != 0: print "F-score:", 2 * precision * recall / (precision + recall) else: print "F-score:", 0 self.positive_class_index = 0 for elem in self.classifier.classes_: if elem != 1.0: self.positive_class_index += 1 else: break class NearestNeighborClassifier: def __init__(self, learning_component, lmbd = 0.1, tetta = 0.6): self.learning_component = learning_component self.lmbd = lmbd self.tetta = tetta def classify(self, patch): # return 1 if object is positive detected # return 0 if object is negative detected if self.learning_component.relative_similarity(patch) > self.tetta: return 1 else: return 0 def predict_patch(self, patch): return 
self.learning_component.relative_similarity(patch) def predict_position(self, position): return self.learning_component.relative_similarity(position.calculate_patch()) def scanning_window(init_position, scales_step = 1.2, slip_step = 0.1, minimal_bounding_box_size = 20, min_step=1, max_step=20): flag_inc = True flag_dec = False position = copy(init_position) while min(position.width, position.height) >= minimal_bounding_box_size: position.update(x=0,y=0) step_width = min(max(min_step,int(slip_step * position.width)),max_step) step_height = min(max(min_step,int(slip_step * position.height)),max_step) while position.is_correct(): while position.is_correct(): yield position position.update(x=position.x+step_width) position.update(x=0, y=position.y+step_height) # if position.is_correct(): # yield position # is_end = False # step_width = int(slip_step * position.width) # step_height = int(slip_step * position.height) # layer = 1 # xx = position.x # yy = position.y # while not is_end: # is_end = True # for start_point, vector in (([-1,-1],[1,0]),([1,-1],[0,1]),([1,1],[-1,0]),([-1,1],[0,-1])): # position.update(x=xx + (start_point[0]*layer + vector[0])*step_width, y=yy+(start_point[1]*layer + vector[1])*step_height) # while position.is_correct() and xx - layer*step_width <= position.x <= xx + layer*step_width and yy - layer*step_height <= position.y <= yy + layer*step_height: # is_end = False # yield position # position.update(x=position.x+vector[0]*step_width, y=position.y+vector[1]*step_height) # layer += 1 if flag_inc: position.update(height=int(position.height * scales_step), width = int(position.width * scales_step)) if position.height > position.buffer[0].shape[0] or position.width > position.buffer[0].shape[0]: flag_inc = False flag_dec = True position = copy(init_position) if flag_dec: position.update(height=int(position.height / scales_step), width = int(position.width / scales_step)) def get_sliding_positions(init_position, scales_step = 1.2, slip_step = 0.1, minimal_bounding_box_size = 20, min_step=2, max_step=2): sliding_positions = [] flag_inc = True flag_dec = False position = copy(init_position) while min(position.width, position.height) >= minimal_bounding_box_size: position.update(x=0,y=0) step_width = min(max(min_step,int(slip_step * position.width)),max_step) step_height = min(max(min_step,int(slip_step * position.height)),max_step) while position.is_correct(): while position.is_correct(): sliding_positions.append(copy(position)) position.update(x=position.x+step_width) position.update(x=0, y=position.y+step_height) if flag_inc: position.update(height=int(position.height * scales_step), width = int(position.width * scales_step)) if position.height > position.buffer[0].shape[0] or position.width > position.buffer[0].shape[0]: flag_inc = False flag_dec = True position = copy(init_position) if flag_dec: position.update(height=int(position.height / scales_step), width = int(position.width / scales_step)) return sliding_positions class Detector: def __init__(self, init_position, learning_component, threshold_patch_variance=0.5, threshold_ensemble=0.5, threshold_nearest_neighbor=0.6): self.learning_component = learning_component self.patch_variance_classifier = PatchVarianceClassifier(learning_component.init_patch) self.ensemble_classifier = EnsembleClassifier(learning_component) self.nearest_neighbor_classifier = NearestNeighborClassifier(learning_component) self.threshold_patch_variance = threshold_patch_variance self.threshold_ensemble = threshold_ensemble 
        self.threshold_nearest_neighbor = threshold_nearest_neighbor
        self.sliding_positions = get_sliding_positions(init_position, scales_step = 1.2, slip_step = 0.1, minimal_bounding_box_size = 50, min_step=2, max_step=10)

    def cascaded_classifier(self, patch):
        # 3 stages of classification
        # return 1 if the object is detected as positive
        # return 0 if the object is detected as negative
        if self.patch_variance_classifier.predict_patch(patch) < self.threshold_patch_variance:
            return 0
        if self.ensemble_classifier.predict_patch(patch) < self.threshold_ensemble:
            return 0
        # elif self.nearest_neighbor_classifier.predict_patch(patch) < self.threshold_nearest_neighbor:
        #     return 0
        return 1

    def detect(self, position, is_tracked):
        if self.learning_component.new_samples_count > 10:
            start = time()
            self.ensemble_classifier.relearn()
            print "Relearn:", time() - start
        detected_windows = []
        predict_times = []
        for current_position in self.sliding_positions:
            start = time()
            proba = self.predict_position(current_position)
            predict_times.append(time() - start)
            if proba == 1:
                detected_windows.append((current_position.get_window(), current_position.calculate_patch(), proba))
                self.learning_component.add_new_positive(current_position.calculate_patch())
                if is_tracked:
                    return detected_windows
            else:
                self.learning_component.add_new_negative(current_position.calculate_patch())
        print "Analysed window count:", len(predict_times)
        print "Max detection time:", np.max(predict_times)
        print "Min detection time:", np.min(predict_times)
        print "Mean detection time:", np.mean(predict_times)
        return detected_windows

    def predict_patch(self, patch):
        return self.cascaded_classifier(patch)

    def predict_position(self, position):
        return self.cascaded_classifier(position.calculate_patch())
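
# Hedged sketch of the first cascade stage in isolation: PatchVarianceClassifier
# only needs an object exposing a `content` array, so a tiny stand-in patch is
# enough here; the values are illustrative.
if __name__ == '__main__':
    class _FakePatch:
        def __init__(self, content):
            self.content = content

    init_patch = _FakePatch(np.random.RandomState(0).rand(24, 24))
    stage1 = PatchVarianceClassifier(init_patch)
    flat_patch = _FakePatch(np.zeros((24, 24)))  # zero variance -> rejected
    print "flat patch passes stage 1:", stage1.classify(flat_patch)  # 0
    print "init patch passes stage 1:", stage1.classify(init_patch)  # 1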
mit
402,492,065,248,297,800
47.031818
178
0.638463
false
sathnaga/virt-test
virttest/virt_vm.py
1
42907
import logging, time, glob, os, re from autotest.client.shared import error import utils_misc, utils_net, remote class VMError(Exception): pass class VMCreateError(VMError): def __init__(self, cmd, status, output): VMError.__init__(self, cmd, status, output) self.cmd = cmd self.status = status self.output = output def __str__(self): return ("VM creation command failed: %r (status: %s, " "output: %r)" % (self.cmd, self.status, self.output)) class VMStartError(VMError): def __init__(self, name, reason=None): VMError.__init__(self, name, reason) self.name = name self.reason = reason def __str__(self): msg = "VM '%s' failed to start" % self.name if self.reason is not None: msg += ": %s" % self.reason return msg class VMConfigMissingError(VMError): def __init__(self, name, config): VMError.__init__(self, name, config) self.name = name self.config = config def __str__(self): return "Missing config '%s' for VM %s" % (self.config, self.name) class VMHashMismatchError(VMError): def __init__(self, actual, expected): VMError.__init__(self, actual, expected) self.actual_hash = actual self.expected_hash = expected def __str__(self): return ("CD image hash (%s) differs from expected one (%s)" % (self.actual_hash, self.expected_hash)) class VMImageMissingError(VMError): def __init__(self, filename): VMError.__init__(self, filename) self.filename = filename def __str__(self): return "CD image file not found: %r" % self.filename class VMImageCheckError(VMError): def __init__(self, filename): VMError.__init__(self, filename) self.filename = filename def __str__(self): return "Errors found on image: %r" % self.filename class VMBadPATypeError(VMError): def __init__(self, pa_type): VMError.__init__(self, pa_type) self.pa_type = pa_type def __str__(self): return "Unsupported PCI assignable type: %r" % self.pa_type class VMPAError(VMError): def __init__(self, pa_type): VMError.__init__(self, pa_type) self.pa_type = pa_type def __str__(self): return ("No PCI assignable devices could be assigned " "(pci_assignable=%r)" % self.pa_type) class VMPostCreateError(VMError): def __init__(self, cmd, output): VMError.__init__(self, cmd, output) self.cmd = cmd self.output = output class VMHugePageError(VMPostCreateError): def __str__(self): return ("Cannot allocate hugepage memory (command: %r, " "output: %r)" % (self.cmd, self.output)) class VMKVMInitError(VMPostCreateError): def __str__(self): return ("Cannot initialize KVM (command: %r, output: %r)" % (self.cmd, self.output)) class VMDeadError(VMError): def __init__(self, reason='', detail=''): VMError.__init__(self) self.reason = reason self.detail = detail def __str__(self): msg = "VM is dead" if self.reason: msg += " reason: %s" % self.reason if self.detail: msg += " detail: %r" % self.detail return (msg) class VMDeadKernelCrashError(VMError): def __init__(self, kernel_crash): VMError.__init__(self, kernel_crash) self.kernel_crash = kernel_crash def __str__(self): return ("VM is dead due to a kernel crash:\n%s" % self.kernel_crash) class VMInvalidInstructionCode(VMError): def __init__(self, invalid_code): VMError.__init__(self, invalid_code) self.invalid_code = invalid_code def __str__(self): error = "" for invalid_code in self.invalid_code: error += "%s" % (invalid_code) return ("Invalid instruction was executed on VM:\n%s" % error) class VMAddressError(VMError): pass class VMPortNotRedirectedError(VMAddressError): def __init__(self, port): VMAddressError.__init__(self, port) self.port = port def __str__(self): return "Port not redirected: %s" % self.port class 
VMAddressVerificationError(VMAddressError): def __init__(self, mac, ip): VMAddressError.__init__(self, mac, ip) self.mac = mac self.ip = ip def __str__(self): return ("Could not verify DHCP lease: " "%s --> %s" % (self.mac, self.ip)) class VMMACAddressMissingError(VMAddressError): def __init__(self, nic_index): VMAddressError.__init__(self, nic_index) self.nic_index = nic_index def __str__(self): return "No MAC defined for NIC #%s" % self.nic_index class VMIPAddressMissingError(VMAddressError): def __init__(self, mac): VMAddressError.__init__(self, mac) self.mac = mac def __str__(self): return "No DHCP lease for MAC %s" % self.mac class VMUnknownNetTypeError(VMError): def __init__(self, vmname, nicname, nettype): super(VMUnknownNetTypeError, self).__init__() self.vmname = vmname self.nicname = nicname self.nettype = nettype def __str__(self): return "Unknown nettype '%s' requested for NIC %s on VM %s" % ( self.nettype, self.nicname, self.vmname) class VMAddNetDevError(VMError): pass class VMDelNetDevError(VMError): pass class VMAddNicError(VMError): pass class VMDelNicError(VMError): pass class VMMigrateError(VMError): pass class VMMigrateTimeoutError(VMMigrateError): pass class VMMigrateCancelError(VMMigrateError): pass class VMMigrateFailedError(VMMigrateError): pass class VMMigrateProtoUnknownError(error.TestNAError): def __init__(self, protocol): self.protocol = protocol def __str__(self): return ("Virt Test doesn't know migration protocol '%s'. " "You would have to add it to the list of known protocols" % self.protocol) class VMMigrateStateMismatchError(VMMigrateError): def __init__(self): VMMigrateError.__init__(self) def __str__(self): return ("Mismatch of VM state before and after migration") class VMRebootError(VMError): pass class VMStatusError(VMError): pass class VMRemoveError(VMError): pass class VMDeviceError(VMError): pass class VMDeviceNotSupportedError(VMDeviceError): def __init__(self, name, device): VMDeviceError.__init__(self, name, device) self.name = name self.device = device def __str__(self): return ("Device '%s' is not supported for vm '%s' on this Host." % (self.device, self.name)) class VMPCIDeviceError(VMDeviceError): pass class VMPCISlotInUseError(VMPCIDeviceError): def __init__(self, name, slot): VMPCIDeviceError.__init__(self, name, slot) self.name = name self.slot = slot def __str__(self): return ("PCI slot '0x%s' is already in use on vm '%s'. Please assign" " another slot in config file." % (self.slot, self.name)) class VMPCIOutOfRangeError(VMPCIDeviceError): def __init__(self, name, max_dev_num): VMPCIDeviceError.__init__(self, name, max_dev_num) self.name = name self.max_dev_num = max_dev_num def __str__(self): return ("Too many PCI devices added on vm '%s', max supported '%s'" % (self.name, str(self.max_dev_num))) class VMUSBError(VMError): pass class VMUSBControllerError(VMUSBError): pass class VMUSBControllerMissingError(VMUSBControllerError): def __init__(self, name, controller_type): VMUSBControllerError.__init__(self, name, controller_type) self.name = name self.controller_type = controller_type def __str__(self): return ("Could not find '%s' USB Controller on vm '%s'. Please " "check config files." 
% (self.controller_type, self.name))


class VMUSBControllerPortFullError(VMUSBControllerError):
    def __init__(self, name, usb_dev_dict):
        VMUSBControllerError.__init__(self, name, usb_dev_dict)
        self.name = name
        self.usb_dev_dict = usb_dev_dict

    def __str__(self):
        output = ""
        try:
            for ctl, dev_list in self.usb_dev_dict.iteritems():
                output += "%s: %s\n" % (ctl, dev_list)
        except Exception:
            pass

        return ("No available USB port left on VM %s.\n"
                "USB devices map is: \n%s" % (self.name, output))


class VMUSBPortInUseError(VMUSBError):
    def __init__(self, vm_name, controller, port):
        VMUSBError.__init__(self, vm_name, controller, port)
        self.vm_name = vm_name
        self.controller = controller
        self.port = port

    def __str__(self):
        return ("USB port '%d' of controller '%s' is already in use on vm"
                " '%s'. Please assign another port in config file."
                % (self.port, self.controller, self.vm_name))


class VMScreenInactiveError(VMError):
    def __init__(self, vm, inactive_time):
        VMError.__init__(self)
        self.vm = vm
        self.inactive_time = inactive_time

    def __str__(self):
        msg = ("%s screen is inactive for %d s (%d min)" %
               (self.vm.name, self.inactive_time, self.inactive_time/60))
        return msg


class CpuInfo(object):
    """
    A class for VM's cpu information.
    """
    def __init__(self, model=None, vendor=None, flags=None, family=None,
                 smp=0, maxcpus=0, sockets=0, cores=0, threads=0):
        """
        @param model: CPU Model of VM (use 'qemu -cpu ?' for list)
        @param vendor: CPU Vendor of VM
        @param flags: CPU Flags of VM
        @param family: CPU Family of VM
        @param smp: set the number of CPUs to 'n' [default=1]
        @param maxcpus: maximum number of total cpus, including
                        offline CPUs for hotplug, etc
        @param cores: number of CPU cores on one socket
        @param threads: number of threads on one CPU core
        @param sockets: number of discrete sockets in the system
        """
        self.model = model
        self.vendor = vendor
        self.flags = flags
        self.family = family
        self.smp = smp
        self.maxcpus = maxcpus
        self.sockets = sockets
        self.cores = cores
        self.threads = threads


class BaseVM(object):
    """
    Base class for all hypervisor specific VM subclasses.

    This class should not be used directly, that is, do not attempt to
    instantiate and use this class. Instead, one should implement a subclass
    that implements, at the very least, all methods defined right after the
    comment blocks that are marked with:

    "Public API - *must* be reimplemented with virt specific code"

    and

    "Protected API - *must* be reimplemented with virt specific classes"

    The current proposal regarding methods naming convention is:

    - Public API methods: named in the usual way, consumed by tests
    - Protected API methods: name begins with a single underline, to be
      consumed only by BaseVM and subclasses
    - Private API methods: name begins with double underline, to be consumed
      only by the VM subclass itself (usually implements virt specific
      functionality: example: __make_qemu_command())

    So-called "protected" methods are intended to be used only by VM classes,
    and not be consumed by tests. These should respect a naming convention
    and always be preceded by a single underline.

    Currently most (if not all) methods are public and appear to be consumed
    by tests. It is an ongoing task to determine whether methods should be
    "public" or "protected".
    """

    #
    # Assuming that all low-level hypervisor have at least migration via tcp
    # (true for xen & kvm). Also true for libvirt (using xen and kvm drivers)
    #
    MIGRATION_PROTOS = ['tcp', ]

    #
    # Timeout definition.
This is being kept inside the base class so that # sub classes can change the default just for themselves # LOGIN_TIMEOUT = 10 LOGIN_WAIT_TIMEOUT = 240 COPY_FILES_TIMEOUT = 600 MIGRATE_TIMEOUT = 3600 REBOOT_TIMEOUT = 240 CREATE_TIMEOUT = 5 def __init__(self, name, params): self.name = name self.params = params # # Assuming all low-level hypervisors will have a serial (like) console # connection to the guest. libvirt also supports serial (like) consoles # (virDomainOpenConsole). subclasses should set this to an object that # is or behaves like aexpect.ShellSession. # self.serial_console = None self.remote_sessions = [] # Create instance if not already set if not hasattr(self, 'instance'): self._generate_unique_id() # Don't overwrite existing state, update from params if hasattr(self, 'virtnet'): # Direct reference to self.virtnet makes pylint complain # note: virtnet.__init__() supports being called anytime getattr(self, 'virtnet').__init__(self.params, self.name, self.instance) else: # Create new self.virtnet = utils_net.VirtNet(self.params, self.name, self.instance) if not hasattr(self, 'cpuinfo'): self.cpuinfo = CpuInfo() def _generate_unique_id(self): """ Generate a unique identifier for this VM """ while True: self.instance = (time.strftime("%Y%m%d-%H%M%S-") + utils_misc.generate_random_string(8)) if not glob.glob("/tmp/*%s" % self.instance): break @staticmethod def lookup_vm_class(vm_type, target): if vm_type == 'qemu': import qemu_vm return qemu_vm.VM if vm_type == 'libvirt': import libvirt_vm return libvirt_vm.VM if vm_type == 'v2v': if target == 'libvirt' or target is None: import libvirt_vm return libvirt_vm.VM if target == 'ovirt': import ovirt return ovirt.VMManager # # Public API - could be reimplemented with virt specific code # def needs_restart(self, name, params, basedir): """ Verifies whether the current virt_install commandline matches the requested one, based on the test parameters. """ try: need_restart = (self.make_create_command() != self.make_create_command(name, params, basedir)) except Exception: need_restart = True if need_restart: logging.debug("VM params in env don't match requested, restarting.") return True else: # Command-line encoded state doesn't include all params # TODO: Check more than just networking other_virtnet = utils_net.VirtNet(params, name, self.instance) if self.virtnet != other_virtnet: logging.debug("VM params in env match, but network differs, " "restarting") logging.debug("\t" + str(self.virtnet)) logging.debug("\t!=") logging.debug("\t" + str(other_virtnet)) return True else: logging.debug("VM params in env do match requested, continuing.") return False def verify_alive(self): """ Make sure the VM is alive and that the main monitor is responsive. Can be subclassed to provide better information on why the VM is not alive (reason, detail) @raise VMDeadError: If the VM is dead @raise: Various monitor exceptions if the monitor is unresponsive """ if self.is_dead(): raise VMDeadError def get_mac_address(self, nic_index=0): """ Return the MAC address of a NIC. @param nic_index: Index of the NIC @raise VMMACAddressMissingError: If no MAC address is defined for the requested NIC """ try: mac = self.virtnet[nic_index].mac return mac except KeyError: raise VMMACAddressMissingError(nic_index) def get_address(self, index=0): """ Return the IP address of a NIC or guest (in host space). @param index: Name or index of the NIC whose address is requested. @return: 'localhost': Port redirection is in use @return: IP address of NIC if valid in arp cache. 
        @raise VMMACAddressMissingError: If no MAC address is defined for the
                requested NIC
        @raise VMIPAddressMissingError: If no IP address is found for the
                NIC's MAC address
        @raise VMAddressVerificationError: If the MAC-IP address mapping cannot
                be verified (using arping)
        """
        nic = self.virtnet[index]
        # TODO: Determine port redirection in use w/o checking nettype
        if nic.nettype not in ['bridge', 'macvtap']:
            return "localhost"
        if not nic.has_key('mac') and self.params.get('vm_type') == 'libvirt':
            # Look it up from xml
            nic.mac = self.get_virsh_mac_address(index)
        # else TODO: Look up mac from existing qemu-kvm process
        if not nic.has_key('mac'):
            raise VMMACAddressMissingError(index)
        # Get the IP address from the arp cache, try upper and lower case
        arp_ip = self.address_cache.get(nic.mac.upper())
        if not arp_ip:
            arp_ip = self.address_cache.get(nic.mac.lower())
        if not arp_ip and os.geteuid() != 0:
            # For non-root, tcpdump won't work for finding the IP address,
            # try arp
            ip_map = utils_net.parse_arp()
            arp_ip = ip_map.get(nic.mac.lower())
            if arp_ip:
                self.address_cache[nic.mac.lower()] = arp_ip
        if not arp_ip:
            raise VMIPAddressMissingError(nic.mac)
        # Make sure the IP address is assigned to one or more macs
        # for this guest
        macs = self.virtnet.mac_list()
        if not utils_net.verify_ip_address_ownership(arp_ip, macs):
            raise VMAddressVerificationError(nic.mac, arp_ip)
        logging.debug('Found/Verified IP %s for VM %s NIC %s' % (
                      arp_ip, self.name, str(index)))
        return arp_ip

    def fill_addrs(self, addrs):
        """
        Fill the VM's NIC addresses into the virtnet structure based on the
        VM's address structure addrs.

        @param addrs: Dict of interfaces and addresses
                     {"if_name": {"mac": ['addrs',],
                                  "ipv4": ['addrs',],
                                  "ipv6": ['addrs',]},
                      ...}
        """
        for virtnet in self.virtnet:
            for iface_name, iface in addrs.iteritems():
                if virtnet.mac in iface["mac"]:
                    virtnet.ip = {"ipv4": iface["ipv4"],
                                  "ipv6": iface["ipv6"]}
                    virtnet.g_nic_name = iface_name

    def get_port(self, port, nic_index=0):
        """
        Return the port in host space corresponding to port in guest space.

        @param port: Port number in guest space.
        @param nic_index: Index of the NIC.
        @return: If port redirection is used, return the host port redirected
                to guest port port. Otherwise return port.
        @raise VMPortNotRedirectedError: If an unredirected port is requested
                in user mode
        """
        nic_nettype = self.virtnet[nic_index].nettype
        if nic_nettype in ["bridge", "macvtap"]:
            return port
        else:
            try:
                return self.redirs[port]
            except KeyError:
                raise VMPortNotRedirectedError(port)

    def free_mac_address(self, nic_index_or_name=0):
        """
        Free a NIC's MAC address.

        @param nic_index_or_name: Index or name of the NIC
        """
        self.virtnet.free_mac_address(nic_index_or_name)

    @error.context_aware
    def wait_for_get_address(self, nic_index_or_name, timeout=30,
                             internal_timeout=1):
        """
        Wait for a nic to acquire an IP address, then return it.
        """
        # Don't let VMIPAddressMissingError/VMAddressVerificationError through
        def _get_address():
            try:
                return self.get_address(nic_index_or_name)
            except (VMIPAddressMissingError, VMAddressVerificationError):
                return False

        if not utils_misc.wait_for(_get_address, timeout, internal_timeout):
            raise VMIPAddressMissingError(self.virtnet[nic_index_or_name].mac)
        return self.get_address(nic_index_or_name)

    # Adding/setting up networking devices is split between 'add_*' methods
    # for setting up virtnet, and 'activate_*' methods for performing actions
    # based on those settings.
    def add_nic(self, **params):
        """
        Add a new NIC, or set up an existing one, with optional model type
        and mac address

        @param: **params: Additional NIC parameters to set.
        @param: nic_name: Name for device
        @param: mac: Optional MAC address, None to randomly generate.
        @param: ip: Optional IP address to register in address_cache
        @return: Dict with new NIC's info.
        """
        if not params.has_key('nic_name'):
            params['nic_name'] = utils_misc.generate_random_id()
        nic_name = params['nic_name']
        if nic_name in self.virtnet.nic_name_list():
            self.virtnet[nic_name].update(**params)
        else:
            self.virtnet.append(params)
        nic = self.virtnet[nic_name]
        if not nic.has_key('mac'):  # generate random mac
            logging.debug("Generating random mac address for nic")
            self.virtnet.generate_mac_address(nic_name)
        # a mac of '' or of invalid format results in not setting a mac
        if nic.has_key('ip') and nic.has_key('mac'):
            if not self.address_cache.has_key(nic.mac):
                logging.debug("(address cache) Adding static "
                              "cache entry: %s ---> %s" % (nic.mac, nic.ip))
            else:
                logging.debug("(address cache) Updating static "
                              "cache entry from: %s ---> %s"
                              " to: %s ---> %s" % (nic.mac,
                              self.address_cache[nic.mac], nic.mac, nic.ip))
            self.address_cache[nic.mac] = nic.ip
        return nic

    def del_nic(self, nic_index_or_name):
        """
        Remove the nic specified by name, or index number
        """
        nic = self.virtnet[nic_index_or_name]
        nic_mac = nic.mac.lower()
        self.free_mac_address(nic_index_or_name)
        try:
            del self.virtnet[nic_index_or_name]
            del self.address_cache[nic_mac]
        except IndexError:
            pass  # continue to not exist
        except KeyError:
            pass  # continue to not exist

    def verify_kernel_crash(self):
        """
        Find kernel crash message on the VM serial console.

        @raise: VMDeadKernelCrashError, in case a kernel crash message was
                found.
        """
        panic_re = [r"BUG:.*---\[ end trace .* \]---"]
        panic_re.append(r"----------\[ cut here.* BUG .*\[ end trace .* \]---")
        panic_re.append(r"general protection fault:.* RSP.*>")
        panic_re = "|".join(panic_re)
        if self.serial_console is not None:
            data = self.serial_console.get_output()
            match = re.search(panic_re, data, re.DOTALL | re.MULTILINE | re.I)
            if match is not None:
                raise VMDeadKernelCrashError(match.group(0))

    def verify_illegal_instruction(self):
        """
        Find illegal instruction code on VM serial console output.

        @raise: VMInvalidInstructionCode, in case a wrong instruction code is
                found.
        """
        if self.serial_console is not None:
            data = self.serial_console.get_output()
            match = re.findall(r".*trap invalid opcode.*\n", data,
                               re.MULTILINE)
            if match:
                raise VMInvalidInstructionCode(match)

    def get_params(self):
        """
        Return the VM's params dict. Most modified params take effect only
        upon VM.create().
        """
        return self.params

    def get_testlog_filename(self):
        """
        Return the testlog filename.
        """
        return "/tmp/testlog-%s" % self.instance

    def get_virtio_port_filename(self, port_name):
        """
        Return the filename corresponding to a given monitor name.
        """
        return "/tmp/virtio_port-%s-%s" % (port_name, self.instance)

    def get_virtio_port_filenames(self):
        """
        Return a list of all virtio port filenames (as specified in the VM's
        params).
        """
        return [self.get_virtio_port_filename(v) for v in
                self.params.objects("virtio_ports")]

    @error.context_aware
    def login(self, nic_index=0, timeout=LOGIN_TIMEOUT,
              username=None, password=None):
        """
        Log into the guest via SSH/Telnet/Netcat.
        If timeout expires while waiting for output from the guest (e.g. a
        password prompt or a shell prompt) -- fail.

        @param nic_index: The index of the NIC to connect to.
        @param timeout: Time (seconds) before giving up logging into the
                guest.
        @return: A ShellSession object.
        """
        error.context("logging into '%s'" % self.name)
        if not username:
            username = self.params.get("username", "")
        if not password:
            password = self.params.get("password", "")
        prompt = self.params.get("shell_prompt", "[\#\$]")
        linesep = eval("'%s'" % self.params.get("shell_linesep", r"\n"))
        client = self.params.get("shell_client")
        address = self.get_address(nic_index)
        port = self.get_port(int(self.params.get("shell_port")))
        log_filename = ("session-%s-%s.log" %
                        (self.name, utils_misc.generate_random_string(4)))
        session = remote.remote_login(client, address, port, username,
                                      password, prompt, linesep,
                                      log_filename, timeout)
        session.set_status_test_command(self.params.get("status_test_command",
                                                        ""))
        self.remote_sessions.append(session)
        return session

    def remote_login(self, nic_index=0, timeout=LOGIN_TIMEOUT,
                     username=None, password=None):
        """
        Alias for login() for backward compatibility.
        """
        return self.login(nic_index, timeout, username, password)

    def wait_for_login(self, nic_index=0, timeout=LOGIN_WAIT_TIMEOUT,
                       internal_timeout=LOGIN_TIMEOUT,
                       serial=False, restart_network=False,
                       username=None, password=None):
        """
        Make multiple attempts to log into the guest via SSH/Telnet/Netcat.

        @param nic_index: The index of the NIC to connect to.
        @param timeout: Time (seconds) to keep trying to log in.
        @param internal_timeout: Timeout to pass to login().
        @param serial: Whether to use a serial connection when remote login
                (ssh, rss) failed.
        @param restart_network: Whether to try to restart the guest's network.
        @return: A ShellSession object.
        """
        error_messages = []
        logging.debug("Attempting to log into '%s' (timeout %ds)",
                      self.name, timeout)
        end_time = time.time() + timeout
        while time.time() < end_time:
            try:
                return self.login(nic_index, internal_timeout,
                                  username, password)
            except (remote.LoginError, VMError), e:
                self.verify_alive()
                e = str(e)
                if e not in error_messages:
                    logging.debug(e)
                    error_messages.append(e)
            time.sleep(2)
        # Timeout expired
        if serial or restart_network:
            # Try to log in via the serial console
            return self.wait_for_serial_login(timeout, internal_timeout,
                                              restart_network,
                                              username, password)
        else:
            # Try one more time but don't catch exceptions
            return self.login(nic_index, internal_timeout,
                              username, password)

    @error.context_aware
    def copy_files_to(self, host_path, guest_path, nic_index=0, limit="",
                      verbose=False, timeout=COPY_FILES_TIMEOUT,
                      username=None, password=None):
        """
        Transfer files to the remote host (guest).

        @param host_path: Host path
        @param guest_path: Guest path
        @param nic_index: The index of the NIC to connect to.
        @param limit: Speed limit of file transfer.
        @param verbose: If True, log some stats using logging.debug (RSS only)
        @param timeout: Time (seconds) before giving up on doing the remote
                copy.
        """
        error.context("sending file(s) to '%s'" % self.name)
        if not username:
            username = self.params.get("username", "")
        if not password:
            password = self.params.get("password", "")
        client = self.params.get("file_transfer_client")
        address = self.get_address(nic_index)
        port = self.get_port(int(self.params.get("file_transfer_port")))
        log_filename = ("transfer-%s-to-%s-%s.log" %
                        (self.name, address,
                         utils_misc.generate_random_string(4)))
        remote.copy_files_to(address, client, username, password, port,
                             host_path, guest_path, limit, log_filename,
                             verbose, timeout)
        utils_misc.close_log_file(log_filename)

    @error.context_aware
    def copy_files_from(self, guest_path, host_path, nic_index=0, limit="",
                        verbose=False, timeout=COPY_FILES_TIMEOUT,
                        username=None, password=None):
        """
        Transfer files from the guest.

        @param guest_path: Guest path
        @param host_path: Host path
        @param nic_index: The index of the NIC to connect to.
        @param limit: Speed limit of file transfer.
        @param verbose: If True, log some stats using logging.debug (RSS only)
        @param timeout: Time (seconds) before giving up on doing the remote
                copy.
        """
        error.context("receiving file(s) from '%s'" % self.name)
        if not username:
            username = self.params.get("username", "")
        if not password:
            password = self.params.get("password", "")
        client = self.params.get("file_transfer_client")
        address = self.get_address(nic_index)
        port = self.get_port(int(self.params.get("file_transfer_port")))
        log_filename = ("transfer-%s-from-%s-%s.log" %
                        (self.name, address,
                         utils_misc.generate_random_string(4)))
        remote.copy_files_from(address, client, username, password, port,
                               guest_path, host_path, limit, log_filename,
                               verbose, timeout)
        utils_misc.close_log_file(log_filename)

    @error.context_aware
    def serial_login(self, timeout=LOGIN_TIMEOUT,
                     username=None, password=None):
        """
        Log into the guest via the serial console.
        If timeout expires while waiting for output from the guest (e.g. a
        password prompt or a shell prompt) -- fail.

        @param timeout: Time (seconds) before giving up logging into the
                guest.
        @return: ShellSession object on success and None on failure.
        """
        error.context("logging into '%s' via serial console" % self.name)
        if not username:
            username = self.params.get("username", "")
        if not password:
            password = self.params.get("password", "")
        prompt = self.params.get("shell_prompt", "[\#\$]")
        linesep = eval("'%s'" % self.params.get("shell_linesep", r"\n"))
        status_test_command = self.params.get("status_test_command", "")

        self.serial_console.set_linesep(linesep)
        self.serial_console.set_status_test_command(status_test_command)

        # Try to get a login prompt
        self.serial_console.sendline()

        remote.handle_prompts(self.serial_console, username, password,
                              prompt, timeout)
        return self.serial_console

    def wait_for_serial_login(self, timeout=LOGIN_WAIT_TIMEOUT,
                              internal_timeout=LOGIN_TIMEOUT,
                              restart_network=False,
                              username=None, password=None):
        """
        Make multiple attempts to log into the guest via serial console.

        @param timeout: Time (seconds) to keep trying to log in.
        @param internal_timeout: Timeout to pass to serial_login().
        @param restart_network: Whether to try to restart the guest's network.
        @return: A ShellSession object.
        """
        error_messages = []
        logging.debug("Attempting to log into '%s' via serial console "
                      "(timeout %ds)", self.name, timeout)
        end_time = time.time() + timeout
        while time.time() < end_time:
            try:
                session = self.serial_login(internal_timeout,
                                            username, password)
                if restart_network:
                    try:
                        utils_net.restart_guest_network(session)
                    except Exception:
                        pass
                return session
            except remote.LoginError, e:
                self.verify_alive()
                e = str(e)
                if e not in error_messages:
                    logging.debug(e)
                    error_messages.append(e)
            time.sleep(2)
        # Timeout expired; try one more time but don't catch exceptions
        return self.serial_login(internal_timeout, username, password)

    def get_uuid(self):
        """
        Return the UUID of the VM.

        @return: None, if not specified in the config file
        """
        if self.params.get("uuid") == "random":
            return self.uuid
        else:
            return self.params.get("uuid", None)

    def send_string(self, sr):
        """
        Send a string to the VM.

        @param sr: String, that must consist of alphanumeric characters only.
                Capital letters are allowed.
        """
        for char in sr:
            if char.isupper():
                self.send_key("shift-%s" % char.lower())
            else:
                self.send_key(char)

    def get_cpu_count(self):
        """
        Get the cpu count of the VM.
        """
        session = self.wait_for_login()
        try:
            return int(session.cmd(self.params.get("cpu_chk_cmd")))
        finally:
            session.close()

    def get_memory_size(self, cmd=None, timeout=60, re_str=None):
        """
        Get bootup memory size of the VM.

        @param cmd: Command used to check memory. If not provided,
                self.params.get("mem_chk_cmd") will be used.
        @param timeout: timeout for cmd
        @param re_str: pattern to get memory size from the command output. If
                not provided, self.params.get("mem_chk_re_str") will be used.
        """
        session = self.login()
        if re_str is None:
            re_str = self.params.get("mem_chk_re_str", "([0-9]+)")
        try:
            if not cmd:
                cmd = self.params.get("mem_chk_cmd")
            mem_str = session.cmd_output(cmd, timeout=timeout)
            mem = re.findall(re_str, mem_str)
            mem_size = 0
            for m in mem:
                mem_size += int(m)
            if "GB" in mem_str:
                mem_size *= 1024
            elif "MB" in mem_str:
                pass
            else:
                mem_size /= 1024
            return int(mem_size)
        finally:
            session.close()

    def get_current_memory_size(self):
        """
        Get current memory size of the VM, rather than bootup memory.
        """
        cmd = self.params.get("mem_chk_cur_cmd")
        return self.get_memory_size(cmd)

    #
    # Public API - *must* be reimplemented with virt specific code
    #
    def is_alive(self):
        """
        Return True if the VM is alive and the management interface is
        responsive.
        """
        raise NotImplementedError

    def is_dead(self):
        """
        Return True if the VM is dead.
        """
        raise NotImplementedError

    def is_paused(self):
        """
        Return True if the VM is paused
        """
        raise NotImplementedError

    def activate_nic(self, nic_index_or_name):
        """
        Activate an inactive network device

        @param: nic_index_or_name: name or index number for existing NIC
        """
        raise NotImplementedError

    def deactivate_nic(self, nic_index_or_name):
        """
        Deactivate an active network device

        @param: nic_index_or_name: name or index number for existing NIC
        """
        raise NotImplementedError

    def verify_userspace_crash(self):
        """
        Verify if the userspace component of the virtualization backend
        crashed.
        """
        pass

    def clone(self, name, **params):
        """
        Return a clone of the VM object with optionally modified parameters.

        This method should be implemented by subclasses.
        """
        raise NotImplementedError

    def destroy(self, gracefully=True, free_mac_addresses=True):
        """
        Destroy the VM.
        If gracefully is True, first attempt to shutdown the VM with a shell
        command. Then, attempt to destroy the VM via the monitor with a 'quit'
        command. If that fails, send SIGKILL to the qemu process.

        @param gracefully: If True, an attempt will be made to end the VM
                using a shell command before trying to end the qemu process
                with a 'quit' or a kill signal.
        @param free_mac_addresses: If True, the MAC addresses used by the VM
                will be freed.
        """
        raise NotImplementedError

    def migrate(self, timeout=MIGRATE_TIMEOUT, protocol="tcp",
                cancel_delay=None, offline=False, stable_check=False,
                clean=True, save_path="/tmp", dest_host="localhost",
                remote_port=None):
        """
        Migrate the VM.
        If the migration is local, the VM object's state is switched with
        that of the destination VM. Otherwise, the state is switched with
        that of a dead VM (returned by self.clone()).

        @param timeout: Time to wait for migration to complete.
        @param protocol: Migration protocol ('tcp', 'unix' or 'exec').
        @param cancel_delay: If provided, specifies a time duration after
                which migration will be canceled. Used for testing
                migrate_cancel.
        @param offline: If True, pause the source VM before migration.
        @param stable_check: If True, compare the VM's state after migration
                to its state before migration and raise an exception if they
                differ.
        @param clean: If True, delete the saved state files (relevant only if
                stable_check is also True).
        @param save_path: The path for state files.
        @param dest_host: Destination host (defaults to 'localhost').
        @param remote_port: Port to use for remote migration.
        """
        raise NotImplementedError

    def reboot(self, session=None, method="shell", nic_index=0,
               timeout=REBOOT_TIMEOUT):
        """
        Reboot the VM and wait for it to come back up by trying to log in
        until timeout expires.

        @param session: A shell session object or None.
        @param method: Reboot method. Can be "shell" (send a shell reboot
                command) or "system_reset" (send a system_reset monitor
                command).
        @param nic_index: Index of NIC to access in the VM, when logging in
                after rebooting.
        @param timeout: Time to wait for login to succeed (after rebooting).
        @return: A new shell session object.
        """
        raise NotImplementedError

    # should this really be expected from VMs of all hypervisor types?
    def send_key(self, keystr):
        """
        Send a key event to the VM.

        @param: keystr: A key event string (e.g. "ctrl-alt-delete")
        """
        raise NotImplementedError

    def save_to_file(self, path):
        """
        Record the state of a paused VM to path and shut the VM down on
        success.
        Throws a VMStatusError if the before/after state is incorrect.

        @param: path: file where the VM state is recorded
        """
        raise NotImplementedError

    def restore_from_file(self, path):
        """
        Resume a shutdown or paused VM from path, possibly setting it running.
        Throws a VMStatusError if the before/after restore state is incorrect.

        @param: path: path to the file the VM state was saved to
        """
        raise NotImplementedError

    def savevm(self, tag_name):
        """
        Save the virtual machine as the tag 'tag_name'

        @param: tag_name: tag under which the virtual machine state is saved
        """
        raise NotImplementedError

    def loadvm(self, tag_name):
        """
        Load the virtual machine tagged 'tag_name'.

        @param: tag_name: tag under which the virtual machine state was saved
        """
        raise NotImplementedError

    def pause(self):
        """
        Stop the VM operation.
        """
        raise NotImplementedError

    def resume(self):
        """
        Resume the VM operation in case it's stopped.
        """
        raise NotImplementedError
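# Hedged usage sketch for the base-class API above; `vm` is assumed to be
# an instance of a concrete hypervisor-specific subclass (the base class
# leaves create()/is_alive()/destroy() and friends to subclasses), and the
# file paths are made up.
session = vm.wait_for_login(nic_index=0, timeout=240)
try:
    print session.cmd("uname -a")                      # run a guest command
    ip = vm.get_address(0)                             # ARP-verified guest IP
    vm.copy_files_to("/tmp/payload", "/tmp/payload")   # host -> guest copy
finally:
    session.close()
vm.destroy(gracefully=True)   # shell shutdown first, then kill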
gpl-2.0
1,149,361,905,148,768,600
32.972288
86
0.573007
false
manahl/arctic
tests/integration/store/test_pickle_store.py
1
4373
from datetime import datetime as dt, timedelta

import bson
import numpy as np
from mock import patch

from arctic._util import mongo_count
from arctic.arctic import Arctic


def test_save_read_bson(library):
    blob = {'foo': dt(2015, 1, 1), 'bar': ['a', 'b', ['x', 'y', 'z']]}
    library.write('BLOB', blob)
    saved_blob = library.read('BLOB').data
    assert blob == saved_blob


'''
Run test at your own discretion. Takes > 60 secs

def test_save_read_MASSIVE(library):
    import pandas as pd
    df = pd.DataFrame(data={'data': [1] * 150000000})
    data = (df, df)
    library.write('BLOB', data)
    saved_blob = library.read('BLOB').data
    assert(saved_blob[0].equals(df))
    assert(saved_blob[1].equals(df))
'''


def test_save_read_big_encodable(library):
    blob = {'foo': 'a' * 1024 * 1024 * 20}
    library.write('BLOB', blob)
    saved_blob = library.read('BLOB').data
    assert blob == saved_blob


def test_save_read_bson_object(library):
    blob = {'foo': dt(2015, 1, 1), 'object': Arctic}
    library.write('BLOB', blob)
    saved_blob = library.read('BLOB').data
    assert blob == saved_blob


def test_get_info_bson_object(library):
    blob = {'foo': dt(2015, 1, 1), 'object': Arctic}
    library.write('BLOB', blob)
    assert library.get_info('BLOB')['handler'] == 'PickleStore'


def test_bson_large_object(library):
    blob = {'foo': dt(2015, 1, 1), 'object': Arctic,
            'large_thing': np.random.rand(int(2.1 * 1024 * 1024)).tostring()}
    assert len(blob['large_thing']) > 16 * 1024 * 1024
    library.write('BLOB', blob)
    saved_blob = library.read('BLOB').data
    assert blob == saved_blob


def test_bson_leak_objects_delete(library):
    blob = {'foo': dt(2015, 1, 1), 'object': Arctic}
    library.write('BLOB', blob)
    assert mongo_count(library._collection) == 1
    assert mongo_count(library._collection.versions) == 1
    library.delete('BLOB')
    assert mongo_count(library._collection) == 0
    assert mongo_count(library._collection.versions) == 0


def test_bson_leak_objects_prune_previous(library):
    blob = {'foo': dt(2015, 1, 1), 'object': Arctic}

    yesterday = dt.utcnow() - timedelta(days=1, seconds=1)
    _id = bson.ObjectId.from_datetime(yesterday)
    with patch("bson.ObjectId", return_value=_id):
        library.write('BLOB', blob)
    assert mongo_count(library._collection) == 1
    assert mongo_count(library._collection.versions) == 1

    _id = bson.ObjectId.from_datetime(dt.utcnow() - timedelta(minutes=130))
    with patch("bson.ObjectId", return_value=_id):
        library.write('BLOB', {}, prune_previous_version=False)
    assert mongo_count(library._collection) == 1
    assert mongo_count(library._collection.versions) == 2

    # This write should prune the oldest version in the chunk collection
    library.write('BLOB', {})
    assert mongo_count(library._collection) == 0
    assert mongo_count(library._collection.versions) == 2


def test_prune_previous_doesnt_kill_other_objects(library):
    blob = {'foo': dt(2015, 1, 1), 'object': Arctic}

    yesterday = dt.utcnow() - timedelta(days=1, seconds=1)
    _id = bson.ObjectId.from_datetime(yesterday)
    with patch("bson.ObjectId", return_value=_id):
        library.write('BLOB', blob, prune_previous_version=False)
    assert mongo_count(library._collection) == 1
    assert mongo_count(library._collection.versions) == 1

    _id = bson.ObjectId.from_datetime(dt.utcnow() - timedelta(hours=10))
    with patch("bson.ObjectId", return_value=_id):
        library.write('BLOB', blob, prune_previous_version=False)
    assert mongo_count(library._collection) == 1
    assert mongo_count(library._collection.versions) == 2

    # This write should prune the oldest version in the chunk collection
    library.write('BLOB', {})
    assert mongo_count(library._collection) == 1
    assert
mongo_count(library._collection.versions) == 2 library._delete_version('BLOB', 2) assert mongo_count(library._collection) == 0 assert mongo_count(library._collection.versions) == 1 def test_write_metadata(library): blob = {'foo': dt(2015, 1, 1), 'object': Arctic} library.write(symbol='symX', data=blob, metadata={'key1': 'value1'}) library.write_metadata(symbol='symX', metadata={'key2': 'value2'}) v = library.read('symX') assert v.data == blob assert v.metadata == {'key2': 'value2'}
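# Hedged setup sketch: the `library` fixture above comes from Arctic's
# pytest plumbing; this is a minimal equivalent by hand, assuming a MongoDB
# instance on localhost. The library name 'user.blobs' is made up.
from arctic.arctic import Arctic

store = Arctic('localhost')
store.initialize_library('user.blobs')
library = store['user.blobs']
library.write('BLOB', {'answer': 42})
print(library.read('BLOB').data)   # -> {'answer': 42}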
lgpl-2.1
3,256,457,355,782,051,300
34.266129
77
0.658815
false
SINGROUP/pycp2k
pycp2k/classes/_each134.py
1
1114
from pycp2k.inputsection import InputSection class _each134(InputSection): def __init__(self): InputSection.__init__(self) self.Just_energy = None self.Powell_opt = None self.Qs_scf = None self.Xas_scf = None self.Md = None self.Pint = None self.Metadynamics = None self.Geo_opt = None self.Rot_opt = None self.Cell_opt = None self.Band = None self.Ep_lin_solver = None self.Spline_find_coeffs = None self.Replica_eval = None self.Bsse = None self.Shell_opt = None self.Tddft_scf = None self._name = "EACH" self._keywords = {'Bsse': 'BSSE', 'Cell_opt': 'CELL_OPT', 'Just_energy': 'JUST_ENERGY', 'Band': 'BAND', 'Xas_scf': 'XAS_SCF', 'Rot_opt': 'ROT_OPT', 'Replica_eval': 'REPLICA_EVAL', 'Tddft_scf': 'TDDFT_SCF', 'Shell_opt': 'SHELL_OPT', 'Md': 'MD', 'Pint': 'PINT', 'Metadynamics': 'METADYNAMICS', 'Geo_opt': 'GEO_OPT', 'Spline_find_coeffs': 'SPLINE_FIND_COEFFS', 'Powell_opt': 'POWELL_OPT', 'Qs_scf': 'QS_SCF', 'Ep_lin_solver': 'EP_LIN_SOLVER'}
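# Hedged usage sketch: in a full pycp2k input tree an EACH section hangs
# off a parent PRINT section, but the attribute-to-keyword mapping can be
# shown on the generated class alone.
from pycp2k.classes._each134 import _each134

each = _each134()
each.Qs_scf = 5    # emit output every 5 SCF iterations
each.Md = 10       # ...and every 10 MD steps
# When the input file is written, pycp2k translates the attributes via
# self._keywords, e.g. Qs_scf -> "QS_SCF 5" inside &EACH ... &END EACH.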
lgpl-3.0
-8,482,029,577,158,587,000
41.846154
447
0.576302
false
Tomographer/tomographer
test/pytest_t_mhrwtasks.py
1
6356
#!/usr/bin/env python from __future__ import print_function import re import numpy as np import numpy.testing as npt import logging logging.basicConfig(level=logging.DEBUG) import unittest # import the module import tomographer.mhrwtasks import tomographer class MHRWTasksStuff(unittest.TestCase): def test_fields(self): def prg_callback(x): print(x.getHumanReport()) # just run tomorun on some arbitrary data to get some stuff to check mhrw_params = tomographer.MHRWParams( step_size=0.04, n_sweep=25, n_run=8192, n_therm=500) hist_params = tomographer.HistogramParams(0.985, 1, 20) binning_num_levels = 7 r = tomographer.tomorun.tomorun( dim=2, Emn=[ np.array([[0.5, -0.5j], [0.5j, 0.5]]), np.array([[0.5, 0.5j], [-0.5j, 0.5]]) ], Nm=np.array([ 500, 0 ]), fig_of_merit="obs-value", observable=np.array([[0.5, -0.5j], [0.5j, 0.5]]), num_repeats=2, binning_num_levels=binning_num_levels, mhrw_params=mhrw_params, hist_params=hist_params, ctrl_step_size_params={'enable': False}, progress_interval_ms=500, progress_fn=prg_callback, ) # check that all fields are there and display meaningful values runres = r['runs_results'][0] self.assertAlmostEqual(runres.mhrw_params.mhwalker_params["step_size"], mhrw_params.mhwalker_params["step_size"]) self.assertEqual(runres.mhrw_params.n_sweep, mhrw_params.n_sweep) self.assertEqual(runres.mhrw_params.n_therm, mhrw_params.n_therm) self.assertEqual(runres.mhrw_params.n_run, mhrw_params.n_run) self.assertGreater(runres.acceptance_ratio, 0.2) self.assertLess(runres.acceptance_ratio, 0.4) stats_results = runres.stats_results self.assertEqual(stats_results.histogram.numBins(), hist_params.num_bins) npt.assert_array_equal(stats_results.error_levels.shape, [hist_params.num_bins, binning_num_levels+1]) # the last error level should be the reported error bar: npt.assert_array_almost_equal(stats_results.error_levels[:, binning_num_levels], stats_results.histogram.delta) for c in stats_results.converged_status: self.assertIn(c, (tomographer.BinningAnalysis.CONVERGED, tomographer.BinningAnalysis.NOT_CONVERGED, tomographer.BinningAnalysis.UNKNOWN_CONVERGENCE) ) def test_binningbarssummary(self): x = tomographer.BinningErrorBarConvergenceSummary() self.assertEqual(x.n_bins, 0) self.assertEqual(x.n_converged, 0) self.assertEqual(x.n_unknown, 0) self.assertEqual(x.n_unknown_isolated, 0) self.assertEqual(x.n_not_converged, 0) x = tomographer.BinningErrorBarConvergenceSummary(n_bins=1,n_converged=2,n_unknown=3, n_unknown_isolated=4,n_not_converged=5) self.assertEqual(x.n_bins, 1) self.assertEqual(x.n_converged, 2) self.assertEqual(x.n_unknown, 3) self.assertEqual(x.n_unknown_isolated, 4) self.assertEqual(x.n_not_converged, 5) def test_pickle(self): hist = tomographer.HistogramWithErrorBars(0, 1, 3) stats_results = tomographer.ValueHistogramWithBinningMHRWStatsCollectorResult( hist, np.array([ [ 1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12] ]), np.array([tomographer.BinningAnalysis.CONVERGED, tomographer.BinningAnalysis.NOT_CONVERGED, tomographer.BinningAnalysis.UNKNOWN_CONVERGENCE]) ) mhrw_task_result = tomographer.mhrwtasks.MHRandomWalkTaskResult( stats_results, tomographer.MHRWParams(0.03, 37, 400, 65538), 0.27 ) # see http://pybind11.readthedocs.io/en/master/advanced/classes.html#pickling-support try: import cPickle as pickle except: import pickle s = pickle.dumps(mhrw_task_result,2) print("PICKLE:\n"+str(s)) mhrw_task_result2 = pickle.loads(s) m = mhrw_task_result m2 = mhrw_task_result2 self.assertAlmostEqual(m.acceptance_ratio, m2.acceptance_ratio) 
self.assertAlmostEqual(m.mhrw_params.mhwalker_params, m2.mhrw_params.mhwalker_params) self.assertEqual(m.mhrw_params.n_sweep, m2.mhrw_params.n_sweep) self.assertEqual(m.mhrw_params.n_therm, m2.mhrw_params.n_therm) self.assertEqual(m.mhrw_params.n_run, m2.mhrw_params.n_run) npt.assert_array_almost_equal(m.stats_results.histogram.bins, m2.stats_results.histogram.bins) npt.assert_array_almost_equal(m.stats_results.histogram.delta, m2.stats_results.histogram.delta) npt.assert_array_almost_equal(m.stats_results.error_levels, m2.stats_results.error_levels) npt.assert_array_equal(m.stats_results.converged_status, m2.stats_results.converged_status) summary = m2.stats_results.errorBarConvergenceSummary() self.assertEqual(summary.n_bins, m.stats_results.histogram.numBins()) self.assertEqual(summary.n_converged, 1) self.assertEqual(summary.n_unknown, 1) self.assertEqual(summary.n_unknown_isolated, 0) self.assertEqual(summary.n_not_converged, 1) # pickle the summary itself? s2 = pickle.dumps(summary,2) print("PICKLE OF SUMMARY:\n"+str(s2)) summary2 = pickle.loads(s2) self.assertEqual(summary2.n_bins, m.stats_results.histogram.numBins()) self.assertEqual(summary2.n_converged, 1) self.assertEqual(summary2.n_unknown, 1) self.assertEqual(summary2.n_unknown_isolated, 0) self.assertEqual(summary2.n_not_converged, 1) # # normally, this is not needed as we are being run via pyruntest.py, but it might be # useful if we want to run individually picked tests if __name__ == '__main__': unittest.main()
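# Hedged sketch: post-processing the tomorun result dict used in
# test_fields above, relying only on attributes the test itself exercises;
# `r` stands for the dict returned by tomographer.tomorun.tomorun(...).
ratios = [run.acceptance_ratio for run in r['runs_results']]
print("mean acceptance ratio: %.3f" % (sum(ratios) / len(ratios)))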
mit
-1,653,078,731,656,334,000
38.234568
104
0.614695
false
anchore/anchore
anchore/anchore-modules/gates/11_check_image.py
1
1615
#!/usr/bin/env python

import sys
import os
import json
import re
import anchore

from anchore import anchore_utils

gate_name = "IMAGECHECK"
triggers = {
    'BASEOUTOFDATE':
    {
        'description': 'triggers if the image\'s base image has been updated since the image was built/analyzed',
        'params': 'None'
    }
}

try:
    config = anchore.anchore_utils.init_gate_cmdline(sys.argv, gate_name, gate_help=triggers)
except Exception as err:
    print str(err)
    sys.exit(1)

if not config:
    print "ERROR: could not set up environment for gate"
    sys.exit(1)

imageId = config['imgid']

try:
    params = config['params']
except:
    params = None

outlist = list()
# compare the stored base image ID with the latest detected one
try:
    idata = anchore.anchore_utils.load_image_report(imageId)
    humanname = idata['meta']['humanname']
    dockerfile_mode = idata['dockerfile_mode']
    if dockerfile_mode == 'Actual':
        realbaseid = None
        if idata and 'familytree' in idata and len(idata['familytree']) > 0:
            realbaseid = idata['familytree'][0]

        (thefrom, thefromid) = anchore.anchore_utils.discover_from_info(idata['dockerfile_contents'])
        if realbaseid != thefromid:
            outlist.append("BASEOUTOFDATE Image stored base image ("+str(thefrom)+") ID is ("+str(realbaseid)[0:12]+"), but the latest detected base ID for ("+str(thefrom)+") is ("+str(thefromid)[0:12]+")")

except Exception as err:
    import traceback
    traceback.print_exc()
    print "ERROR: Exception: " + str(err)
    sys.exit(1)

# write output
anchore.anchore_utils.save_gate_output(imageId, gate_name, outlist)

sys.exit(0)
apache-2.0
1,436,451,591,911,512,000
25.47541
206
0.667492
false
bluven/eonboard
eoncloud_web/biz/account/views.py
1
12004
# -*- coding: utf-8 -*-

from datetime import datetime

import logging

from rest_framework import generics
from rest_framework import status
from rest_framework.response import Response
from rest_framework.decorators import api_view

from django.conf import settings
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.auth.models import User
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.utils import timezone

from biz.account.forms import CloudUserCreateForm
from biz.account.models import Contract, Operation, Quota, UserProxy, QUOTA_ITEM
from biz.account.serializer import ContractSerializer, OperationSerializer, UserSerializer, QuotaSerializer
from biz.account.utils import get_quota_usage
from biz.idc.models import DataCenter

from eoncloud_web.pagination import PagePagination

LOG = logging.getLogger(__name__)


def signup(request, template_name="signup.html"):
    error = None

    if request.method == "GET":
        userCreationForm = CloudUserCreateForm()
    elif request.method == "POST":
        user = User()
        userCreationForm = CloudUserCreateForm(data=request.POST, instance=user)
        if userCreationForm.is_valid():
            userCreationForm.save()
            return HttpResponseRedirect(reverse("signup_success"))

        if userCreationForm.errors.has_key("__all__"):
            error = userCreationForm.errors['__all__']
        else:
            error = userCreationForm.errors

    return render_to_response(template_name, RequestContext(request, {
        "MCC": settings.MCC,
        "SOURCE": settings.SOURCE,
        "USER_TYPE": settings.USER_TYPE,
        "BRAND": settings.BRAND,
        "userCreationForm": userCreationForm,
        "error": error,
    }))


def signup_success(request, template_name="signup_success.html"):
    return render_to_response(template_name, RequestContext(request, {
        "BRAND": settings.BRAND,
    }))


def find_password(request, template_name="find_password.html"):
    return render_to_response(template_name, RequestContext(request, {
        "BRAND": settings.BRAND,
    }))


@api_view(["GET"])
def contract_view(request):
    c = Contract.objects.filter(user=request.user,
                                udc__id=request.session["UDC_ID"])[0]
    s = ContractSerializer(c)
    return Response(s.data)


@api_view(["GET"])
def quota_view(request):
    quota = get_quota_usage(request.user, request.session["UDC_ID"])
    return Response(quota)


class OperationList(generics.ListAPIView):
    queryset = Operation.objects
    serializer_class = OperationSerializer
    pagination_class = PagePagination

    def get_queryset(self):
        request = self.request
        resource = request.query_params.get('resource')
        resource_name = request.query_params.get('resource_name')
        start_date = request.query_params.get('start_date')
        end_date = request.query_params.get('end_date')

        queryset = super(OperationList, self).get_queryset()

        if resource:
            queryset = queryset.filter(resource=resource)

        if resource_name:
            queryset = queryset.filter(resource_name__istartswith=resource_name)

        if start_date:
            queryset = queryset.filter(create_date__gte=start_date)

        if end_date:
            queryset = queryset.filter(create_date__lte=end_date)

        if request.user.is_superuser:
            data_center_pk = request.query_params.get('data_center', '')
            operator_pk = request.query_params.get('operator', '')

            if data_center_pk:
                queryset = queryset.filter(udc__data_center__pk=data_center_pk)

            if operator_pk:
                queryset = queryset.filter(user__pk=operator_pk)
        else:
            queryset = queryset.filter(user=request.user,
                                       udc__id=request.session["UDC_ID"])

        return queryset.order_by('-create_date')


@api_view()
def operation_filters(request):
    resources =
Operation.objects.values('resource').distinct() for data in resources: data['name'] = _(data['resource']) return Response({ "resources": resources, "operators": UserProxy.normal_users.values('pk', 'username'), "data_centers": DataCenter.objects.values('pk', 'name') }) class ContractList(generics.ListCreateAPIView): queryset = Contract.living.filter(deleted=False) serializer_class = ContractSerializer def list(self, request, *args, **kwargs): serializer = ContractSerializer(self.get_queryset(), many=True) return Response(serializer.data) class ContractDetail(generics.RetrieveAPIView): queryset = Contract.living.all() serializer_class = ContractSerializer @api_view(['POST']) def create_contract(request): try: serializer = ContractSerializer(data=request.data, context={"request": request}) if serializer.is_valid(): contract = serializer.save() Operation.log(contract, contract.name, 'create', udc=contract.udc, user=request.user) return Response({'success': True, "msg": _('Contract is created successfully!')}, status=status.HTTP_201_CREATED) else: return Response({"success": False, "msg": _('Contract data is not valid!'), 'errors': serializer.errors}, status=status.HTTP_400_BAD_REQUEST) except Exception as e: LOG.error("Failed to create contract, msg:[%s]" % e) return Response({"success": False, "msg": _('Failed to create contract for unknown reason.')}) @api_view(['POST']) def update_contract(request): try: pk = request.data['id'] contract = Contract.objects.get(pk=pk) contract.name = request.data['name'] contract.customer = request.data['customer'] contract.start_date = datetime.strptime(request.data['start_date'], '%Y-%m-%d %H:%M:%S') contract.end_date = datetime.strptime(request.data['end_date'], '%Y-%m-%d %H:%M:%S') contract.save() Operation.log(contract, contract.name, 'update', udc=contract.udc, user=request.user) return Response({'success': True, "msg": _('Contract is updated successfully!')}, status=status.HTTP_201_CREATED) except Exception as e: LOG.error("Failed to update contract, msg:[%s]" % e) return Response({"success": False, "msg": _('Failed to update contract for unknown reason.')}) @api_view(['POST']) def delete_contracts(request): try: contract_ids = request.data.getlist('contract_ids[]') for contract_id in contract_ids: contract = Contract.objects.get(pk=contract_id) contract.deleted = True contract.save() Quota.living.filter(contract__pk=contract_id).update(deleted=True, update_date=timezone.now()) Operation.log(contract, contract.name, 'delete', udc=contract.udc, user=request.user) return Response({'success': True, "msg": _('Contracts have been deleted!')}, status=status.HTTP_201_CREATED) except Exception as e: LOG.error("Failed to delete contracts, msg:[%s]" % e) return Response({"success": False, "msg": _('Failed to delete contracts for unknown reason.')}) class UserList(generics.ListAPIView): queryset = UserProxy.normal_users serializer_class = UserSerializer def list(self, request, *args, **kwargs): serializer = self.serializer_class(self.get_queryset(), many=True) return Response(serializer.data) class UserDetail(generics.RetrieveUpdateDestroyAPIView): queryset = User.objects.all() serializer_class = UserSerializer def perform_destroy(self, instance): instance.is_active = False instance.save() @api_view(['POST']) def deactivate_user(request): pk = request.data['id'] user = User.objects.get(pk=pk) user.is_active = False user.save() return Response({"success": True, "msg": _('User has been deactivated!')}, status=status.HTTP_200_OK) @api_view(['POST']) def 
activate_user(request):
    pk = request.data['id']
    user = User.objects.get(pk=pk)
    user.is_active = True
    user.save()
    return Response({"success": True, "msg": _('User has been activated!')},
                    status=status.HTTP_200_OK)


class QuotaList(generics.ListAPIView):
    queryset = Quota.living
    serializer_class = QuotaSerializer

    def list(self, request, *args, **kwargs):
        queryset = self.get_queryset()
        if 'contract_id' in request.query_params:
            queryset = queryset.filter(contract__id=request.query_params['contract_id'])
        return Response(self.serializer_class(queryset, many=True).data)


class QuotaDetail(generics.RetrieveUpdateDestroyAPIView):
    queryset = Quota.living
    serializer_class = QuotaSerializer


@api_view(['GET'])
def resource_options(request):
    return Response(QUOTA_ITEM)


@api_view(['POST'])
def create_quotas(request):
    try:
        contract = Contract.objects.get(pk=request.data['contract_id'])
        quota_ids = request.data.getlist('ids[]')
        resources = request.data.getlist('resources[]')
        limits = request.data.getlist('limits[]')

        for index, quota_id in enumerate(quota_ids):
            resource, limit = resources[index], limits[index]
            if quota_id and Quota.living.filter(contract=contract, pk=quota_id).exists():
                Quota.objects.filter(pk=quota_id).update(resource=resource, limit=limit, update_date=timezone.now())
            else:
                Quota.objects.create(resource=resource, limit=limit, contract=contract)

        Operation.log(contract, contract.name + " quota", 'update', udc=contract.udc, user=request.user)

        return Response({'success': True, "msg": _('Quotas have been saved successfully!')},
                        status=status.HTTP_201_CREATED)
    except Exception as e:
        LOG.error("Failed to save quotas, msg:[%s]" % e)
        return Response({"success": False, "msg": _('Failed to save quotas for unknown reason.')})


@api_view(['POST'])
def create_quota(request):
    try:
        contract = Contract.objects.get(pk=request.data['contract'])
        resource, limit = request.data['resource'], request.data['limit']
        pk = request.data['id'] if 'id' in request.data else None

        if pk and Quota.objects.filter(pk=pk).exists():
            quota = Quota.objects.get(pk=pk)
            quota.limit = limit
            quota.save()
        else:
            quota = Quota.objects.create(resource=resource, limit=limit, contract=contract)

        return Response({'success': True, "msg": _('Quota has been saved successfully!'),
                         "quota": QuotaSerializer(quota).data},
                        status=status.HTTP_201_CREATED)
    except Exception as e:
        LOG.error("Failed to save quota, msg:[%s]" % e)
        return Response({"success": False, "msg": _('Failed to save quota for unknown reason.')})


@api_view(['POST'])
def delete_quota(request):
    try:
        Quota.living.filter(pk=request.data['id']).update(deleted=True)
        return Response({'success': True, "msg": _('Quota has been deleted successfully!')},
                        status=status.HTTP_201_CREATED)
    except Exception as e:
        LOG.error("Failed to delete quota, msg:[%s]" % e)
        return Response({"success": False, "msg": _('Failed to delete quota for unknown reason.')})


@api_view(["GET"])
def get_config_view(request):
    return Response(settings.SITE_CONFIG)
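# Hedged sketch: exercising one of these endpoints with DRF's test client.
# The URL and the `admin_user` object are assumptions, since the urlconf is
# not part of this file; APIClient/force_authenticate are standard DRF
# test helpers.
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=admin_user)
resp = client.post('/api/quotas/create/',
                   {'contract': 1, 'resource': 'instance', 'limit': 10},
                   format='json')
print(resp.data.get('success'))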
apache-2.0
4,892,102,443,532,302,000
32.252078
116
0.635955
false
nikobockerman/rsnapshot-backup
rsnapshotbackup/argumentparser.py
1
1573
'''
http://stackoverflow.com/a/5943381
'''

import argparse
import sys


class ArgParser(argparse.ArgumentParser):
    def __init__(self, *args, **kwargs):
        """Initialisation method for the parser class"""
        super(ArgParser, self).__init__(*args, **kwargs)
        #argparse.ArgumentParser.__init__(self, *args, **kwargs)

    def _get_action_from_name(self, name):
        """Given a name, get the Action instance registered with this parser.
        If only it were made available in the ArgumentError object. It is
        passed as its first arg...
        """
        container = self._actions
        if name is None:
            return None
        for action in container:
            if '/'.join(action.option_strings) == name:
                return action
            elif action.metavar == name:
                return action
            elif action.dest == name:
                return action

    def error(self, message):
        exc = sys.exc_info()[1]
        if exc:
            exc.argument = self._get_action_from_name(exc.argument_name)
            raise exc
        raise argparse.ArgumentError(None, message)


if __name__ == "__main__":
    print ("Starting")
    parser = ArgParser()
    parser.add_argument('foo')
    parser.add_argument('boo')
    try:
        parser.parse_args(['fsdf'])
        print ("Successfully parsed arguments")
    except argparse.ArgumentError as exc:
        print ("Exception caught:")
        print (exc.message)
        # error() may raise an ArgumentError that never had .argument set
        if getattr(exc, 'argument', None) is not None:
            print (exc.argument)
bsd-2-clause
-1,816,188,652,166,511,000
30.46
77
0.574698
false
surculus12/py-hyphen
hyphen/knuth_liang.py
1
1704
""" Our implementation of knuth-liang """ from string import digits from collections import OrderedDict from .language_patterns import LanguagePatterns class KnuthLiang(object): """ This class implements knuth-liang """ __slots__ = ['language_patterns', 'limit_left', 'limit_right'] def __init__(self, lang_code=None, file_path=None, limit_left=2, limit_right=3): self.language_patterns = LanguagePatterns(lang_code, file_path) self.limit_left = limit_left self.limit_right = limit_right def hyphenate_word(self, word_input): "Hyphenates a word" word = '.' + word_input + '.' word_len = len(word) found_patterns = OrderedDict() # key order matters later for left_pos in range(word_len): for pattern in self.language_patterns.iterate(word[left_pos:].lower()): for patt_pos in pattern: index = patt_pos + left_pos - 1 if (index not in found_patterns or found_patterns[index] < pattern[patt_pos]): found_patterns[index] = pattern[patt_pos] # we don't hyphen at the left-right limits for i in (range(0, self.limit_left) + range(word_len - self.limit_right, word_len)): if i in found_patterns: del found_patterns[i] # we find all the odd-numbered digits in the pattern and hyphenate hyphens = (h for h in found_patterns.keys() if found_patterns[h] & 1) for i, hyphen in enumerate(hyphens): index = i + hyphen + 1 word = word[:index] + '-' + word[index:] return word[1:-1]
mit
6,763,296,129,892,360,000
34.5
84
0.583333
false
pferreir/indico-backup
indico/MaKaC/webinterface/common/contribFilters.py
1
15606
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico; if not, see <http://www.gnu.org/licenses/>.

import MaKaC.common.filters as filters
from MaKaC.webinterface.common.contribStatusWrapper import ContribStatusList
from indico.util.string import natural_sort_key


class TypeFilterField( filters.FilterField ):
    """
    """
    _id = "type"

    def satisfies( self, contribution ):
        """
        """
        if len(self._conf.getContribTypeList()) == len(self._values) and contribution.getType():
            return True
        elif contribution.getType() is None:
            return self._showNoValue
        else:
            return contribution.getType().getId() in self._values


class TrackFilterField( filters.FilterField ):
    """Contains the filtering criteria for the track of a contribution.

        Inherits from: AbstractFilterField

        Attributes:
            _values -- (list) List of track identifiers;
            _showNoValue -- (bool) Tells whether a contribution satisfies the
                filter if it doesn't belong to any track.
    """
    _id = "track"

    def satisfies( self, contribution ):
        """
        """
        if len(self._conf.getTrackList()) == len(self._values) and contribution.getTrack():
            return True
        elif contribution.getTrack():
            if contribution.getTrack().getId() in self._values:
                return True
        else:
            return self._showNoValue
        return False


class SessionFilterField( filters.FilterField ):
    """Contains the filtering criteria for the session to which a
        contribution belongs.

        Inherits from: AbstractFilterField

        Attributes:
            _values -- (list) List of session identifiers;
            _showNoValue -- (bool) Tells whether a contribution satisfies the
                filter if it doesn't belong to any session.
    """
    _id = "session"

    def satisfies( self, contribution ):
        """
        """
        if len(self._conf.getSessionList()) == len(self._values) and contribution.getSession():
            return True
        elif contribution.getSession():
            if contribution.getSession().getId() in self._values:
                return True
        else:
            return self._showNoValue
        return False


class PosterFilterField (filters.FilterField):
    """ Contains the filtering criteria for the contribution being a poster
        or not. A contribution is considered a poster contribution if it
        belongs to a poster session.

        Inherits from: AbstractFilterField

        Attributes:
            _values -- (bool) Tells if the contribution should be a poster
                or not.
            _showNoValue -- (bool) Tells whether a contribution satisfies the
                filter if it doesn't satisfy the _values criterion. So, if
                True, all contributions will satisfy the criterion.
    """
    _id = "poster"

    def satisfies( self, contribution ):
        if self._showNoValue:
            return True
        elif len(self._values) > 0 and self._values[0]:
            # values[0] is True or False. Contribution has to be a poster
            return contribution.getSession() and contribution.getSession().getScheduleType() == "poster"
        else:
            # contribution must not be a poster
            return not contribution.getSession() or contribution.getSession().getScheduleType() != "poster"


class StatusFilterField(filters.FilterField):
    """
    """
    _id = "status"

    def satisfies(self, contribution):
        """
        """
        if len(ContribStatusList().getList()) == len(self._values):
            return True
        stKlass = contribution.getCurrentStatus().__class__
        return ContribStatusList().getId(stKlass) in self._values


class AuthorFilterField( filters.FilterField ):
    """
    """
    _id = "author"

    def satisfies(self, contribution):
        """
        """
        queryText = ""
        if len(self._values) > 0:
            queryText = str(self._values[0])  # The first value is the query text
        query = queryText.strip().lower()
        if query == "":
            return True
        for auth in contribution.getPrimaryAuthorList():
            key = "%s %s" % (auth.getFamilyName(), auth.getFirstName())
            if key.lower().find(query) != -1:
                return True
        return False


class MaterialFilterField(filters.FilterField):
    """
    """
    _id = "material"

    def satisfies(self, contribution):
        """
        """
        # all options selected
        if len(self._values) == 4:
            return True
        from MaKaC.webinterface.materialFactories import PaperFactory
        paper = contribution.getPaper()
        if (PaperFactory().getId() in self._values) and paper is not None:
            return True
        from MaKaC.webinterface.materialFactories import SlidesFactory
        slides = contribution.getSlides()
        if (SlidesFactory().getId() in self._values) and slides is not None:
            return True
        if ("--other--" in self._values) and \
           len(contribution.getAllMaterialList()) > 0:
            return True
        if ("--none--" in self._values) and \
           len(contribution.getAllMaterialList()) == 0:
            return True
        return False


class RefereeFilterField( filters.FilterField ):
    """ Contains the filtering criteria for the Referee of a contribution.

        Attributes:
            _value -- (User object) a User object. Can also be the string
                "any", and then the contribution won't be filtered by referee.
            _showNoValue -- (bool) Tells whether a contribution satisfies the
                filter if it doesn't have a Referee
    """
    _id = "referee"

    def __init__( self, conf, values, showNoValue = True ):
        filters.FilterField.__init__(self, conf, values, showNoValue)

    def satisfies( self, contribution ):
        rm = contribution.getReviewManager()
        if rm.hasReferee():
            user = self._values[0]
            if user == "any" or rm.isReferee(user):
                return True
            else:
                return False
        else:
            return self._showNoValue


class EditorFilterField( filters.FilterField ):
    """ Contains the filtering criteria for the Editor of a contribution.

        Attributes:
            _value -- (User object) a User object. Can also be the string
                "any", and then the contribution won't be filtered by editor.
            _showNoValue -- (bool) Tells whether a contribution satisfies the
                filter if it doesn't have an Editor
    """
    _id = "editor"

    def __init__( self, conf, values, showNoValue = True ):
        filters.FilterField.__init__(self, conf, values, showNoValue)

    def satisfies( self, contribution ):
        rm = contribution.getReviewManager()
        if rm.hasEditor():
            user = self._values[0]
            if user == "any" or rm.isEditor(user):
                return True
            else:
                return False
        else:
            return self._showNoValue


class ReviewerFilterField( filters.FilterField ):
    """ Contains the filtering criteria for a Reviewer of a contribution.

        Attributes:
            _value -- (User object) a User object. Can also be the string
                "any", and then the contribution won't be filtered by
                reviewer.
            _showNoValue -- (bool) Tells whether a contribution satisfies the
                filter if it doesn't have any Reviewers
    """
    _id = "reviewer"

    def __init__( self, conf, values, showNoValue = True ):
        filters.FilterField.__init__(self, conf, values, showNoValue)

    def satisfies( self, contribution ):
        rm = contribution.getReviewManager()
        if rm.hasReviewers():
            user = self._values[0]
            if user == "any" or rm.isReviewer(user):
                return True
            else:
                return False
        else:
            return self._showNoValue


class ReviewingFilterField( filters.FilterField ):
    """ Contains the filtering criteria for the Reviewing of a contribution.

        Attributes:
            _value -- (list) List of User objects with keys "referee",
                "editor" and "reviewer". Can also be the string "any", and
                then the contribution won't be filtered by reviewer.
            _showNoValue -- (bool) Tells whether a contribution satisfies the
                filter if it doesn't have a reviewing team
    """
    _id = "reviewing"

    def __init__( self, conf, values, showNoValue = True ):
        filters.FilterField.__init__(self, conf, values, showNoValue)

    def satisfies( self, contribution ):
        rm = contribution.getReviewManager()
        if rm.isReferee(self._values[0].get("referee", "")):
            if (self._values[0].get("editor", "") == "any" and rm.hasEditor()) \
                    or (self._values[0].get("reviewer", "") == "any" and rm.hasReviewers()):
                return True
            elif not rm.hasEditor() and not rm.hasReviewers():
                return self._showNoValue
            else:
                return False
        elif self._values[0].get("referee", "") == "any" or self._values[0].get("referee", "") == "":
            if ((self._values[0].get("referee", "") == "any") and rm.hasReferee()) \
                    or (self._values[0].get("editor", "") == "any" and rm.hasEditor()) \
                    or (self._values[0].get("reviewer", "") == "any" and rm.hasReviewers()):
                return True
            elif not rm.hasReferee() and not rm.hasEditor() and not rm.hasReviewers():
                return self._showNoValue
            else:
                return False


class MaterialSubmittedFilterField( filters.FilterField ):
    """ Contains the filtering criteria for the review material of a
        contribution.

        Attributes:
            _value -- (bool) Tells whether the author must have submitted the
                review material.
            _showNoValue -- (bool) Tells whether a contribution satisfies the
                filter if the material was not submitted
    """
    _id = "materialsubmitted"

    def satisfies( self, contribution ):
        review = contribution.getReviewManager().getLastReview()
        if self._values[0] and review.isAuthorSubmitted():
            return True
        elif review.isAuthorSubmitted():
            return False
        else:
            return self._showNoValue


class TitleSF(filters.SortingField):
    _id = "name"

    def compare( self, c1, c2 ):
        """
        """
        if c1.getTitle() == None and c2.getTitle() == None:
            return 0
        if c1.getTitle() == None:
            return +1
        if c2.getTitle() == None:
            return -1
        return cmp(natural_sort_key(c1.getTitle().lower().strip()),
                   natural_sort_key(c2.getTitle().lower().strip()))


class NumberSF( filters.SortingField ):
    _id = "number"

    def compare( self, c1, c2 ):
        try:
            n1 = int(c1.getId())
            n2 = int(c2.getId())
            return cmp(n1, n2)
        except ValueError, e:
            return cmp( c1.getId(), c2.getId() )


class DateSF( filters.SortingField ):
    _id = "date"

    def compare( self, c1, c2 ):
        if c1.getStartDate() == None and c2.getStartDate() == None:
            return 0
        if c1.getStartDate() is None:
            return +1
        if c2.getStartDate() is None:
            return -1
        return cmp( c1.getStartDate(), c2.getStartDate() )


class ContribTypeSF( filters.SortingField ):
    _id = "type"

    def compare( self, c1, c2 ):
        """
        """
        if c1.getType() == None and c2.getType() == None:
            return 0
        elif c1.getType() == None:
            return +1
        elif c2.getType() == None:
            return -1
        return cmp(natural_sort_key(c1.getType().getName().lower().strip()),
                   natural_sort_key(c2.getType().getName().lower().strip()))


class SessionSF( filters.SortingField ):
    _id = "session"

    def compare( self, c1, c2 ):
        """
        """
        if c1.getSession() == None and c2.getSession() == None:
            return 0
        elif c1.getSession() == None:
            return +1
        elif c2.getSession() == None:
            return -1
        return cmp( c1.getSession().getCode(), c2.getSession().getCode() )


class SessionTitleSF( filters.SortingField ):
    _id = "sessionTitle"

    def compare( self, c1, c2 ):
        """
        """
        if c1.getSession() == None and c2.getSession() == None:
            return 0
        elif c1.getSession() == None:
            return +1
        elif c2.getSession() == None:
            return -1
        return cmp(natural_sort_key(c1.getSession().getTitle().lower().strip()),
                   natural_sort_key(c2.getSession().getTitle().lower().strip()))


class TrackSF(filters.SortingField):
    _id = "track"

    def compare( self, c1, c2 ):
        """
        """
        if c1.getTrack() == None and c2.getTrack() == None:
            return 0
        elif c1.getTrack() == None:
            return +1
        elif c2.getTrack() == None:
            return -1
        return cmp( c1.getTrack().getTitle(), c2.getTrack().getTitle() )


class SpeakerSF(filters.SortingField):
    _id = "speaker"

    def compare( self, c1, c2 ):
        """
        """
        if c1.getSpeakerList() == [] and c2.getSpeakerList() == []:
            return 0
        elif c1.getSpeakerList() == []:
            return +1
        elif c2.getSpeakerList() == []:
            return -1
        s1 = "%s %s" % (c1.getSpeakerList()[0].getFamilyName().lower(),
                        c1.getSpeakerList()[0].getFirstName().lower())
        s2 = "%s %s" % (c2.getSpeakerList()[0].getFamilyName().lower(),
                        c2.getSpeakerList()[0].getFirstName().lower())
        return cmp( s1, s2 )


class BoardNumberSF( filters.SortingField ):
    _id = "board_number"

    def compare(self, c1, c2):
        try:
            n1 = int(c1.getBoardNumber())
        except ValueError, e:
            n1 = c1.getBoardNumber()
        try:
            n2 = int(c2.getBoardNumber())
        except ValueError, e:
            n2 = c2.getBoardNumber()
        return cmp(n1, n2)


class SortingCriteria( filters.SortingCriteria ):
    """
    """
    _availableFields = {NumberSF.getId():NumberSF, \
        DateSF.getId():DateSF, \
        ContribTypeSF.getId():ContribTypeSF, \
        SessionSF.getId():SessionSF, \
        SessionTitleSF.getId():SessionTitleSF, \
        TrackSF.getId():TrackSF, \
SpeakerSF.getId():SpeakerSF, \ BoardNumberSF.getId():BoardNumberSF, \ TitleSF.getId():TitleSF}
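# Hedged usage sketch: `conf` and `contribution` are assumed MaKaC
# conference/contribution objects obtained elsewhere; only the constructor
# signature shown above (conf, values, showNoValue) is used.
field = PosterFilterField(conf, [True], False)
if field.satisfies(contribution):
    pass  # the contribution sits in a poster session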
gpl-3.0
4,691,919,094,041,345,000
33.68
125
0.578111
false
Lana-B/Pheno4T
madanalysis/install/install_fastjetcontrib.py
1
6885
################################################################################ # # Copyright (C) 2012-2013 Eric Conte, Benjamin Fuks # The MadAnalysis development team, email: <ma5team@iphc.cnrs.fr> # # This file is part of MadAnalysis 5. # Official website: <https://launchpad.net/madanalysis5> # # MadAnalysis 5 is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # MadAnalysis 5 is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with MadAnalysis 5. If not, see <http://www.gnu.org/licenses/> # ################################################################################ from madanalysis.install.install_service import InstallService from shell_command import ShellCommand import os import sys import logging class InstallFastjetContrib: def __init__(self,main): self.main = main self.installdir = os.path.normpath(self.main.archi_info.ma5dir+'/tools/fastjet/') self.bindir = os.path.normpath(self.installdir+'/bin/fastjet-config') self.toolsdir = os.path.normpath(self.main.archi_info.ma5dir+'/tools') self.tmpdir = self.main.session_info.tmpdir self.downloaddir = self.main.session_info.downloaddir self.untardir = os.path.normpath(self.tmpdir + '/MA5_fastjetcontrib/') self.ncores = 1 self.files = {"fastjetcontrib.tar.gz" : "http://madanalysis.irmp.ucl.ac.be/raw-attachment/wiki/WikiStart/fjcontrib-1.012.tar.gz"} def GetNcores(self): self.ncores = InstallService.get_ncores(self.main.archi_info.ncores,\ self.main.forced) def CreateTmpFolder(self): ok = InstallService.prepare_tmp(self.untardir, self.downloaddir) if ok: self.tmpdir=self.untardir return ok def Download(self): # Checking connection with MA5 web site if not InstallService.check_ma5site(): return False # Launching wget logname = os.path.normpath(self.installdir+'/wget_contrib.log') if not InstallService.wget(self.files,logname,self.downloaddir): return False # Ok return True def Unpack(self): # Logname logname = os.path.normpath(self.installdir+'/unpack_contrib.log') # Unpacking the tarball ok, packagedir = InstallService.untar(logname, self.tmpdir,'fastjetcontrib.tar.gz') if not ok: return False # Ok: returning the good folder self.tmpdir=packagedir return True def Configure(self): # Input theCommands=['./configure','--fastjet-config='+self.bindir] logname=os.path.normpath(self.installdir+'/configuration_contrib.log') # Execute logging.debug('shell command: '+' '.join(theCommands)) ok, out= ShellCommand.ExecuteWithLog(theCommands,\ logname,\ self.tmpdir,\ silent=False) # return result if not ok: logging.error('impossible to configure the project. For more details, see the log file:') logging.error(logname) return ok def Build(self): # Input theCommands=['make','-j'+str(self.ncores)] logname=os.path.normpath(self.installdir+'/compilation_contrib.log') # Execute logging.debug('shell command: '+' '.join(theCommands)) ok, out= ShellCommand.ExecuteWithLog(theCommands,\ logname,\ self.tmpdir,\ silent=False) # return result if not ok: logging.error('impossible to build the project. 
For more details, see the log file:')
            logging.error(logname)
        return ok

    def Install(self):
        # Input
        theCommands = ['make', 'install']
        logname = os.path.normpath(self.installdir+'/installation_contrib.log')
        # Execute
        logging.debug('shell command: ' + ' '.join(theCommands))
        ok, out = ShellCommand.ExecuteWithLog(theCommands,
                                              logname,
                                              self.tmpdir,
                                              silent=False)
        # return result
        if not ok:
            logging.error('impossible to install the project. For more details, see the log file:')
            logging.error(logname)
        return ok

    def Check(self):
        # Check folders
        dirs = [self.installdir+"/include/fastjet/contrib",
                self.installdir+"/lib",
                self.installdir+"/bin"]
        for dir in dirs:
            if not os.path.isdir(dir):
                logging.error('folder '+dir+' is missing.')
                self.display_log()
                return False

        # Check fastjet executable
        if not os.path.isfile(self.installdir+'/bin/fastjet-config'):
            logging.error("binary labeled 'fastjet-config' is missing.")
            self.display_log()
            return False

        # Check one header file
        if not os.path.isfile(self.installdir+'/include/fastjet/contrib/Nsubjettiness.hh'):
            logging.error("header labeled 'include/fastjet/contrib/Nsubjettiness.hh' is missing.")
            self.display_log()
            return False

        if (not os.path.isfile(self.installdir+'/lib/libNsubjettiness.so')) and \
           (not os.path.isfile(self.installdir+'/lib/libNsubjettiness.a')):
            logging.error("library labeled 'libNsubjettiness.so' or 'libNsubjettiness.a' is missing.")
            self.display_log()
            return False

        return True

    def display_log(self):
        logging.error("More details can be found in the log files:")
        logging.error(" - "+os.path.normpath(self.installdir+"/wget_contrib.log"))
        logging.error(" - "+os.path.normpath(self.installdir+"/unpack_contrib.log"))
        logging.error(" - "+os.path.normpath(self.installdir+"/configuration_contrib.log"))
        logging.error(" - "+os.path.normpath(self.installdir+"/compilation_contrib.log"))
        logging.error(" - "+os.path.normpath(self.installdir+"/installation_contrib.log"))

    def NeedToRestart(self):
        return True
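# Hedged driver sketch: the sequence a MadAnalysis installation driver
# would run against this class; `main` is assumed to be a configured
# MadAnalysis main object. Only the methods defined above are used.
installer = InstallFastjetContrib(main)
installer.GetNcores()
ok = (installer.CreateTmpFolder() and installer.Download()
      and installer.Unpack() and installer.Configure()
      and installer.Build() and installer.Install()
      and installer.Check())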
gpl-3.0
-3,883,177,532,337,279,500
38.797688
137
0.581699
false
ymca-ireland/hth
app/main.py
1
2477
#!/usr/bin/env kivy import kivy import uuid kivy.require('1.8.0') from kivy.app import App from kivy.properties import NumericProperty, ReferenceListProperty,ObjectProperty, StringProperty from kivy.uix.button import Button from kivy.uix.label import Label from kivy.uix.boxlayout import BoxLayout from kivy.uix.screenmanager import ScreenManager, Screen from kivy.uix.textinput import TextInput from lxml import etree class TileScreen(Screen): up = ObjectProperty(None) box = ObjectProperty(None) def __init__(self, **kwargs): super(TileScreen, self).__init__(**kwargs) def add_button(self, button): self.box.add_widget(button) class InfoScreen(Screen): name_value = ObjectProperty() class ScreenSwitchButton(Button): sub_screen = StringProperty() view = ObjectProperty(None) def __init__(self, **kwargs): super(ScreenSwitchButton, self).__init__(**kwargs) def parse_display_elements(screenHandler, display_element, parent_screen_id=None): tiles = display_element.findall('tile') screen_id = str(uuid.uuid4()) view = None if len(tiles): # Is Tiles view = TileScreen(name=screen_id) for tile in tiles: b = ScreenSwitchButton(text=tile.find('name').text) b.view = view sub_display = tile.find('display') if sub_display is not None: b.sub_screen = parse_display_elements(screenHandler, sub_display, screen_id) view.add_button(b) else: info = display_element.find('info') view = InfoScreen(name=screen_id) view.name_value.text = info.find('title').text if view is not None: if parent_screen_id is not None: view.up.sub_screen = parent_screen_id view.up.view = view screenHandler.add_widget(view) return screen_id return None def parse_xml_file(screenHandler): doc = etree.parse('hth.xml') display = doc.find('display') if display is not None: current_screen = parse_display_elements(screenHandler, display) screenHandler.current = current_screen class DisplayPortal(BoxLayout): screenHandler = ObjectProperty(None) def __init__(self, **kwargs): super(DisplayPortal, self).__init__(**kwargs) parse_xml_file(self.screenHandler) class H2HApp(App): def build(self): return DisplayPortal() if __name__ == '__main__': H2HApp().run()
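# Hedged sketch of an hth.xml shape the parser above accepts, inferred
# from its find()/findall() calls; the element names come from the code,
# the text values are made up.
SAMPLE_XML = """
<root>
  <display>
    <tile>
      <name>Info</name>
      <display>
        <info><title>Details</title></info>
      </display>
    </tile>
  </display>
</root>
"""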
bsd-2-clause
7,853,394,444,727,441,000
24.802083
97
0.654017
false
Khan/react-components
make_template.py
1
1966
#!/usr/bin/env python

# TODO(colin): fix these lint errors (http://pep8.readthedocs.io/en/release-1.7.x/intro.html#error-codes)
# pep8-disable:E128

import os.path

import jinja2
from jinja2.ext import Extension


class CodeExampleExtension(Extension):
    """Insert a code example.

    My plan for the docs is side-by-side code and live widgets. I plan
    to make this extension fancier in the future, since I would like one
    file to serve as the source of both the pretty code we display to
    the user and the widget.

    I have a vague idea of how that will work - I think there will have
    to be a header/footer that will be inserted for any given example,
    then this command will strip that out, put it where it needs to go,
    and format the code nicely.

    http://jinja.pocoo.org/docs/extensions/#adding-extensions
    """
    tags = set(["code_example"])

    def __init__(self, environment):
        super(CodeExampleExtension, self).__init__(environment)

    # {% code_example "filename" %}
    #    ^------------------ first token, call next() to advance past it
    #                 ^----- generate self._insert("filename")
    def parse(self, parser):
        lineno = parser.stream.next().lineno
        filename = parser.parse_expression()
        return (jinja2.nodes
                .CallBlock(self.call_method('_insert', [filename]),
                           [], [], [])
                .set_lineno(lineno))

    def _insert(self, filename, caller):
        path = os.path.join('examples', filename)
        with open(path, 'r') as f:
            example = f.read()
        return (jinja2.Markup("<div class='example_div'>%s</div>") %
                example.strip())


if __name__ == '__main__':
    loader = jinja2.FileSystemLoader('.')
    env = jinja2.Environment(loader=loader,
                             extensions=[CodeExampleExtension])
    template = env.get_template('template.html')
    with open('docs/index.html', 'w') as f:
        f.seek(0)
        f.write(template.render())
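A hedged usage sketch of the extension above, run against the Python 2 era jinja2 this script targets (on current jinja2, parse() would need next(parser.stream) instead of parser.stream.next(), and Markup now lives in markupsafe). The module import and the demo.html file name are assumptions for illustration:

# Sketch: exercising CodeExampleExtension with an in-memory template.
# Assumes the script above is importable as make_template (its path in
# this record) and creates a throwaway examples/demo.html.
import os

import jinja2

from make_template import CodeExampleExtension

if not os.path.isdir('examples'):
    os.makedirs('examples')
with open('examples/demo.html', 'w') as f:
    f.write('<b>hello</b>')

env = jinja2.Environment(
    loader=jinja2.DictLoader({'page.html': '{% code_example "demo.html" %}'}),
    extensions=[CodeExampleExtension])
print(env.get_template('page.html').render())
# -> <div class='example_div'><b>hello</b></div>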
mit
8,428,015,763,604,837,000
33.491228
105
0.632248
false
testalt/electrum-ppc
setup-release.py
1
2894
""" py2app/py2exe build script for Electrum Litecoin Usage (Mac OS X): python setup.py py2app Usage (Windows): python setup.py py2exe """ from setuptools import setup import os import re import shutil import sys from lib.util import print_error from lib.version import ELECTRUM_VERSION as version name = "Electrum-ppc" mainscript = 'electrum-ppc' if sys.version_info[:3] < (2, 6, 0): print_error("Error: " + name + " requires Python version >= 2.6.0...") sys.exit(1) if sys.platform == 'darwin': from plistlib import Plist plist = Plist.fromFile('Info.plist') plist.update(dict(CFBundleIconFile='electrum.icns')) shutil.copy(mainscript, mainscript + '.py') mainscript += '.py' extra_options = dict( setup_requires=['py2app'], app=[mainscript], options=dict(py2app=dict(argv_emulation=True, includes=['PyQt4.QtCore', 'PyQt4.QtGui', 'PyQt4.QtWebKit', 'PyQt4.QtNetwork', 'sip'], packages=['lib', 'gui', 'plugins'], iconfile='electrum.icns', plist=plist, resources=["data", "icons"])), ) elif sys.platform == 'win32': extra_options = dict( setup_requires=['py2exe'], app=[mainscript], ) else: extra_options = dict( # Normally unix-like platforms will use "setup.py install" # and install the main script as such scripts=[mainscript], ) setup( name=name, version=version, **extra_options ) from distutils import dir_util if sys.platform == 'darwin': # Remove the copied py file os.remove(mainscript) resource = "dist/" + name + ".app/Contents/Resources/" dir_util.copy_tree("locale", resource + "locale/") # Try to locate qt_menu # Let's try the port version first! if os.path.isfile("/opt/local/lib/Resources/qt_menu.nib"): qt_menu_location = "/opt/local/lib/Resources/qt_menu.nib" else: # No dice? Then let's try the brew version if os.path.exists("/usr/local/Cellar"): qt_menu_location = os.popen("find /usr/local/Cellar -name qt_menu.nib | tail -n 1").read() # no brew, check /opt/local else: qt_menu_location = os.popen("find /opt/local -name qt_menu.nib | tail -n 1").read() qt_menu_location = re.sub('\n', '', qt_menu_location) if (len(qt_menu_location) == 0): print "Sorry couldn't find your qt_menu.nib this probably won't work" else: print "Found your qib: " + qt_menu_location # Need to include a copy of qt_menu.nib shutil.copytree(qt_menu_location, resource + "qt_menu.nib") # Need to touch qt.conf to avoid loading 2 sets of Qt libraries fname = resource + "qt.conf" with file(fname, 'a'): os.utime(fname, None)
gpl-3.0
-7,215,813,235,646,526,000
30.11828
118
0.599516
false
ddeepak6992/IITM-placement-Spyder
main.py
1
1142
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 16 00:27:31 2015

@author: deep
"""

#####################################
USERNAME = ''
PASSWORD = ''
#####################################

import json
from scraper import scraper
import os
import time


def diff(A, B):
    if len(A.keys()) > len(B.keys()):
        A, B = B, A
    for k in A.keys():
        if A[k] != B[k]:
            print '--', k, A[k]
            print '++', k, B[k]
            print ''


while True:
    try:
        if os.path.exists(os.path.join(os.getcwd(), 'database.json')):
            with open('database.json', 'r') as fin:
                old_database = json.load(fin)
            new_database = scraper(USERNAME, PASSWORD)
            if new_database != old_database:
                diff(old_database, new_database)
                with open('database.json', 'w') as fout:
                    json.dump(new_database, fout)
        else:
            new_database = scraper(USERNAME, PASSWORD)
            with open('database.json', 'w') as fout:
                json.dump(new_database, fout)
        time.sleep(60*60)
    except Exception as e:
        print e
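A worked example of the diff() helper above, using made-up placement data. Note its asymmetry: after the size swap, only keys present in the smaller dict are compared, so an entry that exists only in the larger dict is silently skipped (and a key missing from the larger dict would raise a KeyError):

# Worked example for diff() above (Python 2, like the script itself);
# the company entries are invented sample data.
old = {'CompanyA': 'open', 'CompanyB': 'closed'}
new = {'CompanyA': 'open', 'CompanyB': 'shortlist', 'CompanyC': 'open'}

diff(old, new)
# prints:
# -- CompanyB closed
# ++ CompanyB shortlist
#
# 'CompanyC' is never reported: only the keys of the smaller dict
# (here `old`) are visited.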
gpl-2.0
2,431,725,302,540,940,300
25.55814
73
0.478984
false
dracos/QGIS
python/plugins/processing/algs/translations.py
1
53414
# -*- coding: utf-8 -*- """ Don't edit this file manually. Update it from QGIS console: from processing.tools.translation import updateTranslations updateTranslations() """ from PyQt4.QtCore import QCoreApplication def translationShadow(): """QGISAlgorithmProvider""" QCoreApplication.translate("SumLines", "Sum line lengths") QCoreApplication.translate("PointsInPolygon", "Count points in polygon") QCoreApplication.translate("PointsInPolygonWeighted", "Count points in polygon(weighted)") QCoreApplication.translate("PointsInPolygonUnique", "Count unique points in polygon") QCoreApplication.translate("BasicStatisticsStrings", "Basic statistics for text fields") QCoreApplication.translate("BasicStatisticsNumbers", "Basic statistics for numeric fields") QCoreApplication.translate("NearestNeighbourAnalysis", "Nearest neighbour analysis") QCoreApplication.translate("MeanCoords", "Mean coordinate(s)") QCoreApplication.translate("LinesIntersection", "Line intersections") QCoreApplication.translate("UniqueValues", "List unique values") QCoreApplication.translate("PointDistance", "Distance matrix") QCoreApplication.translate("ReprojectLayer", "Reproject layer") QCoreApplication.translate("ExportGeometryInfo", "Export/Add geometry columns") QCoreApplication.translate("Centroids", "Polygon centroids") QCoreApplication.translate("Delaunay", "Delaunay triangulation") QCoreApplication.translate("VoronoiPolygons", "Voronoi polygons") QCoreApplication.translate("SimplifyGeometries", "Simplify geometries") QCoreApplication.translate("DensifyGeometries", "Densify geometries") QCoreApplication.translate("DensifyGeometriesInterval", "Densify geometries given an interval") QCoreApplication.translate("MultipartToSingleparts", "Multipart to singleparts") QCoreApplication.translate("SinglePartsToMultiparts", "Singleparts to multipart") QCoreApplication.translate("PolygonsToLines", "Polygons to lines") QCoreApplication.translate("LinesToPolygons", "Lines to polygons") QCoreApplication.translate("ExtractNodes", "Extract nodes") QCoreApplication.translate("Eliminate", "Eliminate sliver polygons") QCoreApplication.translate("ConvexHull", "Convex hull") QCoreApplication.translate("FixedDistanceBuffer", "Fixed distance buffer") QCoreApplication.translate("VariableDistanceBuffer", "Variable distance buffer") QCoreApplication.translate("Dissolve", "Dissolve") QCoreApplication.translate("Difference", "Difference") QCoreApplication.translate("Intersection", "Intersection") QCoreApplication.translate("Union", "Union") QCoreApplication.translate("Clip", "Clip") QCoreApplication.translate("ExtentFromLayer", "Polygon from layer extent") QCoreApplication.translate("RandomSelection", "Random selection") QCoreApplication.translate("RandomSelectionWithinSubsets", "Random selection within subsets") QCoreApplication.translate("SelectByLocation", "Select by location") QCoreApplication.translate("RandomExtract", "Random extract") QCoreApplication.translate("DeleteHoles", "Delete holes") QCoreApplication.translate("RandomExtractWithinSubsets", "Random extract within subsets") QCoreApplication.translate("ExtractByLocation", "Extract by location") QCoreApplication.translate("SpatialJoin", "Join attributes by location") QCoreApplication.translate("RegularPoints", "Regular points") QCoreApplication.translate("SymmetricalDifference", "Symmetrical difference") QCoreApplication.translate("VectorSplit", "Split vector layer") QCoreApplication.translate("VectorGrid", "Vector grid") QCoreApplication.translate("DeleteColumn", "Delete column") 
QCoreApplication.translate("DeleteDuplicateGeometries", "Delete duplicate geometries") QCoreApplication.translate("TextToFloat", "Text to float") QCoreApplication.translate("ExtractByAttribute", "Extract by attribute") QCoreApplication.translate("SelectByAttribute", "Select by attribute") QCoreApplication.translate("Grid", "Create graticule") QCoreApplication.translate("Gridify", "Snap points to grid") QCoreApplication.translate("HubDistance", "Distance to nearest hub") QCoreApplication.translate("HubLines", "Hub lines") QCoreApplication.translate("Merge", "Merge vector layers") QCoreApplication.translate("GeometryConvert", "Convert geometry type") QCoreApplication.translate("AddTableField", "Add field to attributes table") QCoreApplication.translate("FieldsCalculator", "Field calculator") QCoreApplication.translate("SaveSelectedFeatures", "Save selected features") QCoreApplication.translate("JoinAttributes", "Join attributes table") QCoreApplication.translate("AutoincrementalField", "Add autoincremental field") QCoreApplication.translate("Explode", "Explode lines") QCoreApplication.translate("FieldsPyculator", "Advanced Python field calculator") QCoreApplication.translate("EquivalentNumField", "Create equivalent numerical field") QCoreApplication.translate("PointsLayerFromTable", "Points layer from table") QCoreApplication.translate("StatisticsByCategories", "Statistics by categories") QCoreApplication.translate("ConcaveHull", "Concave hull") QCoreApplication.translate("Polygonize", "Polygonize") QCoreApplication.translate("RasterLayerStatistics", "Raster layer statistics") QCoreApplication.translate("PointsDisplacement", "Points displacement") QCoreApplication.translate("ZonalStatistics", "Zonal Statistics") QCoreApplication.translate("PointsFromPolygons", "Generate points (pixel centroids) inside polygons") QCoreApplication.translate("PointsFromLines", "Generate points (pixel centroids) along line") QCoreApplication.translate("RandomPointsExtent", "Random points in extent") QCoreApplication.translate("RandomPointsLayer", "Random points in layer bounds") QCoreApplication.translate("RandomPointsPolygonsFixed", "Random points inside polygons (fixed)") QCoreApplication.translate("RandomPointsPolygonsVariable", "Random points inside polygons (variable)") QCoreApplication.translate("RandomPointsAlongLines", "Random points along line") QCoreApplication.translate("PointsToPaths", "Points to path") QCoreApplication.translate("PostGISExecuteSQL", "PostGIS execute SQL") QCoreApplication.translate("ImportIntoPostGIS", "Import into PostGIS") QCoreApplication.translate("SetVectorStyle", "Set style for vector layer") QCoreApplication.translate("SetRasterStyle", "Set style for raster layer") QCoreApplication.translate("SelectByExpression", "Select by expression") QCoreApplication.translate("HypsometricCurves", "Hypsometric curves") QCoreApplication.translate("SplitLinesWithLines", "Split lines with lines") QCoreApplication.translate("CreateConstantRaster", "Create constant raster layer") QCoreApplication.translate("FieldsMapper", "Refactor fields") QCoreApplication.translate("SelectByAttributeSum", "Select by attribute sum") QCoreApplication.translate("Datasources2Vrt", "Build virtual vector") QCoreApplication.translate("CheckValidity", "Check validity") QCoreApplication.translate("VectorLayerHistogram", "Vector layer histogram") QCoreApplication.translate("RasterLayerHistogram", "Raster layer histogram") QCoreApplication.translate("VectorLayerScatterplot", "Vector layer scatterplot") 
QCoreApplication.translate("MeanAndStdDevPlot", "Mean and standard deviation plot") QCoreApplication.translate("BarPlot", "Bar plot") QCoreApplication.translate("PolarPlot", "Polar plot") QCoreApplication.translate("ScriptAlgorithm", "Number of unique values in classes") QCoreApplication.translate("ScriptAlgorithm", "Create points along lines") QCoreApplication.translate("ScriptAlgorithm", "Keep n biggest parts") QCoreApplication.translate("ScriptAlgorithm", "Frequency analysis") QCoreApplication.translate("ScriptAlgorithm", "Fill holes") """ModelerOnlyAlgorithmProvider""" QCoreApplication.translate("CalculatorModelerAlgorithm", "Calculator") QCoreApplication.translate("RasterLayerBoundsAlgorithm", "Raster layer bounds") QCoreApplication.translate("VectorLayerBoundsAlgorithm", "Vector layer bounds") """GdalOgrAlgorithmProvider""" QCoreApplication.translate("nearblack", "Near black") QCoreApplication.translate("information", "Information") QCoreApplication.translate("warp", "Reproject raster layer") QCoreApplication.translate("translate", "Export raster layer") QCoreApplication.translate("rgb2pct", "RGB to PCT") QCoreApplication.translate("pct2rgb", "PCT to RGB") QCoreApplication.translate("merge", "Merge raster layers") QCoreApplication.translate("buildvrt", "Build Virtual Raster") QCoreApplication.translate("polygonize", "Vectorize raster layer") QCoreApplication.translate("gdaladdo", "Build overviews (pyramids)") QCoreApplication.translate("ClipByExtent", "Clip raster by extent") QCoreApplication.translate("ClipByMask", "Clip raster by mask layer") QCoreApplication.translate("contour", "Contour lines") QCoreApplication.translate("rasterize", "Rasterize (vector to raster)") QCoreApplication.translate("proximity", "Proximity (raster distance)") QCoreApplication.translate("sieve", "Remove small pixel clumps (nearest neighbour)") QCoreApplication.translate("fillnodata", "Fill nodata") QCoreApplication.translate("ExtractProjection", "Extract projection") QCoreApplication.translate("gdal2xyz", "gdal2xyz") QCoreApplication.translate("hillshade", "Hillshade") QCoreApplication.translate("slope", "Slope") QCoreApplication.translate("aspect", "Aspect") QCoreApplication.translate("tri", "TRI (Terrain Ruggedness Index)") QCoreApplication.translate("tpi", "TPI (Topographic Position Index)") QCoreApplication.translate("roughness", "Roughness") QCoreApplication.translate("ColorRelief", "Color relief") QCoreApplication.translate("GridInvDist", "Interpolate (Inverse distance weighting)") QCoreApplication.translate("GridAverage", "Interpolate (Average)") QCoreApplication.translate("GridNearest", "Interpolate (Nearest Neighbor)") QCoreApplication.translate("GridDataMetrics", "Interpolate (Data metrics)") QCoreApplication.translate("gdaltindex", "Tile Index") QCoreApplication.translate("gdalcalc", "Raster calculator") QCoreApplication.translate("rasterize_over", "Rasterize (write over existing raster)") QCoreApplication.translate("OgrInfo", "Information") QCoreApplication.translate("Ogr2Ogr", "Convert format") QCoreApplication.translate("Ogr2OgrClip", "Clip vectors by polygon") QCoreApplication.translate("Ogr2OgrClipExtent", "Clip vectors by extent") QCoreApplication.translate("Ogr2OgrToPostGis", "Import Vector into PostGIS database (new connection)") QCoreApplication.translate("Ogr2OgrToPostGisList", "Import Vector into PostGIS database (available connections)") QCoreApplication.translate("Ogr2OgrPointsOnLines", "Create points along lines") QCoreApplication.translate("Ogr2OgrBuffer", "Buffer vectors") 
QCoreApplication.translate("Ogr2OgrDissolve", "Dissolve polygons") QCoreApplication.translate("Ogr2OgrOneSideBuffer", "Single sided buffers (and offset lines) for lines") QCoreApplication.translate("Ogr2OgrTableToPostGisList", "Import layer/table as geometryless table into PostgreSQL database") QCoreApplication.translate("OgrSql", "Execute SQL on vector layer") """LidarToolsAlgorithmProvider""" """OTBAlgorithmProvider""" QCoreApplication.translate("OTBAlgorithm", "ExtractROI (standard)") QCoreApplication.translate("OTBAlgorithm", "ComputeModulusAndPhase-one (OneEntry)") QCoreApplication.translate("OTBAlgorithm", "EdgeExtraction (gradient)") QCoreApplication.translate("OTBAlgorithm", "GrayScaleMorphologicalOperation (opening)") QCoreApplication.translate("OTBAlgorithm", "RigidTransformResample (translation)") QCoreApplication.translate("OTBAlgorithm", "Pansharpening (lmvm)") QCoreApplication.translate("OTBAlgorithm", "DimensionalityReduction (napca)") QCoreApplication.translate("OTBAlgorithm", "TrainImagesClassifier (knn)") QCoreApplication.translate("OTBAlgorithm", "ExtractROI (fit)") QCoreApplication.translate("OTBAlgorithm", "Smoothing (gaussian)") QCoreApplication.translate("OTBAlgorithm", "Band Math") QCoreApplication.translate("OTBAlgorithm", "Image to KMZ Export") QCoreApplication.translate("OTBAlgorithm", "Smoothing (anidif)") QCoreApplication.translate("OTBAlgorithm", "BinaryMorphologicalOperation (closing)") QCoreApplication.translate("OTBAlgorithm", "Segmentation (watershed)") QCoreApplication.translate("OTBAlgorithm", "EdgeExtraction (sobel)") QCoreApplication.translate("OTBAlgorithm", "Split Image") QCoreApplication.translate("OTBAlgorithm", "ComputeConfusionMatrix (vector)") QCoreApplication.translate("OTBAlgorithm", "Exact Large-Scale Mean-Shift segmentation, step 4") QCoreApplication.translate("OTBAlgorithm", "Segmentation (meanshift)") QCoreApplication.translate("OTBAlgorithm", "RigidTransformResample (id)") QCoreApplication.translate("OTBAlgorithm", "DimensionalityReduction (ica)") QCoreApplication.translate("OTBAlgorithm", "Segmentation (mprofiles)") QCoreApplication.translate("OTBAlgorithm", "Radiometric Indices") QCoreApplication.translate("OTBAlgorithm", "SOM Classification") QCoreApplication.translate("OTBAlgorithm", "TrainImagesClassifier (ann)") QCoreApplication.translate("OTBAlgorithm", "Segmentation (cc)") QCoreApplication.translate("OTBAlgorithm", "Connected Component Segmentation") QCoreApplication.translate("OTBAlgorithm", "Image Classification") QCoreApplication.translate("OTBAlgorithm", "Read image information") QCoreApplication.translate("OTBAlgorithm", "Pansharpening (rcs)") QCoreApplication.translate("OTBAlgorithm", "Smoothing (mean)") QCoreApplication.translate("OTBAlgorithm", "BinaryMorphologicalOperation (dilate)") QCoreApplication.translate("OTBAlgorithm", "OrthoRectification (fit-to-ortho)") QCoreApplication.translate("OTBAlgorithm", "Mean Shift filtering (can be used as Exact Large-Scale Mean-Shift segmentation, step 1)") QCoreApplication.translate("OTBAlgorithm", "Images Concatenation") QCoreApplication.translate("OTBAlgorithm", "Line segment detection") QCoreApplication.translate("OTBAlgorithm", "OrthoRectification (epsg)") QCoreApplication.translate("OTBAlgorithm", "Exact Large-Scale Mean-Shift segmentation, step 3 (optional)") QCoreApplication.translate("OTBAlgorithm", "FusionOfClassifications (dempstershafer)") QCoreApplication.translate("OTBAlgorithm", "Concatenate") QCoreApplication.translate("OTBAlgorithm", "Stereo Framework") 
QCoreApplication.translate("OTBAlgorithm", "Exact Large-Scale Mean-Shift segmentation, step 2") QCoreApplication.translate("OTBAlgorithm", "DimensionalityReduction (maf)") QCoreApplication.translate("OTBAlgorithm", "TrainImagesClassifier (gbt)") QCoreApplication.translate("OTBAlgorithm", "ColorMapping (image)") QCoreApplication.translate("OTBAlgorithm", "ComputeConfusionMatrix (raster)") QCoreApplication.translate("OTBAlgorithm", "BinaryMorphologicalOperation (erode)") QCoreApplication.translate("OTBAlgorithm", "Hoover compare segmentation") QCoreApplication.translate("OTBAlgorithm", "GrayScaleMorphologicalOperation (dilate)") QCoreApplication.translate("OTBAlgorithm", "Image Envelope") QCoreApplication.translate("OTBAlgorithm", "TrainImagesClassifier (rf)") QCoreApplication.translate("OTBAlgorithm", "Haralick Texture Extraction") QCoreApplication.translate("OTBAlgorithm", "TrainImagesClassifier (bayes)") QCoreApplication.translate("OTBAlgorithm", "Images comparaison") QCoreApplication.translate("OTBAlgorithm", "Optical calibration") QCoreApplication.translate("OTBAlgorithm", "Segmentation (edison)") QCoreApplication.translate("OTBAlgorithm", "FusionOfClassifications (majorityvoting)") QCoreApplication.translate("OTBAlgorithm", "ColorMapping (custom)") QCoreApplication.translate("OTBAlgorithm", "TrainImagesClassifier (boost)") QCoreApplication.translate("OTBAlgorithm", "Classification Map Regularization") QCoreApplication.translate("OTBAlgorithm", "TrainImagesClassifier (libsvm)") QCoreApplication.translate("OTBAlgorithm", "Pansharpening (bayes)") QCoreApplication.translate("OTBAlgorithm", "GrayScaleMorphologicalOperation (closing)") QCoreApplication.translate("OTBAlgorithm", "Rescale Image") QCoreApplication.translate("OTBAlgorithm", "ColorMapping (optimal)") QCoreApplication.translate("OTBAlgorithm", "DimensionalityReduction (pca)") QCoreApplication.translate("OTBAlgorithm", "Local Statistic Extraction") QCoreApplication.translate("OTBAlgorithm", "Compute Images second order statistics") QCoreApplication.translate("OTBAlgorithm", "TrainImagesClassifier (svm)") QCoreApplication.translate("OTBAlgorithm", "RigidTransformResample (rotation)") QCoreApplication.translate("OTBAlgorithm", "GrayScaleMorphologicalOperation (erode)") QCoreApplication.translate("OTBAlgorithm", "Unsupervised KMeans image classification") QCoreApplication.translate("OTBAlgorithm", "ComputeModulusAndPhase-two (TwoEntries)") QCoreApplication.translate("OTBAlgorithm", "TrainImagesClassifier (dt)") QCoreApplication.translate("OTBAlgorithm", "Superimpose sensor") QCoreApplication.translate("OTBAlgorithm", "Image Tile Fusion") QCoreApplication.translate("OTBAlgorithm", "OrthoRectification (utm)") QCoreApplication.translate("OTBAlgorithm", "OrthoRectification (lambert-WGS84)") QCoreApplication.translate("OTBAlgorithm", "EdgeExtraction (touzi)") QCoreApplication.translate("OTBAlgorithm", "Multivariate alteration detector") QCoreApplication.translate("OTBAlgorithm", "ColorMapping (continuous)") QCoreApplication.translate("OTBAlgorithm", "BinaryMorphologicalOperation (opening)") """RAlgorithmProvider""" QCoreApplication.translate("RAlgorithm", "Tobler") QCoreApplication.translate("RAlgorithm", "ACP contribution") QCoreApplication.translate("RAlgorithm", "scatterplot regressione") QCoreApplication.translate("RAlgorithm", "Kernel density estimation") QCoreApplication.translate("RAlgorithm", "Minimum convex polygon") QCoreApplication.translate("RAlgorithm", "CART") QCoreApplication.translate("RAlgorithm", "Summary statistics") 
QCoreApplication.translate("RAlgorithm", "frequency plot") QCoreApplication.translate("RAlgorithm", "ACP individus") QCoreApplication.translate("RAlgorithm", "Distance") QCoreApplication.translate("RAlgorithm", "Kriging") QCoreApplication.translate("RAlgorithm", "scatterplot log") QCoreApplication.translate("RAlgorithm", "Selection with Bayesian Information Criterion") QCoreApplication.translate("RAlgorithm", "Extract points from line") QCoreApplication.translate("RAlgorithm", "ACP var") QCoreApplication.translate("RAlgorithm", "qqplot") QCoreApplication.translate("RAlgorithm", "Krigeage selection") QCoreApplication.translate("RAlgorithm", "Frequency table") QCoreApplication.translate("RAlgorithm", "Advanced raster histogram") QCoreApplication.translate("RAlgorithm", "Density curve") QCoreApplication.translate("RAlgorithm", "CAH") QCoreApplication.translate("RAlgorithm", "reseau voisin") QCoreApplication.translate("RAlgorithm", "Relative distribution (raster covariate)") QCoreApplication.translate("RAlgorithm", "Histogram") QCoreApplication.translate("RAlgorithm", "Random sampling grid") QCoreApplication.translate("RAlgorithm", "selection Cp") QCoreApplication.translate("RAlgorithm", "Quadrat analysis") QCoreApplication.translate("RAlgorithm", "ggplot scatterplot") QCoreApplication.translate("RAlgorithm", "selection adjr2") QCoreApplication.translate("RAlgorithm", "Monte-Carlo spatial randomness") QCoreApplication.translate("RAlgorithm", "Douglas-Peucker with choice") QCoreApplication.translate("RAlgorithm", "Inverse Distance Weigthing with method selection") QCoreApplication.translate("RAlgorithm", "AFC") QCoreApplication.translate("RAlgorithm", "ponderation selection") QCoreApplication.translate("RAlgorithm", "alpha shape") QCoreApplication.translate("RAlgorithm", "a-star") QCoreApplication.translate("RAlgorithm", "Ripley - Rasson spatial domain") QCoreApplication.translate("RAlgorithm", "Selection Cp") QCoreApplication.translate("RAlgorithm", "Selection with criterion choice") QCoreApplication.translate("RAlgorithm", "Alpha shape") QCoreApplication.translate("RAlgorithm", "Close neighbor") QCoreApplication.translate("RAlgorithm", "Kolmogrov-Smirnov test") QCoreApplication.translate("RAlgorithm", "Droite") QCoreApplication.translate("RAlgorithm", "regression multiple") QCoreApplication.translate("RAlgorithm", "Douglas-Peucker") QCoreApplication.translate("RAlgorithm", "ponderation") QCoreApplication.translate("RAlgorithm", "regression") QCoreApplication.translate("RAlgorithm", "A-star") QCoreApplication.translate("RAlgorithm", "Kriging with model selection") QCoreApplication.translate("RAlgorithm", "Courbe densite") QCoreApplication.translate("RAlgorithm", "kernel") QCoreApplication.translate("RAlgorithm", "selection critere") QCoreApplication.translate("RAlgorithm", "ANOVA") QCoreApplication.translate("RAlgorithm", "F function") QCoreApplication.translate("RAlgorithm", "Krigeage") QCoreApplication.translate("RAlgorithm", "Multiple Regression") QCoreApplication.translate("RAlgorithm", "Simple Linear Regression") QCoreApplication.translate("RAlgorithm", "scatterplot types") QCoreApplication.translate("RAlgorithm", "Polygone") QCoreApplication.translate("RAlgorithm", "Autocor spatiale") QCoreApplication.translate("RAlgorithm", "G function") QCoreApplication.translate("RAlgorithm", "Selection with r2") QCoreApplication.translate("RAlgorithm", "Inverse Distance Weigthing") QCoreApplication.translate("RAlgorithm", "douglas choix-dept") QCoreApplication.translate("RAlgorithm", "Selection with 
r2 adjusted") QCoreApplication.translate("RAlgorithm", "Raster histogram") QCoreApplication.translate("RAlgorithm", "selection r2") QCoreApplication.translate("RAlgorithm", "AFDM") QCoreApplication.translate("RAlgorithm", "selection BIC") QCoreApplication.translate("RAlgorithm", "Relative distribution (distance covariate)") QCoreApplication.translate("RAlgorithm", "Regular sampling grid") QCoreApplication.translate("RAlgorithm", "ACP cercle") """SagaAlgorithmProvider""" """GrassAlgorithmProvider""" QCoreApplication.translate("GrassAlgorithm", "r.thin - Thins non-zero cells that denote linear features in a raster layer.") QCoreApplication.translate("GrassAlgorithm", "v.hull - Produces a convex hull for a given vector map.") QCoreApplication.translate("GrassAlgorithm", "r.mfilter.fp - Raster map matrix filter.") QCoreApplication.translate("GrassAlgorithm", "r.horizon - Horizon angle computation from a digital elevation model.") QCoreApplication.translate("GrassAlgorithm", "r.terraflow - Flow computation for massive grids (float version).") QCoreApplication.translate("GrassAlgorithm", "r.fill.dir - Filters and generates a depressionless elevation layer and a flow direction layer from a given elevation raster layer.") QCoreApplication.translate("GrassAlgorithm", "r.coin - Tabulates the mutual occurrence (coincidence) of categories for two raster map layers.") QCoreApplication.translate("GrassAlgorithm", "v.surf.rst.line - Spatial approximation and topographic analysis using regularized spline with tension.") QCoreApplication.translate("GrassAlgorithm", "v.clean.advanced - Toolset for cleaning topology of vector map (Advanced).") QCoreApplication.translate("GrassAlgorithm", "v.lidar.correction - Correction of the v.lidar.growing output. It is the last of the three algorithms for LIDAR filtering.") QCoreApplication.translate("GrassAlgorithm", "r.out.gridatb - Exports GRASS raster map to GRIDATB.FOR map file (TOPMODEL)") QCoreApplication.translate("GrassAlgorithm", "v.surf.rst - Spatial approximation and topographic analysis using regularized spline with tension.") QCoreApplication.translate("GrassAlgorithm", "r.terraflow.short - Flow computation for massive grids (integer version).") QCoreApplication.translate("GrassAlgorithm", "r.univar - Calculates univariate statistics from the non-null cells of a raster map.") QCoreApplication.translate("GrassAlgorithm", "r.slope - Generates raster maps of slope from a elevation raster map.") QCoreApplication.translate("GrassAlgorithm", "r.random.cells - Generates random cell values with spatial dependence.") QCoreApplication.translate("GrassAlgorithm", "v.transform - Performs an affine transformation on a vector layer.") QCoreApplication.translate("GrassAlgorithm", "v.dissolve - Dissolves boundaries between adjacent areas sharing a common category number or attribute.") QCoreApplication.translate("GrassAlgorithm", "v.lidar.growing - Building contour determination and Region Growing algorithm for determining the building inside") QCoreApplication.translate("GrassAlgorithm", "v.extract - Selects vector objects from a vector layer a new layer containing only the selected objects.") QCoreApplication.translate("GrassAlgorithm", "nviz - Visualization and animation tool for GRASS data.") QCoreApplication.translate("GrassAlgorithm", "r.kappa - Calculate error matrix and kappa parameter for accuracy assessment of classification result.") QCoreApplication.translate("GrassAlgorithm", "r.contour.step - Create vector contours from raster at specified steps") 
QCoreApplication.translate("GrassAlgorithm", "r.average - Finds the average of values in a cover raster layer within areas assigned the same category value in a user-specified base layer.") QCoreApplication.translate("GrassAlgorithm", "v.buffer.distance - Creates a buffer around features of given type.") QCoreApplication.translate("GrassAlgorithm", "r.rescale - Rescales the range of category values in a raster layer.") QCoreApplication.translate("GrassAlgorithm", "r.reclass - Creates a new map layer whose category values are based upon a reclassification of the categories in an existing raster map layer.") QCoreApplication.translate("GrassAlgorithm", "v.kernel - Generates a raster density map from vector point data using a moving kernel or optionally generates a vector density map on a vector network.") QCoreApplication.translate("GrassAlgorithm", "v.distance - Finds the nearest element in vector map 'to' for elements in vector map 'from'.") QCoreApplication.translate("GrassAlgorithm", "r.grow.distance - Generates a raster layer of distance to features in input layer.") QCoreApplication.translate("GrassAlgorithm", "v.surf.bspline.lambda - Bicubic or bilinear spline interpolation with Tykhonov regularization.") QCoreApplication.translate("GrassAlgorithm", "r.mode - Finds the mode of values in a cover layer within areas assigned the same category value in a user-specified base layer.") QCoreApplication.translate("GrassAlgorithm", "r.regression.line - Calculates linear regression from two raster layers : y = a + b*x.") QCoreApplication.translate("GrassAlgorithm", "r.topidx - Creates topographic index layer from elevation raster layer") QCoreApplication.translate("GrassAlgorithm", "r.plane - Creates raster plane layer given dip (inclination), aspect (azimuth) and one point.") QCoreApplication.translate("GrassAlgorithm", "r.mapcalculator - Calculate new raster map from a r.mapcalc expression.") QCoreApplication.translate("GrassAlgorithm", "v.info - Outputs basic information about a user-specified vector map.") QCoreApplication.translate("GrassAlgorithm", "v.segment - Creates points/segments from input vector lines and positions.") QCoreApplication.translate("GrassAlgorithm", "v.drape - Converts vector map to 3D by sampling of elevation raster map.") QCoreApplication.translate("GrassAlgorithm", "r.cost.full - Creates a raster layer of cumulative cost of moving across a raster layer whose cell values represent cost.") QCoreApplication.translate("GrassAlgorithm", "i.ifft - Inverse Fast Fourier Transform (IFFT) for image processing.") QCoreApplication.translate("GrassAlgorithm", "r.circle - Creates a raster map containing concentric rings around a given point.") QCoreApplication.translate("GrassAlgorithm", "r.water.outlet - Watershed basin creation program.") QCoreApplication.translate("GrassAlgorithm", "r.resample - GRASS raster map layer data resampling capability using nearest neighbors.") QCoreApplication.translate("GrassAlgorithm", "v.sample - Samples a raster layer at vector point locations.") QCoreApplication.translate("GrassAlgorithm", "r.quantile - Compute quantiles using two passes.") QCoreApplication.translate("GrassAlgorithm", "r.statistics - Calculates category or object oriented statistics.") QCoreApplication.translate("GrassAlgorithm", "v.reclass - Changes vector category values for an existing vector map according to results of SQL queries or a value in attribute table column.") QCoreApplication.translate("GrassAlgorithm", "r.out.ppm - Converts a raster layer to a PPM image file at 
the pixel resolution of the currently defined region.") QCoreApplication.translate("GrassAlgorithm", "r.resamp.stats - Resamples raster layers to a coarser grid using aggregation.") QCoreApplication.translate("GrassAlgorithm", "i.zc - Zero-crossing \"edge detection\" raster function for image processing.") QCoreApplication.translate("GrassAlgorithm", "r.lake.coords - Fills lake at given point to given level.") QCoreApplication.translate("GrassAlgorithm", "r.surf.contour - Surface generation program from rasterized contours.") QCoreApplication.translate("GrassAlgorithm", "v.kcv - Randomly partition points into test/train sets.") QCoreApplication.translate("GrassAlgorithm", "v.out.pov - Converts to POV-Ray format, GRASS x,y,z -> POV-Ray x,z,y") QCoreApplication.translate("GrassAlgorithm", "v.report - Reports geometry statistics for vectors.") QCoreApplication.translate("GrassAlgorithm", "r.out.xyz - Export a raster map to a text file as x,y,z values based on cell centers") QCoreApplication.translate("GrassAlgorithm", "r.carve - Takes vector stream data, transforms it to raster and subtracts depth from the output DEM.") QCoreApplication.translate("GrassAlgorithm", "v.to.rast.value - Converts (rasterize) a vector layer into a raster layer.") QCoreApplication.translate("GrassAlgorithm", "v.outlier - Removes outliers from vector point data.") QCoreApplication.translate("GrassAlgorithm", "r.surf.random - Produces a raster layer of uniform random deviates whose range can be expressed by the user.") QCoreApplication.translate("GrassAlgorithm", "r.resamp.rst - Reinterpolates using regularized spline with tension and smoothing.") QCoreApplication.translate("GrassAlgorithm", "r.bitpattern - Compares bit patterns with a raster map.") QCoreApplication.translate("GrassAlgorithm", "r.covar - Outputs a covariance/correlation matrix for user-specified raster layer(s).") QCoreApplication.translate("GrassAlgorithm", "r.shaded.relief - Creates shaded relief from an elevation layer (DEM).") QCoreApplication.translate("GrassAlgorithm", "v.generalize - Vector based generalization.") QCoreApplication.translate("GrassAlgorithm", "v.distance.toattr - Finds the nearest element in vector map 'to' for elements in vector map 'from'.") QCoreApplication.translate("GrassAlgorithm", "r.info - Output basic information about a raster layer.") QCoreApplication.translate("GrassAlgorithm", "r.his - Generates red, green and blue raster layers combining hue, intensity and saturation (HIS) values from user-specified input raster layers.") QCoreApplication.translate("GrassAlgorithm", "r.aspect - Generates raster maps of aspect from a elevation raster map.") QCoreApplication.translate("GrassAlgorithm", "v.split.length - Split lines to shorter segments by length.") QCoreApplication.translate("GrassAlgorithm", "r.sim.sediment - Sediment transport and erosion/deposition simulation using path sampling method (SIMWE).") QCoreApplication.translate("GrassAlgorithm", "r.patch - Creates a composite raster layer by using one (or more) layer(s) to fill in areas of \"no data\" in another map layer.") QCoreApplication.translate("GrassAlgorithm", "r.reclass.area.greater - Reclassifies a raster layer, selecting areas larger than a user specified size") QCoreApplication.translate("GrassAlgorithm", "r.horizon.height - Horizon angle computation from a digital elevation model.") QCoreApplication.translate("GrassAlgorithm", "r.sun - Solar irradiance and irradiation model.") QCoreApplication.translate("GrassAlgorithm", "v.clean - Toolset for cleaning 
topology of vector map.") QCoreApplication.translate("GrassAlgorithm", "r.recode - Recodes categorical raster maps.") QCoreApplication.translate("GrassAlgorithm", "v.parallel - Creates parallel line to input vector lines.") QCoreApplication.translate("GrassAlgorithm", "v.random - Randomly generate a 2D/3D vector points map.") QCoreApplication.translate("GrassAlgorithm", "r.describe - Prints terse list of category values found in a raster layer.") QCoreApplication.translate("GrassAlgorithm", "r.surf.gauss - Creates a raster layer of Gaussian deviates.") QCoreApplication.translate("GrassAlgorithm", "v.normal - Tests for normality for points.") QCoreApplication.translate("GrassAlgorithm", "i.his.rgb - Transforms raster maps from HIS (Hue-Intensity-Saturation) color space to RGB (Red-Green-Blue) color space.") QCoreApplication.translate("GrassAlgorithm", "r.report - Reports statistics for raster layers.") QCoreApplication.translate("GrassAlgorithm", "r.series - Makes each output cell value a function of the values assigned to the corresponding cells in the input raster layers.") QCoreApplication.translate("GrassAlgorithm", "m.cogo - A simple utility for converting bearing and distance measurements to coordinates and vice versa. It assumes a cartesian coordinate system") QCoreApplication.translate("GrassAlgorithm", "i.atcorr - Performs atmospheric correction using the 6S algorithm.") QCoreApplication.translate("GrassAlgorithm", "r.mfilter - Performs raster map matrix filter.") QCoreApplication.translate("GrassAlgorithm", "r.lake.layer - Fills lake at given point to given level.") QCoreApplication.translate("GrassAlgorithm", "r.drain.coordinate - Traces a flow through an elevation model on a raster map.") QCoreApplication.translate("GrassAlgorithm", "r.to.vect - Converts a raster into a vector layer.") QCoreApplication.translate("GrassAlgorithm", "v.voronoi - Creates a Voronoi diagram from an input vector layer containing points.") QCoreApplication.translate("GrassAlgorithm", "v.transform.pointsfile - Performs an affine transformation on a vector layer, using a support point file.") QCoreApplication.translate("GrassAlgorithm", "v.neighbors - Makes each cell value a function of attribute values and stores in an output raster map.") QCoreApplication.translate("GrassAlgorithm", "r.basins.fill - Generates watershed subbasins raster map.") QCoreApplication.translate("GrassAlgorithm", "r.flow - Construction of slope curves (flowlines), flowpath lengths, and flowline densities (upslope areas) from a raster digital elevation model (DEM).") QCoreApplication.translate("GrassAlgorithm", "r.cross - Creates a cross product of the category values from multiple raster map layers.") QCoreApplication.translate("GrassAlgorithm", "v.mkgrid - Creates a GRASS vector layer of a user-defined grid.") QCoreApplication.translate("GrassAlgorithm", "v.out.dxf - Exports GRASS vector map layers to DXF file format.") QCoreApplication.translate("GrassAlgorithm", "r.neighbors - Makes each cell category value a function of the category values assigned to the cells around it") QCoreApplication.translate("GrassAlgorithm", "r.median - Finds the median of values in a cover layer within areas assigned the same category value in a user-specified base layer.") QCoreApplication.translate("GrassAlgorithm", "r.cost.full.raster - Creates a raster layer of cumulative cost of moving across a raster layer whose cell values represent cost.") QCoreApplication.translate("GrassAlgorithm", "r.watershed - Watershed basin analysis program.") 
QCoreApplication.translate("GrassAlgorithm", "r.gwflow - Numerical calculation program for transient, confined and unconfined groundwater flow in two dimensions.") QCoreApplication.translate("GrassAlgorithm", "r.buffer - Creates a raster map layer showing buffer zones surrounding cells that contain non-NULL category values.") QCoreApplication.translate("GrassAlgorithm", "r.clump - Recategorizes data in a raster map by grouping cells that form physically discrete areas into unique categories.") QCoreApplication.translate("GrassAlgorithm", "v.buffer.column - Creates a buffer around features of given type.") QCoreApplication.translate("GrassAlgorithm", "i.fft - Fast Fourier Transform (FFT) for image processing.") QCoreApplication.translate("GrassAlgorithm", "r.spreadpath - Recursively traces the least cost path backwards to cells from which the cumulative cost was determined.") QCoreApplication.translate("GrassAlgorithm", "r.profile - Outputs the raster layer values lying on user-defined line(s).") QCoreApplication.translate("GrassAlgorithm", "v.to.rast.attribute - Converts (rasterize) a vector layer into a raster layer.") QCoreApplication.translate("GrassAlgorithm", "r.param.scale - Extracts terrain parameters from a DEM.") QCoreApplication.translate("GrassAlgorithm", "v.db.select - Prints vector map attributes") QCoreApplication.translate("GrassAlgorithm", "r.sunmask - Calculates cast shadow areas from sun position and elevation raster map.") QCoreApplication.translate("GrassAlgorithm", "v.surf.rst.cvdev.line - Spatial approximation and topographic analysis using regularized spline with tension.") QCoreApplication.translate("GrassAlgorithm", "r.random.raster - Create random raster") QCoreApplication.translate("GrassAlgorithm", "v.surf.bspline - Bicubic or bilinear spline interpolation with Tykhonov regularization.") QCoreApplication.translate("GrassAlgorithm", "r.rescale.eq - Rescales histogram equalized the range of category values in a raster layer.") QCoreApplication.translate("GrassAlgorithm", "r.cost - Creates a raster layer of cumulative cost of moving across a raster layer whose cell values represent cost.") QCoreApplication.translate("GrassAlgorithm", "v.surf.idw - Surface interpolation from vector point data by Inverse Distance Squared Weighting.") QCoreApplication.translate("GrassAlgorithm", "r.bilinear - Bilinear interpolation utility for raster map layers.") QCoreApplication.translate("GrassAlgorithm", "r.resamp.interp - Resamples a raster map layer to a finer grid using interpolation.") QCoreApplication.translate("GrassAlgorithm", "r.out.vrml - Export a raster layer to the Virtual Reality Modeling Language (VRML)") QCoreApplication.translate("GrassAlgorithm", "r.volume - Calculates the volume of data \"clumps\".") QCoreApplication.translate("GrassAlgorithm", "r.los - Line-of-sight raster analysis program.") QCoreApplication.translate("GrassAlgorithm", "v.patch - Create a new vector map layer by combining other vector map layers.") QCoreApplication.translate("GrassAlgorithm", "r.random - Creates a raster layer and vector point map containing randomly located points.") QCoreApplication.translate("GrassAlgorithm", "i.rgb.his - Transforms raster maps from RGB (Red-Green-Blue) color space to HIS (Hue-Intensity-Saturation) color space.") QCoreApplication.translate("GrassAlgorithm", "r.composite - Combines red, green and blue raster maps into a single composite raster map.") QCoreApplication.translate("GrassAlgorithm", "v.delaunay - Creates a Delaunay triangulation from an input 
vector map containing points or centroids.") QCoreApplication.translate("GrassAlgorithm", "r.contour.level - Create vector contour from raster at specified levels") QCoreApplication.translate("GrassAlgorithm", "v.univar - Calculates univariate statistics for attribute. Variance and standard deviation is calculated only for points if specified.") QCoreApplication.translate("GrassAlgorithm", "r.walk - Outputs a raster layer showing the anisotropic cumulative cost of moving based on friction cost.") QCoreApplication.translate("GrassAlgorithm", "v.class - Classifies attribute data, e.g. for thematic mapping.") QCoreApplication.translate("GrassAlgorithm", "r.spread - Simulates elliptically anisotropic spread on a graphics window and generates a raster map of the cumulative time of spread, given raster maps containing the rates of spread (ROS), the ROS directions and the spread origins.") QCoreApplication.translate("GrassAlgorithm", "v.overlay - Overlays two vector maps.") QCoreApplication.translate("GrassAlgorithm", "r.surf.idw2 - Surface generation.") QCoreApplication.translate("GrassAlgorithm", "v.in.dxf - Converts files in DXF format to GRASS vector map format.") QCoreApplication.translate("GrassAlgorithm", "r.drain - Traces a flow through an elevation model on a raster map.") QCoreApplication.translate("GrassAlgorithm", "r.sum - Sums up the raster cell values.") QCoreApplication.translate("GrassAlgorithm", "r.slope.aspect - Generates raster layers of slope, aspect, curvatures and partial derivatives from a elevation raster layer.") QCoreApplication.translate("GrassAlgorithm", "v.surf.bspline.sparse - Bicubic or bilinear spline interpolation with Tykhonov regularization.") QCoreApplication.translate("GrassAlgorithm", "r.grow - Generates a raster layer with contiguous areas grown by one cell.") QCoreApplication.translate("GrassAlgorithm", "v.qcount - Indices for quadrant counts of sites lists.") QCoreApplication.translate("GrassAlgorithm", "r.quant - Produces the quantization file for a floating-point map.") QCoreApplication.translate("GrassAlgorithm", "r.fillnulls - Fills no-data areas in a raster layer using v.surf.rst splines interpolation or v.surf.bspline interpolation") QCoreApplication.translate("GrassAlgorithm", "v.perturb - Random location perturbations of GRASS vector points") QCoreApplication.translate("GrassAlgorithm", "r.stats - Generates area statistics for raster layers.") QCoreApplication.translate("GrassAlgorithm", "r.sim.water - Overland flow hydrologic simulation using path sampling method (SIMWE).") QCoreApplication.translate("GrassAlgorithm", "v.to.points - Create points along input lines") QCoreApplication.translate("GrassAlgorithm", "r.reclass.area.lesser - Reclassifies a raster layer, selecting areas lower than a user specified size") QCoreApplication.translate("GrassAlgorithm", "v.split.vert - Split lines to shorter segments by max number of vertices.") QCoreApplication.translate("GrassAlgorithm", "v.surf.rst.cvdev - Spatial approximation and topographic analysis using regularized spline with tension.") QCoreApplication.translate("GrassAlgorithm", "r.ros - Generates three, or four raster map layers showing 1) the base (perpendicular) rate of spread (ROS), 2) the maximum (forward) ROS, 3) the direction of the maximum ROS, and optionally 4) the maximum potential spotting distance.") QCoreApplication.translate("GrassAlgorithm", "v.lidar.edgedetection - Detects the object's edges from a LIDAR data set.") QCoreApplication.translate("GrassAlgorithm", "r.surf.idw - Surface 
interpolation utility for raster layers.") QCoreApplication.translate("GrassAlgorithm", "v.select - Selects features from vector map (A) by features from other vector map (B).") QCoreApplication.translate("GrassAlgorithm", "r.surf.area - Surface area estimation for rasters.") QCoreApplication.translate("GrassAlgorithm", "v.in.wfs - Import GetFeature from WFS") QCoreApplication.translate("nviz", "nviz") """Grass7AlgorithmProvider""" """ScriptAlgorithmProvider""" QCoreApplication.translate("ScriptAlgorithm", "Number of unique values in classes") QCoreApplication.translate("ScriptAlgorithm", "Hex grid from layer bounds") QCoreApplication.translate("ScriptAlgorithm", "Batch string replace via regex dictionary") QCoreApplication.translate("ScriptAlgorithm", "test help") QCoreApplication.translate("ScriptAlgorithm", "Points on touching lines") QCoreApplication.translate("ScriptAlgorithm", "Create rasters from canvas for each vector feature extent") QCoreApplication.translate("ScriptAlgorithm", "Points on crossing lines") QCoreApplication.translate("ScriptAlgorithm", "Points from vector") QCoreApplication.translate("ScriptAlgorithm", "Square grid from layer extent") QCoreApplication.translate("ScriptAlgorithm", "Create vector layer from SQL Query") QCoreApplication.translate("ScriptAlgorithm", "Read file content into string") QCoreApplication.translate("ScriptAlgorithm", "Assing predominant category") QCoreApplication.translate("ScriptAlgorithm", "Extract raster values to shapefile") QCoreApplication.translate("ScriptAlgorithm", "Buffer Contour") QCoreApplication.translate("ScriptAlgorithm", "Save features filtered by expression") QCoreApplication.translate("ScriptAlgorithm", "Extract raster values to CSV") QCoreApplication.translate("ScriptAlgorithm", "Save selected features") QCoreApplication.translate("ScriptAlgorithm", "Create vector layer from postgis table") QCoreApplication.translate("ScriptAlgorithm", "Keep n biggest parts") QCoreApplication.translate("ScriptAlgorithm", "Frequency analysis") QCoreApplication.translate("ScriptAlgorithm", "Cut by field") QCoreApplication.translate("ScriptAlgorithm", "Define 1 vector layer properties") QCoreApplication.translate("ScriptAlgorithm", "classification by decision tree") QCoreApplication.translate("ScriptAlgorithm", "Split vector layer by attribute") QCoreApplication.translate("ScriptAlgorithm", "CSV RGB or HEX to categorized style") QCoreApplication.translate("ScriptAlgorithm", "Contour") QCoreApplication.translate("ScriptAlgorithm", "Chainage") QCoreApplication.translate("ScriptAlgorithm", "Remove parts") QCoreApplication.translate("ScriptAlgorithm", "Summarize") QCoreApplication.translate("ScriptAlgorithm", "CSV R-G-B to categorized style") QCoreApplication.translate("ScriptAlgorithm", "pygraticule") QCoreApplication.translate("ScriptAlgorithm", "Define 1 raster layer properties") QCoreApplication.translate("ScriptAlgorithm", "Create tiling from vector layer") QCoreApplication.translate("ScriptAlgorithm", "Define multiple vector layers properties") QCoreApplication.translate("ScriptAlgorithm", "Set multiple raster layers properties") QCoreApplication.translate("ScriptAlgorithm", "Fill holes") QCoreApplication.translate("ScriptAlgorithm", "Unique values count") """TauDEMAlgorithmProvider""" """ModelerAlgorithmProvider""" """Groups and subgroups""" QCoreApplication.translate("AlgorithmClassification", "User scripts") QCoreApplication.translate("AlgorithmClassification", "Statistics") QCoreApplication.translate("AlgorithmClassification", "Vector 
overlay tools") QCoreApplication.translate("AlgorithmClassification", "Overlay") QCoreApplication.translate("AlgorithmClassification", "[GDAL] Miscellaneous") QCoreApplication.translate("AlgorithmClassification", "Feature Extraction") QCoreApplication.translate("AlgorithmClassification", "Raster -> Vector") QCoreApplication.translate("AlgorithmClassification", "Miscellaneous") QCoreApplication.translate("AlgorithmClassification", "Vector geometry tools") QCoreApplication.translate("AlgorithmClassification", "Vector selection tools") QCoreApplication.translate("AlgorithmClassification", "Vector Data Manipulation") QCoreApplication.translate("AlgorithmClassification", "[OGR] Conversion") QCoreApplication.translate("AlgorithmClassification", "Imagery (i.*)") QCoreApplication.translate("AlgorithmClassification", "Point pattern analysis") QCoreApplication.translate("AlgorithmClassification", "My scripts") QCoreApplication.translate("AlgorithmClassification", "Modeler-only tools") QCoreApplication.translate("AlgorithmClassification", "Home Range Analysis") QCoreApplication.translate("AlgorithmClassification", "Vector (v.*)") QCoreApplication.translate("AlgorithmClassification", "Miscellaneous (m.*)") QCoreApplication.translate("AlgorithmClassification", "Visualization(NVIZ)") QCoreApplication.translate("AlgorithmClassification", "Segmentation") QCoreApplication.translate("AlgorithmClassification", "Vector table tools") QCoreApplication.translate("AlgorithmClassification", "Vector creation tools") QCoreApplication.translate("AlgorithmClassification", "Raster - vector") QCoreApplication.translate("AlgorithmClassification", "Viewsheds\Lighting") QCoreApplication.translate("AlgorithmClassification", "Calibration") QCoreApplication.translate("AlgorithmClassification", "Classification by decision tree") QCoreApplication.translate("AlgorithmClassification", "Vector -> Raster") QCoreApplication.translate("AlgorithmClassification", "Raster general tools") QCoreApplication.translate("AlgorithmClassification", "[OGR] Miscellaneous") QCoreApplication.translate("AlgorithmClassification", "[OGR] Geoprocessing") QCoreApplication.translate("AlgorithmClassification", "Image Manipulation") QCoreApplication.translate("AlgorithmClassification", "Raster tools") QCoreApplication.translate("AlgorithmClassification", "Graphics") QCoreApplication.translate("AlgorithmClassification", "Plots") QCoreApplication.translate("AlgorithmClassification", "Image Filtering") QCoreApplication.translate("AlgorithmClassification", "[GDAL] Analysis") QCoreApplication.translate("AlgorithmClassification", "Raster") QCoreApplication.translate("AlgorithmClassification", "Stereo") QCoreApplication.translate("AlgorithmClassification", "Vector processing") QCoreApplication.translate("AlgorithmClassification", "Vector analysis tools") QCoreApplication.translate("AlgorithmClassification", "[GDAL] Projections") QCoreApplication.translate("AlgorithmClassification", "Database") QCoreApplication.translate("AlgorithmClassification", "Geometry") QCoreApplication.translate("AlgorithmClassification", "Terrain analysis and geomorphometry") QCoreApplication.translate("AlgorithmClassification", "Style") QCoreApplication.translate("AlgorithmClassification", "Domain specific") QCoreApplication.translate("AlgorithmClassification", "Lines") QCoreApplication.translate("AlgorithmClassification", "Analysis") QCoreApplication.translate("AlgorithmClassification", "[GDAL] Extraction") QCoreApplication.translate("AlgorithmClassification", "Vector") 
QCoreApplication.translate("AlgorithmClassification", "Table") QCoreApplication.translate("AlgorithmClassification", "Basic statistics") QCoreApplication.translate("AlgorithmClassification", "Vector general tools") QCoreApplication.translate("AlgorithmClassification", "Modeler") QCoreApplication.translate("AlgorithmClassification", "Vector_layer") QCoreApplication.translate("AlgorithmClassification", "Selection") QCoreApplication.translate("AlgorithmClassification", "Raster (r.*)") QCoreApplication.translate("AlgorithmClassification", "[GDAL] Conversion") QCoreApplication.translate("AlgorithmClassification", "Modeler tools") QCoreApplication.translate("AlgorithmClassification", "Creation") QCoreApplication.translate("AlgorithmClassification", "Table tools") QCoreApplication.translate("AlgorithmClassification", "Edition") QCoreApplication.translate("AlgorithmClassification", "Points") QCoreApplication.translate("AlgorithmClassification", "Utils") QCoreApplication.translate("AlgorithmClassification", "Raster processing") QCoreApplication.translate("AlgorithmClassification", "Learning") QCoreApplication.translate("AlgorithmClassification", "Images") QCoreApplication.translate("AlgorithmClassification", "Geometry operations") QCoreApplication.translate("AlgorithmClassification", "General tools") QCoreApplication.translate("AlgorithmClassification", "Polygons")
gpl-2.0
1,586,493,956,906,256,000
85.993485
286
0.779927
false
IE-NITK/NITK-Student-Council-Website
src/smriti/views.py
1
8079
from django.shortcuts import render, get_object_or_404, redirect
from django.views import generic
from django.contrib import auth, messages
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.conf import settings

from profiles.models import Profile
from profiles.forms import ProfileForm
from .models import *
from .mails import sendgrid_mail

from braces.views import LoginRequiredMixin
import django_rq

# Create your views here.


def sort_by_rollno(queryset):
    # Roll numbers carry a 4-character prefix followed by a numeric suffix,
    # so sort on the integer value of the suffix.
    return sorted(queryset, key=lambda x: int(x.rollno[4:]))


def indexPage(request):
    if request.method == 'POST':
        username = request.POST.get('email', '')
        password = request.POST.get('password', '')
        user = auth.authenticate(email=username, password=password)
        if user is not None:
            auth.login(request, user)
            return HttpResponseRedirect('/smriti/home/')
        else:
            return render(request, 'smriti/index.html',
                          {"errors": "Invalid login credentials. Please try again with the correct credentials."})
    if request.method == 'GET':
        if request.user.is_authenticated():
            return HttpResponseRedirect('/smriti/home/')
        return render(request, 'smriti/index.html')


@login_required
def homePage(request):
    testimonials = Testimonial.objects.filter(testimonial_to=request.user)
    profile = Profile.objects.get(user=request.user)
    return render(request, "smriti/home.html",
                  {'testimonials': testimonials, 'profile': profile})


def browsePage(request):
    final_years = Profile.objects.filter(rollno__contains="13")
    # cornercase = Profile.objects.filter(rollno="11IT26")
    ch = sort_by_rollno(final_years.filter(branch='CH'))
    co = sort_by_rollno(final_years.filter(branch='CO'))
    cv = sort_by_rollno(final_years.filter(branch='CV'))
    ec = sort_by_rollno(final_years.filter(branch='EC'))
    ee = sort_by_rollno(final_years.filter(branch='EE'))
    it = sort_by_rollno(final_years.filter(branch='IT'))
    me = sort_by_rollno(final_years.filter(branch='ME'))
    mn = sort_by_rollno(final_years.filter(branch='MN'))
    mt = sort_by_rollno(final_years.filter(branch='MT'))
    return render(request, "smriti/browse.html",
                  {'ch': ch, 'co': co, 'cv': cv, 'ec': ec, 'ee': ee,
                   'it': it, 'me': me, 'mn': mn, 'mt': mt})


def searchPage(request):
    if request.method == 'POST':
        search_param = request.POST.get('search_param', '')
        final_years = Profile.objects.filter(rollno__contains="13")
        name_result = final_years.filter(user__name__icontains=search_param)
        roll_result = final_years.filter(rollno__icontains=search_param)
        results = name_result | roll_result
        return render(request, "smriti/search.html",
                      {'results': results, 'key': search_param})
    if request.method == 'GET':
        return render(request, 'smriti/search.html')


def testimonial(request, id):
    testimonial = get_object_or_404(Testimonial, id=id)
    return render(request, "smriti/testimonial.html",
                  {'testimonial': testimonial})


class WritePage(generic.TemplateView):
    template_name = "smriti/write.html"


def profilePage(request, rollno):
    profile = get_object_or_404(Profile, rollno__iexact=rollno)
    testimonials = Testimonial.objects.filter(testimonial_to=profile.user)
    return render(request, "smriti/home.html",
                  {'testimonials': testimonials, 'profile': profile})


def send_new_testimonial_mail(to, writer, id):
    subject = "New testimonial on Smriti from " + writer.name
    link = "http://students.nitk.ac.in" + reverse("smriti:testimonial", args=[id])
    content = "Hello " + to.name + """,
    <br><br>
    You have a new testimonial on Smriti from """ + writer.name + """.
    <br><br>You can view it at """ + link + """.<br><br>
    Regards,<br>
    Smriti 2017 Team, IE-NITK
    <br><br>
    <%asm_group_unsubscribe_url%>
    """
    asm_group = settings.SENDGRID_TESTIMONIAL_ASM_ID
    sendgrid_mail(to.email, subject, content, asm_group)


@login_required
def writeTestimonial(request, rollno):
    testimonial_to = get_object_or_404(Profile, rollno__iexact=rollno)
    if request.method == "GET":
        testimonial = Testimonial.objects.filter(
            testimonial_to=testimonial_to.user, created_by=request.user)
        if testimonial.exists():
            current_content = testimonial[0]
            return render(request, "smriti/write.html",
                          {'to': testimonial_to, "testimonial": current_content})
        else:
            return render(request, "smriti/write.html", {'to': testimonial_to})
    elif request.method == "POST":
        content = request.POST.get('content', '')
        if content.strip() == "":
            return render(request, "smriti/generic.html",
                          {"content": "Sorry! Blank testimonials are not allowed."})
        test, created = Testimonial.objects.get_or_create(
            testimonial_to=testimonial_to.user,
            created_by=request.user,
        )
        test.description = content.strip()
        test.save()
        if created:
            # Mail notification is currently disabled:
            # django_rq.enqueue(send_new_testimonial_mail, testimonial_to.user, request.user, test.id)
            return redirect("/smriti/profiles/" + testimonial_to.rollno)
        else:
            return render(request, "smriti/generic.html",
                          {"content": "Oops! Something went wrong. Please try again!"})


class EditProfile(LoginRequiredMixin, generic.TemplateView):
    template_name = "smriti/edit_profile.html"
    http_method_names = ['get', 'post']

    def get(self, request, *args, **kwargs):
        user = self.request.user
        if "profile_form" not in kwargs:
            kwargs["profile_form"] = ProfileForm(instance=user.profile)
        return super(EditProfile, self).get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        user = self.request.user
        profile_form = ProfileForm(request.POST, request.FILES,
                                   instance=user.profile)
        if not profile_form.is_valid():
            messages.error(request, "There was a problem with the form. "
                                    "Please check the details.")
            profile_form = ProfileForm(instance=user.profile)
            return super(EditProfile, self).get(request,
                                                profile_form=profile_form)
        profile = profile_form.save(commit=False)
        profile.user = user
        profile.save()
        messages.success(request, "Profile details saved!")
        return redirect("smriti:home")


def feed(request):
    testimonial_list = Testimonial.objects.all()
    paginator = Paginator(testimonial_list, 50)
    page = request.GET.get('page')
    try:
        testimonials = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver the first page.
        testimonials = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver the last page of results.
        testimonials = paginator.page(paginator.num_pages)
    return render(request, "smriti/feed.html", {"testimonials": testimonials})


@login_required
def deleteTestimonial(request, id):
    testimonial = get_object_or_404(Testimonial, id=id)
    if not (request.user == testimonial.testimonial_to or
            request.user == testimonial.created_by):
        return render(request, "smriti/generic.html",
                      {"content": "You don't have permissions."})
    testimonial.delete()
    return render(request, "smriti/generic.html",
                  {"content": "Your testimonial has been deleted."})
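# --- Editor's note: hedged usage sketch, not part of the original module. ---
# sort_by_rollno assumes roll numbers such as "13IT9": a 4-character prefix
# followed by a numeric suffix, so rollno[4:] must parse as an int. A plain
# lexicographic sort would order "13IT10" before "13IT9"; the numeric key
# fixes that. FakeProfile below is a hypothetical stand-in for
# profiles.models.Profile, just enough to exercise the helper:
#
#     class FakeProfile(object):
#         def __init__(self, rollno):
#             self.rollno = rollno
#
#     rolls = [FakeProfile(r) for r in ("13IT10", "13IT2", "13IT9")]
#     assert [p.rollno for p in sort_by_rollno(rolls)] == \
#         ["13IT2", "13IT9", "13IT10"]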
mit
8,959,160,691,661,415,000
44.644068
138
0.630276
false
hcarvalhoalves/hy
hy/importer.py
1
6134
# Copyright (c) 2013 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

from py_compile import wr_long, MAGIC
from hy.compiler import hy_compile
from hy.models import HyObject
from hy.lex import tokenize
from io import open

import marshal
import imp
import sys
import ast
import os
import __future__

from hy._compat import builtins, long_type


def ast_compile(ast, filename, mode):
    """Compile AST. Like Python's compile, but with some special flags."""
    flags = (__future__.CO_FUTURE_DIVISION |
             __future__.CO_FUTURE_PRINT_FUNCTION)
    return compile(ast, filename, mode, flags)


def import_buffer_to_hst(buf):
    """Import content from buf and return a Hy AST."""
    return tokenize(buf + "\n")


def import_file_to_hst(fpath):
    """Import content from fpath and return a Hy AST."""
    with open(fpath, 'r', encoding='utf-8') as f:
        return import_buffer_to_hst(f.read())


def import_buffer_to_ast(buf, module_name):
    """Import content from buf and return a Python AST."""
    return hy_compile(import_buffer_to_hst(buf), module_name)


def import_file_to_ast(fpath, module_name):
    """Import content from fpath and return a Python AST."""
    return hy_compile(import_file_to_hst(fpath), module_name)


def import_file_to_module(module_name, fpath):
    """Import content from fpath, put it into a Python module and
    return the module."""
    try:
        _ast = import_file_to_ast(fpath, module_name)
        mod = imp.new_module(module_name)
        mod.__file__ = fpath
        eval(ast_compile(_ast, fpath, "exec"), mod.__dict__)
    except Exception:
        # Don't leave a half-initialized module behind on failure.
        sys.modules.pop(module_name, None)
        raise
    return mod


def import_buffer_to_module(module_name, buf):
    _ast = import_buffer_to_ast(buf, module_name)
    mod = imp.new_module(module_name)
    eval(ast_compile(_ast, "", "exec"), mod.__dict__)
    return mod


def hy_eval(hytree, namespace, module_name):
    # Give the tree (dummy) source positions so compilation succeeds.
    foo = HyObject()
    foo.start_line = 0
    foo.end_line = 0
    foo.start_column = 0
    foo.end_column = 0
    hytree.replace(foo)
    _ast, expr = hy_compile(hytree, module_name, get_expr=True)

    # Spoof the positions in the generated ast...
    for node in ast.walk(_ast):
        node.lineno = 1
        node.col_offset = 1

    for node in ast.walk(expr):
        node.lineno = 1
        node.col_offset = 1

    # Two-step eval: eval() the body of the exec call
    eval(ast_compile(_ast, "<eval_body>", "exec"), namespace)

    # Then eval the expression context and return that
    return eval(ast_compile(expr, "<eval>", "eval"), namespace)


def write_hy_as_pyc(fname):
    with open(fname, 'U') as f:
        try:
            st = os.fstat(f.fileno())
        except AttributeError:
            st = os.stat(fname)
        timestamp = long_type(st.st_mtime)

    _ast = import_file_to_ast(fname,
                              os.path.basename(os.path.splitext(fname)[0]))
    code = ast_compile(_ast, fname, "exec")
    cfile = "%s.pyc" % fname[:-len(".hy")]

    open_ = builtins.open

    with open_(cfile, 'wb') as fc:
        # Write a placeholder magic number first; the real MAGIC is written
        # only after the rest of the .pyc has been written successfully.
        if sys.version_info[0] >= 3:
            fc.write(b'\0\0\0\0')
        else:
            fc.write('\0\0\0\0')
        wr_long(fc, timestamp)
        if (sys.version_info[0] >= 3 and sys.version_info[1] >= 3):
            wr_long(fc, st.st_size)
        marshal.dump(code, fc)
        fc.flush()
        fc.seek(0, 0)
        fc.write(MAGIC)


class MetaLoader(object):
    def __init__(self, path):
        self.path = path

    def is_package(self, fullname):
        dirpath = "/".join(fullname.split("."))
        for pth in sys.path:
            pth = os.path.abspath(pth)
            composed_path = "%s/%s/__init__.hy" % (pth, dirpath)
            if os.path.exists(composed_path):
                return True
        return False

    def load_module(self, fullname):
        if fullname in sys.modules:
            return sys.modules[fullname]

        if not self.path:
            return

        sys.modules[fullname] = None
        mod = import_file_to_module(fullname, self.path)

        ispkg = self.is_package(fullname)

        mod.__file__ = self.path
        mod.__loader__ = self
        mod.__name__ = fullname

        if ispkg:
            mod.__path__ = []
            mod.__package__ = fullname
        else:
            mod.__package__ = fullname.rpartition('.')[0]

        sys.modules[fullname] = mod
        return mod


class MetaImporter(object):
    def find_on_path(self, fullname):
        fls = ["%s/__init__.hy", "%s.hy"]
        dirpath = "/".join(fullname.split("."))

        for pth in sys.path:
            pth = os.path.abspath(pth)
            for fp in fls:
                composed_path = fp % ("%s/%s" % (pth, dirpath))
                if os.path.exists(composed_path):
                    return composed_path

    def find_module(self, fullname, path=None):
        path = self.find_on_path(fullname)
        if path:
            return MetaLoader(path)


sys.meta_path.append(MetaImporter())
sys.path.insert(0, "")
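# --- Editor's note: hedged usage sketch, not part of the original file. ---
# Importing this module installs MetaImporter on sys.meta_path (see the last
# two lines above), so ordinary import statements can then resolve .hy
# sources found on sys.path. "greetings" is a hypothetical module backed by
# a file greetings.hy, e.g. containing: (defn hello [] (print "hello"))
#
#     import hy.importer   # side effect: registers the PEP 302 finder/loader
#     import greetings     # MetaImporter.find_module locates greetings.hy
#     greetings.hello()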
mit
3,325,653,699,906,485,000
29.216749
76
0.61477
false