Dataset columns:
  code       string (lengths 3 to 1.05M)
  repo_name  string (lengths 5 to 104)
  path       string (lengths 4 to 251)
  language   string (1 class)
  license    string (15 classes)
  size       int64 (3 to 1.05M)
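If these columns describe a Hugging Face style code dataset (an assumption based on the stringlengths/stringclasses summary above; the dataset identifier below is only a placeholder), a row can be inspected like this:

from datasets import load_dataset  # assumes the Hugging Face `datasets` library

# "<dataset-name>" is a placeholder; the actual dataset identifier is not given above
ds = load_dataset("<dataset-name>", split="train")
row = ds[0]
print(row["repo_name"], row["path"], row["language"], row["license"], row["size"])
print(row["code"][:200])  # first 200 characters of the stored source file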
# -*- coding: utf-8 -*-
"""
Hardware file for the Superconducting Magnet (SCM)

QuDi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

QuDi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with QuDi. If not, see <http://www.gnu.org/licenses/>.

Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""

import socket
from core.base import Base
import numpy as np
import time
from interface.magnet_interface import MagnetInterface
from collections import OrderedDict
import re


class Magnet(Base, MagnetInterface):
    """Magnet positioning software for the superconducting magnet.

    Enables precise positioning of the magnetic field in spherical coordinates
    with the angles theta, phi and the radius rho. The superconducting magnet
    has three coils, one each in the x, y and z direction. The current through
    these coils is used to compute theta, phi and rho. The alignment can be
    done manually as well as automatically via fluorescence alignment.
    """

    _modtype = 'Magnet'
    _modclass = 'hardware'

    def __init__(self, **kwargs):
        """Here the connections to the power supplies and to the counter are established."""
        super().__init__(**kwargs)
        socket.setdefaulttimeout(3)
        try:
            self.soc_x = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        except socket.timeout:
            self.log.error("socket timeout for coil in x-direction")
        try:
            self.soc_y = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        except socket.timeout:
            self.log.error("socket timeout for coil in y-direction")
        try:
            self.soc_z = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        except socket.timeout:
            self.log.error("socket timeout for coil in z-direction")

        # default waiting time of the PC after a message was sent to the magnet;
        # should be set in the config file
        self.waitingtime = 0.01

        # saves in which interval the input theta was during the last movement
        self._inter = 1

        # "normal_mode" allows magnetic fields of up to 1 T along each axis,
        # with a total field vector no larger than 1.2 T.
        # In z_mode you are allowed to move in a 5° solid angle around the z-axis
        # with a maximum field of 3 T.
        # For more documentation and how to change the mode, see the
        # function switch_mode.
        self.mode = "normal_mode"

        # constraints of the superconducting magnet in T;
        # should be set in the config file.
        # Normally constraints are fetched and set in get_constraints(), but here
        # the constraint on rho is not constant: it depends on the
        # current theta and phi values.
self.x_constr = 1.0 self.y_constr = 1.0 self.z_constr = 3.0 self.rho_constr = 1.2 def on_activate(self): """ loads the config file and extracts the necessary configurations for the superconducting magnet @return int: (0: Ok, -1:error) """ # get necessary information from the config file config = self.getConfiguration() if 'magnet_port' in config.keys(): port = config['magnet_port'] else: self.log.error('No port hs been defined in the config file!') return -1 if 'magnet_IP_address_x' in config.keys(): self.soc_x.connect((config['magnet_IP_address_x'], port)) else: self.log.error('No ip-address for connection to x-coil defined!') return -1 if 'magnet_IP_address_y' in config.keys(): self.soc_y.connect((config['magnet_IP_address_y'], port)) else: self.log.error('No ip-address for connection to y-coil defined!') return -1 if 'magnet_IP_address_z' in config.keys(): self.soc_z.connect((config['magnet_IP_address_z'], port)) else: self.log.error('No ip-address for connection to z-coil defined!') return -1 if 'magnet_waitingtime' in config.keys(): self.waitingtime = config['magnet_waitingtime'] if 'magnet_x_constr' in config.keys(): self.x_constr = config['magnet_x_constr'] if 'magnet_y_constr' in config.keys(): self.y_constr = config['magnet_y_constr'] if 'magnet_z_constr' in config.keys(): self.z_constr = config['magnet_z_constr'] if 'magnet_rho_constr' in config.keys(): self.rho_constr = config['magnet_rho_constr'] # sending a signal to all coils to receive an answer to cut off the # useless welcome message. ask_dict = {'x': "STATE?\n", 'y': "STATE?\n", 'z': "STATE?\n"} answ_dict = self.ask(ask_dict) self.log.info("Magnet in state: {0}".format(answ_dict)) # sending a command to the magnet to turn into SI units regarding # field units. self.heat_all_switches() tell_dict = {'x': 'CONF:FIELD:UNITS 1', 'y': 'CONF:FIELD:UNITS 1', 'z': 'CONF:FIELD:UNITS 1'} self.tell(tell_dict) def on_deactivate(self): self.soc_x.close() self.soc_y.close() self.soc_z.close() def utf8_to_byte(self, myutf8): """ Convenience function for code refactoring @param string myutf8 the message to be encoded @return the encoded message in bytes """ return myutf8.encode('utf-8') def byte_to_utf8(self, mybytes): """ Convenience function for code refactoring @param bytes mybytes the byte message to be decoded @return the decoded string in uni code """ return mybytes.decode() # =========================== Magnet Functionality Core ==================================== def get_constraints(self): """ Retrieve the hardware constraints from the magnet driving device. @return dict: dict with constraints for the magnet hardware. These constraints will be passed via the logic to the GUI so that proper display elements with boundary conditions could be made. Provides all the constraints for each axis of a motorized stage (like total travel distance, velocity, ...) Each axis has its own dictionary, where the label is used as the identifier throughout the whole module. The dictionaries for each axis are again grouped together in a constraints dictionary in the form {'<label_axis0>': axis0 } where axis0 is again a dict with the possible values defined below. The possible keys in the constraint are defined in the interface file. If the hardware does not support the values for the constraints, then insert just None. If you are not sure about the meaning, look in other hardware files to get an impression. 
""" constraints = OrderedDict() pos_dict = self.get_pos() coord_list = [pos_dict['rho'], pos_dict['theta'], pos_dict['phi']] pos_max_dict = self.rho_pos_max({'rad': coord_list}) # get the constraints for the x axis: axis0 = {} axis0['label'] = 'rho' # name is just as a sanity included axis0['unit'] = 'T' # the SI units axis0['pos_min'] = 0 axis0['pos_max'] = pos_max_dict['rho'] axis0['pos_step'] = 300000 axis0['vel_min'] = 0 axis0['vel_max'] = 0.0404*0.01799 # unit is T/s axis0['vel_step'] = 10**4 # In fact position constraints for rho is dependent on theta and phi, which would need # the use of an additional function to calculate # going to change the return value to a function rho_max_pos which needs the current theta and # phi position # get the constraints for the x axis: axis1 = {} axis1['label'] = 'theta' # name is just as a sanity included axis1['unit'] = 'rad' # the SI units axis1['pos_min'] = -1000 # arbitrary values for now ( there isn't any restriction on them ) axis1['pos_max'] = 1000 # that is basically the traveling range axis1['pos_step'] = 36000 axis1['vel_min'] = 0 axis1['vel_max'] = 0.0404*0.01799 #unit is T/s axis1['vel_step'] = 10**4 # get the constraints for the x axis: axis2 = {} axis2['label'] = 'phi' # name is just as a sanity included axis2['unit'] = 'rad' # the SI units axis2['pos_min'] = -1000 # arbitrary values for now ( there isn't any restriction on them ) axis2['pos_max'] = 1000 # that is basically the traveling range axis2['pos_step'] = 92000 axis2['vel_min'] = 0 axis2['vel_max'] = 0.0380*0.07028 #unit is T/s axis2['vel_step'] = 10**4 # assign the parameter container for x to a name which will identify it constraints[axis0['label']] = axis0 constraints[axis1['label']] = axis1 constraints[axis2['label']] = axis2 return constraints def tell(self, param_dict): """Send a command string to the magnet. @param dict param_dict: has to have one of the following keys: 'x', 'y' or 'z' with an appropriate command for the magnet """ internal_counter = 0 if param_dict.get('x') is not None: if not param_dict['x'].endswith('\n'): param_dict['x'] += '\n' self.soc_x.send(self.utf8_to_byte(param_dict['x'])) internal_counter += 1 if param_dict.get('y') is not None: if not param_dict['y'].endswith('\n'): param_dict['y'] += '\n' self.soc_y.send(self.utf8_to_byte(param_dict['y'])) internal_counter += 1 if param_dict.get('z') is not None: if not param_dict['z'].endswith('\n'): param_dict['z'] += '\n' self.soc_z.send(self.utf8_to_byte(param_dict['z'])) internal_counter += 1 if internal_counter == 0: self.log.warning('no parameter_dict was given therefore the ' 'function tell() call was useless') def ask(self, param_dict): """Asks the magnet a 'question' and returns an answer from it. @param dictionary param_dict: has to have one of the following keys: 'x', 'y' or 'z' the items have to be valid questions for the magnet. @return answer_dict: contains the same labels as the param_dict if it was set correct and the corresponding items are the answers of the magnet (format is string), else an empty dictionary is returned """ answer_dict = {} if param_dict.get('x') is not None: if not param_dict['x'].endswith('\n'): param_dict['x'] += '\n' # repeat this block to get out crappy messages. self.soc_x.send(self.utf8_to_byte(param_dict['x'])) # time.sleep(self.waitingtime) # you need to wait until magnet generating # an answer. 
answer_dict['x'] = self.byte_to_utf8(self.soc_x.recv(1024)) # receive an answer self.soc_x.send(self.utf8_to_byte(param_dict['x'])) # time.sleep(self.waitingtime) # you need to wait until magnet generating # an answer. answer_dict['x'] = self.byte_to_utf8(self.soc_x.recv(1024)) # receive an answer answer_dict['x'] = answer_dict['x'].replace('\r', '') answer_dict['x'] = answer_dict['x'].replace('\n', '') if param_dict.get('y') is not None: if not param_dict['y'].endswith('\n'): param_dict['y'] += '\n' self.soc_y.send(self.utf8_to_byte(param_dict['y'])) # time.sleep(self.waitingtime) # you need to wait until magnet generating # an answer. answer_dict['y'] = self.byte_to_utf8(self.soc_y.recv(1024)) # receive an answer self.soc_y.send(self.utf8_to_byte(param_dict['y'])) # time.sleep(self.waitingtime) # you need to wait until magnet generating # an answer. answer_dict['y'] = self.byte_to_utf8(self.soc_y.recv(1024)) # receive an answer answer_dict['y'] = answer_dict['y'].replace('\r', '') answer_dict['y'] = answer_dict['y'].replace('\n', '') if param_dict.get('z') is not None: if not param_dict['z'].endswith('\n'): param_dict['z'] += '\n' self.soc_z.send(self.utf8_to_byte(param_dict['z'])) # time.sleep(self.waitingtime) # you need to wait until magnet generating # an answer. answer_dict['z'] = self.byte_to_utf8(self.soc_z.recv(1024)) # receive an answer self.soc_z.send(self.utf8_to_byte(param_dict['z'])) # time.sleep(self.waitingtime) # you need to wait until magnet generating # an answer. answer_dict['z'] = self.byte_to_utf8(self.soc_z.recv(1024)) # receive an answer answer_dict['z'] = answer_dict['z'].replace('\r', '') answer_dict['z'] = answer_dict['z'].replace('\n', '') if len(answer_dict) == 0: self.log.warning('no parameter_dict was given therefore the ' 'function call ask() was useless') return answer_dict def get_status(self, param_list=None): """ Get the status of the position @param list param_list: optional, if a specific status of an axis is desired, then the labels of the needed axis should be passed in the param_list. If nothing is passed, then from each axis the status is asked. @return dict: with the axis label as key and the status number as item. Possible states are { -1 : Error, 1: SCM doing something, 0: SCM doing nothing } """ # I have chosen the numbers rather lightly and # an improvement is probably easily achieved. if param_list is not None: status_plural = self.ask_status(param_list) else: status_plural = self.ask_status() status_dict = {} for axes in status_plural: status = status_plural[axes] translated_status = -1 if status == '1': translated_status = 1 elif status == '2': translated_status = 0 elif status == '3': translated_status = 0 elif status == '4': translated_status = 1 elif status == '5': translated_status = 0 elif status == '6': translated_status = 1 elif status == '7': translated_status = -1 elif status == '8': translated_status = 0 elif status == '9': translated_status = 1 elif status == '10': translated_status = 1 status_dict[axes] = translated_status # adjusting to the axis problem axes = ['rho', 'theta', 'phi'] return_dict = {axes[i] : status_dict[old_key] for i, old_key in enumerate(status_dict)} return return_dict def heat_switch(self, axis): """ This function enables heating of the PJSwitch, which is a necessary step to conduct current to the coils. 
@param string axis: desired axis (x, y, z) """ if axis == "x": self.soc_x.send(self.utf8_to_byte("PS 1\n")) elif axis == "y": self.soc_y.send(self.utf8_to_byte("PS 1\n")) elif axis == "z": self.soc_z.send(self.utf8_to_byte("PS 1\n")) else: self.log.error("In function heat_switch only 'x', 'y' and 'z' are possible axes") def heat_all_switches(self): """ Just a convenience function to heat all switches at once, as it is unusual to only apply a magnetic field in one direction""" self.heat_switch("x") self.heat_switch("y") self.heat_switch("z") def cool_switch(self, axis): """ Turns off the heating of the PJSwitch, axis depending on user input @param string axis: desired axis (x, y, z) """ if axis == "x": self.soc_x.send(self.utf8_to_byte("PS 0\n")) elif axis == "y": self.soc_y.send(self.utf8_to_byte("PS 0\n")) elif axis == "z": self.soc_z.send(self.utf8_to_byte("PS 0\n")) else: self.log.error("In function cool_switch only 'x', 'y' and 'z' are possible axes") def cool_all_switches(self): """ Just a convenience function to cool all switches at once This will take 600s.""" self.cool_switch("x") self.cool_switch("y") self.cool_switch("z") def initialize(self): """ Acts as a switch. When all coils of the superconducting magnet are heated it cools them, else the coils get heated. @return int: (0: Ok, -1:error) """ # need to ask if the PJSwitch is on answ_dict = {} answ_dict = self.ask({'x': "PS?", 'y': "PS?", 'z': "PS?"}) if answ_dict['x'] == answ_dict['y'] == answ_dict['z']: if answ_dict['x'] == '0': self.heat_all_switches() else: self.cool_all_switches() else: self.log.warning('can not correctly turn on/ turn off magnet, ' 'because not all coils are in the same state in function ' 'initialize') return -1 return 0 # how to realize this function ? def idle_magnet(self): """ Cool all coils of the superconducting magnet to achieve maximum accuracy after aligning. @return int: (0: Ok, -1:error) """ self.cool_all_switches() return 0 def wake_up_magnet(self): """ Heat all coils of the superconducting magnet to get back to the working state. @return int: (0: Ok, -1:error) """ self.heat_all_switches() return 0 def switch_mode(self, bool_var): """ This function is special for this Super Conducting Magnet (SCM). It stems from the constraints on the coils. There is one mode (so called "normal_mode" which allows a field strength of up to 1 T in each direction and a combined field strength of 1.2 T. The z_mode is special as the z-coil can conduct more current and therefore exert higher field values. In this mode the combined field strength is allowed to be as high as 3 T but only within a cone of 5° to the z-axis. @param bool_var: True sets mode to "normal_mode", and False to "z_mode" @return int: (0: 0k, -1:error) """ if bool_var: if self.mode != "normal_mode": self.calibrate() self.mode = "normal_mode" else: if self.mode != "z_mode": self.calibrate() self.mode = "z_mode" return 0 def target_field_setpoint(self, param_dict): """ Function to set the target field (in T), which will be reached through the function ramp(self, param_list). @param dict param_dict: Contains as keys the axes to be set e.g. 'x' or 'y' and the items are the float values for the new field generated by the coil of that axis. 
@return int: error code (0:OK, -1:error) """ field_dict = self.get_current_field() mode = self.mode if param_dict.get('x') is not None: field_dict['x'] = param_dict['x'] if param_dict.get('y') is not None: field_dict['y'] = param_dict['y'] if param_dict.get('z') is not None: field_dict['z'] = param_dict['z'] if param_dict.get('x') is None and param_dict.get('x') is None and param_dict.get('x') is None: self.log.warning('no valid axis was supplied in ' 'target_field_setpoint') return -1 new_coord = [field_dict['x'], field_dict['y'], field_dict['z']] check_var = self.check_constraints({mode: {'cart': new_coord}}) if check_var: if param_dict.get('x') is not None: self.soc_x.send(self.utf8_to_byte("CONF:FIELD:TARG " + str(param_dict['x']) + "\n")) if param_dict.get('y') is not None: self.soc_y.send(self.utf8_to_byte("CONF:FIELD:TARG " + str(param_dict['y']) + "\n")) if param_dict.get('z') is not None: self.soc_z.send(self.utf8_to_byte("CONF:FIELD:TARG " + str(param_dict['z']) + "\n")) else: self.log.warning('resulting field would be too high in ' 'target_field_setpoint') return -1 return 0 def ramp(self, param_list=None): """ function to ramp the magnetic field in the direction(s) to the target field values @param list param_list: This param is optional. If supplied it has to contain the labels for the axes, which should be ramped (only cartesian makes sense here), else all axes will be ramped. @return int: error code (0:OK, -1:error) """ if param_list is None: self.soc_x.send(self.utf8_to_byte("RAMP\n")) self.soc_y.send(self.utf8_to_byte("RAMP\n")) self.soc_z.send(self.utf8_to_byte("RAMP\n")) else: if 'x' in param_list: self.soc_x.send(self.utf8_to_byte("RAMP\n")) elif 'y' in param_list: self.soc_y.send(self.utf8_to_byte("RAMP\n")) elif 'z' in param_list: self.soc_z.send(self.utf8_to_byte("RAMP\n")) else: self.log.warning('in function ramp your definition of ' 'param_list was incorrect') return -1 return 0 def ramp_to_zero(self, axis): """ Function to ramp down a specific coil to zero current @param axis: string axis: (allowed inputs 'x', 'y' and 'z') """ if axis == "x": self.soc_x.send(self.utf8_to_byte("ZERO\n")) elif axis == "y": self.soc_y.send(self.utf8_to_byte("ZERO\n")) elif axis == "z": self.soc_z.send(self.utf8_to_byte("ZERO\n")) else: self.log.error("In function ramp_to_zero only 'x', 'y' and 'z' are possible axes") def calibrate(self, param_list=None): """ Calibrates the stage. In the case of the super conducting magnet this just means moving all or a user specified coil to zero magnetic field. @param dict param_list: param_list: optional, if a specific calibration of an axis is desired, then the labels of the needed axis should be passed in the param_list. If nothing is passed, then all connected axis will be calibrated. @return int: error code (0:OK, -1:error) After calibration the stage moves to home position which will be the zero point for the passed axis. The calibration procedure will be different for each stage. """ if not param_list: self.ramp_to_zero("x") self.ramp_to_zero("y") self.ramp_to_zero("z") else: if 'x' in param_list: self.ramp_to_zero("x") elif 'y' in param_list: self.ramp_to_zero("y") elif 'z' in param_list: self.ramp_to_zero("z") else: self.log.error('no valid axis was supplied') return -1 return 0 def set_coordinates(self, param_dict): """ Function to set spherical coordinates ( keep in mind all is in radians) This function is intended to replace the old set functions ( set_magnitude, set_theta, set_phi ). 
@param dict param_dict: dictionary, which passes all the relevant field values, that should be passed. Usage: {'axis_label': <the-abs-pos-value>}. 'axis_label' must correspond to a label given to one of the axis. In this case the axes are labeled 'rho', 'theta' and 'phi' @return int: error code (0:OK, -1:error) """ answ_dict = {} coord_list = [] transform_dict = {'cart': {'rad': coord_list}} answ_dict = self.get_current_field() coord_list.append(answ_dict['x']) coord_list.append(answ_dict['y']) coord_list.append(answ_dict['z']) coord_list = self.transform_coordinates(transform_dict) label_list = ['rho', 'theta', 'phi'] if param_dict.get('rho') is not None: coord_list[0] = param_dict['rho'] if param_dict.get('theta') is not None: coord_list[1] = param_dict['theta'] if param_dict.get('phi') is not None: coord_list[2] = param_dict['phi'] for key in param_dict.keys(): if key not in label_list: self.log.warning("The key "+key+" provided is no valid key in set_coordinates.") return -1 transform_dict = {'rad': {'cart': coord_list}} coord_list = self.transform_coordinates(transform_dict) set_point_dict = {'x': coord_list[0], 'y': coord_list[1], 'z': coord_list[2]} check_val = self.target_field_setpoint(set_point_dict) return check_val def move_abs(self, param_dict): """ Moves stage to absolute position (absolute movement) @param dict param_dict: dictionary, which passes all the relevant parameters, that should be changed. Usage: {'axis_label': <the-abs-pos-value>}. 'axis_label' must correspond to a label given to one of the axis. In this case the axes are labeled 'rho', 'theta' and 'phi'. @return int: error code (0:OK, -1:error) """ # the problem here is, that check_coordinates needs a complete dictionary with all # labels while move_abs doesn't need it. I think it is better to extend this flexibility to # check_constraints than changing move_abs. coord_list = [] mode = self.mode param_dict = self.update_coordinates(param_dict) coord_list.append(param_dict['rho']) coord_list.append(param_dict['theta']) coord_list.append(param_dict['phi']) # lets adjust theta theta = param_dict['theta'] phi = param_dict['phi'] # switch variable decides what has to be done ( in intervals [2*k*np.pi, 2k+1*np.pi] the movement would # be ok ( no rotation in phi ). In the other intervals one has to see if there was a movement before this # movement in one of these regions or not. If not just move, if there was shift to the interval [0, np.pi] and # move there. switch = np.ceil(theta / np.pi) % 2 inter1 = np.ceil(theta / np.pi) inter1 = int(inter1) # if inter1 > 0: # inter1 -= 1 # move the theta values in the right range # for the constraints # if in an even interval if switch: theta -= np.pi * (inter1 - 1) else: # get into the correct interval theta -= np.pi * (inter1 - 1) # now mirror at the center of the interval theta = np.pi/2 - (theta - np.pi/2) # interval was correct if switch: self._inter = inter1 # interval that needs rotation around z-axis in case it wasn't outside that interval before else: # actually it isn't necessary to distinguish here. I initially thought it is necessary and it would # be if one would move the magnet based on the magnet field of previous values. # I will leave the code here for now, when somebody in the future wants to extend this function # to allow both behaviors he can use the existing code. 
# theta was in a correct interval before but isn't now ( change of interval ) self.log.debug('need rotation around phi to adjust for negative theta value') self.log.debug('old int: {0}, new int: {1}'.format(self._inter, inter1)) if int(np.abs(self._inter - inter1)) is 1: phi += np.pi # theta wasn't in a correct interval before and is still in the same interval ( in this case do nothing ) elif int(np.abs(self._inter - inter1)) is 0: phi += np.pi else: self.log.warning("There was a difference in intervals larger " "than one between two consecutive movements. This is not supported " "yet.{0}".format(self._inter - inter1)) self._inter = inter1 # adjust the phi values so they are in the right interval. They might be in the wrong interval # due to user input or theta values inter2 = np.ceil(phi / (2 * np.pi)) inter2 = int(inter2) # if inter2 > 0: # inter2 -= 1 phi -= 2 * np.pi * (inter2 - 1) self.log.debug('show old dictionary: {0}'.format(param_dict)) # set the corrected values param_dict['theta'] = theta param_dict['phi'] = phi constr_dict = {mode: {'rad': coord_list}} self.log.debug('show new dictionary: {0}'.format(param_dict)) check_bool = self.check_constraints(constr_dict) if check_bool: check_1 = self.set_coordinates(param_dict) check_2 = self.ramp() else: self.log.warning("move_abs hasn't done anything, see check_constraints message why") return -1 if check_1 is check_2: if check_1 is 0: return 0 else: return -1 def move_rel(self, param_dict): """ Moves stage in given direction (in spheric coordinates with theta and phi in radian) @param dict param_dict: dictionary, which passes all the relevant parameters, which should be changed. Usage: {'axis_label': <the-abs-pos-value>}. 'axis_label' must correspond to a label given to one of the axis. @return int: error code (0:OK, -1:error) """ coord_list = [] answ_dict = self.get_current_field() coord_list.append(answ_dict['x']) coord_list.append(answ_dict['y']) coord_list.append(answ_dict['z']) transform_dict = {'cart': {'rad': coord_list}} coord_list = self.transform_coordinates(transform_dict) label_list = ['rho', 'theta', 'phi'] if param_dict.get('rho') is not None: coord_list[0] += param_dict['rho'] if param_dict.get('theta') is not None: coord_list[1] += param_dict['theta'] if param_dict.get('phi') is not None: coord_list[2] += param_dict['phi'] for key in param_dict.keys(): if key not in label_list: self.log.warning("The key "+key+" provided is no valid key in set_coordinates.") return -1 new_coord_dict = {'rho': coord_list[0], 'theta': coord_list[1], 'phi': coord_list[2]} check_val = self.move_abs(new_coord_dict) return check_val def transform_coordinates(self, param_dict): """ Function for generic coordinate transformation. This is a refactoring to the old functions (4) to be replaced by just one function @param dict param_dict: contains a param_dict, which contains a list of values to be transformed. The transformation depends on the keys of the first and the second dictionary. Possible keys are: "deg", "rad", "cart" for example if the first key is deg and the second is cartesian then the values in the list will be transformed from deg to cartesian. 
Ordering of the values should be [x,y,z] (cartesian) or [rho, theta, phi] for deg or rad @return list containing the transformed values """ # here all the possible cases for transformations # are checked if param_dict.get('deg') is not None: if param_dict['deg'].get('rad') is not None: try: rho, theta, phi = param_dict['deg'].get('rad') except ValueError: self.log.error('Supplied input list for transform_coordinates has to be of length 3: returning initial values') return [-1, -1, -1] theta = theta*np.pi/180 phi = phi*np.pi/180 return_list = [rho, theta, phi] return return_list if param_dict['deg'].get('cart') is not None: cartesian_list = [] try: rho, theta, phi = param_dict['deg'].get('cart') except ValueError: self.log.error('Supplied input list for transform_coordinates has to be of length 3: returning [-1,-1,-1]') return [-1, -1, -1] # transformations that should probably be revisited. # They are there in case the theta and phi values # are not in the correct range. while theta >= 180: phi += 180 theta = 360 - theta while theta < 0: theta = -theta phi += 180 while phi >= 360: phi += 360 while phi < 0: phi += 360 cartesian_list.append(rho * np.sin(theta * 2 * np.pi / 360) * np.cos(phi * 2 * np.pi / 360)) cartesian_list.append(rho * np.sin(theta * 2 * np.pi / 360) * np.sin(phi * 2 * np.pi / 360)) cartesian_list.append(rho * np.cos(theta * 2 * np.pi / 360)) return cartesian_list if param_dict.get('rad') is not None: if param_dict['rad'].get('deg') is not None: try: rho, theta, phi = param_dict['rad']['deg'] except ValueError: self.log.error("Supplied input list for transform_coordinates has to be of length 3: returning [-1, -1, -1]") return [-1,-1,-1] theta = 180*theta/np.pi phi = 180*phi/np.pi return_list = [rho, theta, phi] return return_list if param_dict['rad'].get('cart') is not None: try: rho, theta, phi = param_dict['rad']['cart'] except ValueError: self.log.error("Supplied input list for transf has to be of length 3: returning [-1, -1, -1]") return [-1,-1,-1] x_val = rho * np.sin(theta) * np.cos(phi) y_val = rho * np.sin(theta) * np.sin(phi) z_val = rho * np.cos(theta) return_list = [x_val, y_val, z_val] return return_list if param_dict.get('cart') is not None: if param_dict['cart'].get('deg') is not None: try: x_val, y_val, z_val = param_dict['cart']['deg'] except ValueError: self.log.error("Supplied input list for transform_coordinates has to be of length 3: returning [-1, -1, -1]") return [-1,-1,-1] rho = np.sqrt(x_val ** 2 + y_val ** 2 + z_val ** 2) if rho == 0: theta = 0 else: theta = np.arccos(z_val/rho) * 360/(2 * np.pi) if x_val == 0 and y_val == 0: phi = 0 else: phi = np.arctan2(y_val, x_val) * 360/(2 * np.pi) if phi < 0: phi += 360 return_list = [rho, theta, phi] return return_list if param_dict['cart'].get('rad') is not None: try: x_val, y_val, z_val = param_dict['cart']['rad'] except ValueError: self.log.error("Supplied input list for transform_coordinates has to be of length 3: returning [-1, -1, -1]") return [-1,-1,-1] rho = np.sqrt(x_val ** 2 + y_val ** 2 + z_val ** 2) if rho == 0: theta = 0 else: theta = np.arccos(z_val/rho) if x_val == 0 and y_val == 0: phi = 0 else: phi = np.arctan2(y_val, x_val) if phi < 0: phi += 2 * np.pi return_list = [rho, theta, phi] return return_list def get_current_field(self): """ Function that asks the magnet for the current field strength in each direction @param: @param x : representing the field strength in x direction @param y : representing the field strength in y direction float z : representing the field strength in z direction 
""" ask_dict = {} ask_dict['x'] = "FIELD:MAG?\n" ask_dict['y'] = "FIELD:MAG?\n" ask_dict['z'] = "FIELD:MAG?\n" answ_dict = self.ask(ask_dict) # having always a weird bug, where the response of the magnet # doesn't make sense, as it is always the same way I try to # catch these exceptions. # pattern to recognize decimal numbers ( There is one issue here e.g. (0.01940.01345) gives one match # with 0.01940. Don't think it will matter much.) my_pattern = re.compile('[-+]?[0-9][.][0-9]+') try: answ_dict['x'] = float(answ_dict['x']) except ValueError: match_list = re.findall(my_pattern, answ_dict['x']) answ_dict['x'] = float(match_list[0]) try: answ_dict['y'] = float(answ_dict['y']) except ValueError: match_list = re.findall(my_pattern, answ_dict['y']) answ_dict['y'] = float(match_list[0]) try: answ_dict['z'] = float(answ_dict['z']) except ValueError: match_list = re.findall(my_pattern, answ_dict['z']) answ_dict['z'] = float(match_list[0]) return answ_dict def get_pos(self, param_list=None): """ Gets current position of the stage @param list param_list: optional, if a specific position of an axis is desired, then the labels of the needed axis should be passed in the param_list. If nothing is passed, then from each axis the position is asked. @return dict mypos: with keys being the axis labels and item the current position. Given in spheric coordinates with Units T, rad , rad. """ mypos = {} mypos1 = {} answ_dict = self.get_current_field() coord_list = [answ_dict['x'], answ_dict['y'], answ_dict['z']] rho, theta, phi = self.transform_coordinates({'cart': {'rad': coord_list}}) mypos1['rho'] = rho mypos1['theta'] = theta mypos1['phi'] = phi if param_list is None: return mypos1 else: if "rho" in param_list: mypos['rho'] = mypos1['rho'] if "theta" in param_list: mypos['theta'] = mypos1['theta'] if "phi" in param_list: mypos['phi'] = mypos1['phi'] return mypos def stop_hard(self, param_list=None): """ function that pauses the heating of a specific coil depending on the elements in param_list. @param list param_list: Can contain elements 'x', 'y' or 'z'. In the case no list is supplied the heating of all coils is stopped @return integer: 0 everything is ok and -1 an error occured. """ if not param_list: self.soc_x.send(self.utf8_to_byte("PAUSE\n")) self.soc_y.send(self.utf8_to_byte("PAUSE\n")) self.soc_z.send(self.utf8_to_byte("PAUSE\n")) elif len(param_list) > 0: self.log.warning('Some useless parameters were passed.') return -1 else: if 'x' in param_list: self.soc_x.send(self.utf8_to_byte("PAUSE\n")) param_list.remove('x') if 'y' in param_list: self.soc_y.send(self.utf8_to_byte("PAUSE\n")) param_list.remove('y') if 'z' in param_list: self.soc_z.send(self.utf8_to_byte("PAUSE\n")) param_list.remove('z') return 0 def abort(self): """ Stops movement of the stage @return int: error code (0:OK, -1:error) """ # could think about possible exceptions here and # catch them and return -1 in case ab = self.stop_hard() return ab def ask_status(self, param_list = None): """ Function that returns the status of the coils ('x','y' and 'z') given in the param_dict @param list param_list: string (elements allowed 'x', 'y' and 'z') for which the status should be returned. Can be None, then the answer is the same as for the list ['x','y','z']. @return state: returns a string, which contains the number '1' to '10' representing the state, the magnet is in. 
For further information on the meaning of the numbers see translated_get_status() """ ask_dict = {} for i_dea in range(2): if not param_list: ask_dict['x'] = "STATE?\n" ask_dict['y'] = "STATE?\n" ask_dict['z'] = "STATE?\n" else: for axis in param_list: ask_dict[axis] = "STATE?\n" if i_dea == 0: pass # wait some time not sure if this is necessary. # time.sleep(self.waitingtime) answer_dict = self.ask(ask_dict) return answer_dict def translated_get_status(self, param_list=None): """ Just a translation of the numbers according to the manual supplied by American Magnets, Inc. @param list param_list: string (elements allowed 'x', 'y' and 'z') for which the translated status should be returned. Can be None, then the answer is the same as for the list ['x','y','z'] @return dictionary status_dict: keys are the elements of param_list and the items contain the message for the user. """ status_dict = self.ask_status(param_list) for myiter in status_dict.keys(): stateval = status_dict[myiter] try: if int(stateval) > 10: stateval = int(stateval) while stateval > 10: stateval //= 10 stateval = str(stateval) if stateval == '1': translated_status = 'RAMPING to target field/current' elif stateval == '2': translated_status = 'HOLDING at the target field/current' elif stateval == '3': translated_status = 'PAUSED' elif stateval == '4': translated_status = 'Ramping in MANUAL UP mode' elif stateval == '5': translated_status = 'Ramping in MANUAL DOWN mode' elif stateval == '6': translated_status = 'ZEROING CURRENT (in progress)' elif stateval == '7': translated_status = 'Quench detected' elif stateval == '8': translated_status = 'At ZERO current' elif stateval == '9': translated_status = 'Heating persistent switch' elif stateval == '10': translated_status = 'Cooling persistent switch' else: self.log.warning('Something went wrong in ask_status as the statevalue was not between 1 and 10!') return -1 except ValueError: self.log.warning("Sometimes the magnet returns nonsense after a request") return -1 status_dict[myiter] = translated_status return status_dict # This first version of set and get velocity will be very simple # Normally one can set up several ramping rates for different field # regions and so on. I also leave it to the user to find out how many # segments he has and so on. If nothing is changed the magnet should have # 1 segment and max_val should be the max_val that can be reached in that # direction. def set_velocity(self, param_dict): """ Function to change the ramp rate in T/s (ampere per second) @param dict: contains as keys the different cartesian axes ('x', 'y', 'z') and the dict contains list of parameters, that have to be supplied. In this case this is segment, ramp_rate and maxval. How does this work? The maxval for the current marks the endpoint and in between you have several segments with differen ramp_rates. 
@return int: error code (0:OK, -1:error) """ tell_dict = {} return_val = 0 internal_counter = 0 constraint_dict = self.get_constraints() if param_dict.get('x') is not None: param_list = list() param_list.append(1) # the segment param_list.append(param_dict['x']) param_list.append(1) # the upper bound of the velocity constraint_x = constraint_dict['rho']['vel_max'] if constraint_x > param_list[1]: tell_dict['x'] = 'CONF:RAMP:RATE:FIELD:' + str(param_list[0]) + ", " + str(param_list[1]) + ", " + str(param_list[2]) else: self.log.warning("constraint vel_max was violated in set_velocity with axis = 'x'") internal_counter += 1 if param_dict.get('y') is not None: param_list = list() param_list.append(1) # the segment param_list.append(param_dict['y']) param_list.append(1) # the upper bound of the velocity constraint_y = constraint_dict['theta']['vel_max'] if constraint_y > param_list[1]: tell_dict['y'] = 'CONF:RAMP:RATE:FIELD:' + str(param_list[0]) + ", " + str(param_list[1]) + ", " + str(param_list[2]) else: self.log.warning("constraint vel_max was violated in set_velocity with axis = 'y'") internal_counter += 1 if param_dict.get('z') is not None: param_list = list() param_list.append(1) # the segment param_list.append(param_dict['z']) param_list.append(3) # the upper bound of the velocity constraint_z = constraint_dict['phi']['vel_max'] if constraint_z > param_list[1]: tell_dict['z'] = 'CONF:RAMP:RATE:FIELD:' + str(param_list[0]) + ", " + str(param_list[1]) + ", " + str(param_list[2]) else: self.log.warning("constraint vel_max was violated in set_velocity with axis = 'z'") internal_counter += 1 if internal_counter > 0: self.tell(tell_dict) else: self.log.warning('There was no statement supplied in change_ramp_rate') return_val = -1 return return_val def get_velocity(self, param_list=None): """ Gets the current velocity for all connected axes. @param dict param_list: optional, if a specific velocity of an axis is desired, then the labels of the needed axis should be passed as the param_list. If nothing is passed, then from each axis the velocity is asked. @return dict: with the axis label as key and the velocity as item. """ ask_dict = {} return_dict = {} if param_list is None: ask_dict['x'] = "RAMP:RATE:FIELD:1?" ask_dict['y'] = "RAMP:RATE:FIELD:1?" ask_dict['z'] = "RAMP:RATE:FIELD:1?" answ_dict = self.ask(ask_dict) return_dict['x'] = float(answ_dict['x'].split(',')[0]) return_dict['y'] = float(answ_dict['y'].split(',')[0]) return_dict['z'] = float(answ_dict['z'].split(',')[0]) else: for axis in param_list: ask_dict[axis] = "RAMP:RATE:FIELD:1?" answ_dict = self.ask(ask_dict) for axis in param_list: return_dict[axis] = float(answ_dict[axis].split(',')[0]) return return_dict def check_constraints(self, param_dict): """ Function that verifies if for a given configuration of field strength exerted through the coils the constraints of the magnet are violated. @param dictionary param_dict: the structure of the dictionary is as follows {'z_mode': {'cart': [a,b,c]}} with available keys 'z_mode' and 'normal_mode'. The dictionary inside the dictionary can contain the label 'deg', 'cart' and 'rad'. The list contains then the new values and checks the constraints for them. z_mode means you can reach fields of 3 T in z-direction as long as the field vector is directed in z-direction within an accuracy of 5°. In this mode you should still be careful and the 5° restriction is kind of arbitrary and not experimented with. 
@return: boolean check_var: True if the constraints are fulfilled and False otherwise """ # First going to include a local function to check the constraints for cartesian coordinates # This helps to just reuse this function for the check of 'deg' and 'rad' cases. def check_cart_constraints(coord_list, mode): my_boolean = True try: x_val, y_val, z_val = coord_list except ValueError: self.log.error("In check_constraints list has not the right amount of elements (3).") return [-1, -1, -1] if mode == "normal_mode": if np.abs(x_val) > self.x_constr: my_boolean = False if np.abs(y_val) > self.y_constr: my_boolean = False if np.abs(z_val) > self.x_constr: my_boolean = False field_magnitude = np.sqrt(x_val**2 + y_val**2 + z_val**2) if field_magnitude > self.rho_constr: my_boolean = False elif mode == "z_mode": # Either in sphere on top of the cone # or in cone itself. my_boolean = False # angle 5° cone # 3T * cos(5°) height_cone = 2.9886 if (np.abs(z_val) <= height_cone) and ((x_val**2 + y_val**2) <= z_val**2): my_boolean = True elif x_val**2 + y_val**2 + (z_val - height_cone)**2 <= self.rho_constr: my_boolean = True elif x_val**2 + y_val**2 + (z_val + height_cone)**2 <= self.rho_constr: my_boolean = True if not my_boolean: self.log.warning("In check_constraints your settings don't lie in the allowed cone. See the " "function for more information") return my_boolean return_val = False if param_dict.get('normal_mode') is not None: if param_dict['normal_mode'].get("cart") is not None: return_val = check_cart_constraints(param_dict['normal_mode']["cart"], 'normal_mode') if param_dict['normal_mode'].get("rad") is not None: transform_dict = {'rad': {'cart': param_dict['normal_mode']["rad"]}} cart_coord = self.transform_coordinates(transform_dict) return_val = check_cart_constraints(cart_coord, 'normal_mode') # ok degree mode here won't work properly, because I don't check the move constraints if param_dict['normal_mode'].get("deg") is not None: transform_dict = {'deg': {'cart': param_dict['normal_mode']["deg"]}} cart_coord = self.transform_coordinates(transform_dict) return_val = check_cart_constraints(cart_coord, 'normal_mode') elif param_dict.get('z_mode') is not None: if param_dict['z_mode'].get("cart") is not None: return_val = check_cart_constraints(param_dict['z_mode']["cart"], 'z_mode') if param_dict['z_mode'].get("rad") is not None: transform_dict = {'rad':{'cart': param_dict['z_mode']["rad"]}} cart_coord = self.transform_coordinates(transform_dict) return_val = check_cart_constraints(cart_coord, 'z_mode') if param_dict['z_mode'].get("deg") is not None: transform_dict = {'deg': {'cart': param_dict['z_mode']["deg"]}} cart_coord = self.transform_coordinates(transform_dict) return_val = check_cart_constraints(cart_coord, 'z_mode') else: self.log.warning("no valid key was provided, therefore nothing happened in function check_constraints.") return return_val def rho_pos_max(self, param_dict): """ Function that calculates the constraint for rho either given theta and phi values in degree or x, y and z in cartesian coordinates. @param dictionary param_dict: Has to be of the form {'rad': [rho, theta, phi]} supports also 'deg' and 'cart' option. @return float pos_max: the max position for given theta and phi values. Returns -1 in case of failure. """ # so I'm going to rework this function. The answer in the case # of z_mode is easy. (Max value for r is constant 3 True) # For the "normal_mode" I decided to come up with a new # algorithm. 
# That algorithm can be summarized as follows: # Check if the vector (r,theta,phi) # with length so that it is on the surface of the sphere. In case it conflicts with the # rectangular constraints given by the coils itself (x<=10, y<=10, z<=10) # we need to find the # intersection between the vector and the cube (Sadly this will need # 6 cases, just like a dice), else we are finished. pos_max_dict = {'rho': -1, 'theta': -1, 'phi': -1} pos_max_dict['phi'] = 2*np.pi param_dict = {self.mode: param_dict} if param_dict.get("z_mode") is not None: pos_max_dict['theta'] = np.pi*5/180 # 5° cone if self.check_constraints(param_dict): pos_max_dict['rho'] = self.z_constr else: pos_max_dict['rho'] = 0.0 elif param_dict.get("normal_mode") is not None: pos_max_dict['theta'] = np.pi if param_dict["normal_mode"].get("cart") is not None: transform_dict = {'cart': {'rad': param_dict["normal_mode"].get("cart")}} coord_dict_rad = self.transform_coordinates(transform_dict) coord_dict_rad = {'rad': coord_dict_rad} coord_dict_rad['rad'][0] = self.rho_constr transform_dict = {'rad': {'cart': coord_dict_rad['rad']}} coord_dict_cart = self.transform_coordinates(transform_dict) coord_dict_cart = {'normal_mode': {'cart': coord_dict_cart}} elif param_dict["normal_mode"].get("rad") is not None: # getting the coord list and transforming the coordinates to # cartesian, so cart_constraints can make use of it # setting the radial coordinate, as only the angular coordinates # are of importance and e.g. a zero in the radial component would be # To set it to rho_constr is also important, as it allows a check # if the sphere is the valid constraint in the current direction. coord_list = param_dict["normal_mode"]["rad"] coord_dict_rad = param_dict["normal_mode"] coord_dict_rad['rad'][0] = self.rho_constr transform_dict = {'rad': {'cart': coord_dict_rad['rad']}} coord_dict_cart = self.transform_coordinates(transform_dict) coord_dict_cart = {'normal_mode': {'cart': coord_dict_cart}} elif param_dict["normal_mode"].get("deg") is not None: coord_list = param_dict["normal_mode"]["deg"] coord_dict_deg = param_dict["normal_mode"] coord_dict_deg['deg'][0] = self.rho_constr coord_dict_rad = self.transform_coordinates({'deg': {'rad': coord_dict_deg['deg']}}) coord_dict_rad = {'rad': coord_dict_rad} transform_dict = {'rad': {'cart': coord_dict_rad['rad']}} coord_dict_cart = self.transform_coordinates(transform_dict) coord_dict_cart = {'normal_mode': {'cart': coord_dict_cart}} my_boolean = self.check_constraints(coord_dict_cart) if my_boolean: pos_max_dict['rho'] = self.rho_constr else: # now I need to find out, which plane I need to check phi = coord_dict_rad['rad'][2] theta = coord_dict_rad['rad'][1] # Sides of the rectangular intersecting with position vector if (np.pi/4 <= theta) and (theta < np.pi - np.pi/4): if (7*np.pi/4 < phi < 2*np.pi) or (0 <= phi <= np.pi/4): pos_max_dict['rho'] = self.x_constr/(np.cos(phi)*np.sin(theta)) elif (np.pi/4 < phi) and (phi <= 3*np.pi/4): pos_max_dict['rho'] = self.y_constr / (np.sin(phi)*np.sin(theta)) elif (3*np.pi/4 < phi) and (phi <= 5*np.pi/4): pos_max_dict['rho'] = -self.x_constr/(np.cos(phi)*np.sin(theta)) elif (5*np.pi/4 < phi) and (phi <= 7*np.pi/4): pos_max_dict['rho'] = -self.y_constr / (np.sin(phi)*np.sin(theta)) # Top and bottom of the rectangular elif (0 <= theta) and (theta < np.pi/4): pos_max_dict['rho'] = self.x_constr / np.cos(theta) elif (3*np.pi/4 <= theta) and (theta <= np.pi): pos_max_dict['rho'] = - self.x_constr / np.cos(theta) return pos_max_dict def update_coordinates(self, 
param_dict): """ A small helper function that does make the functions set_coordinates, transform_coordinates compatible with the interface defined functions. The problem is, that in the interface functions each coordinate is item to an key which represents the axes of the current coordinate system. This function only makes the set of coordinates complete. E.g {'rho': 1.3} to {'rho': 1.3, 'theta': np.pi/2, 'phi': 0 } @param param_dict: Contains the incomplete dictionary @return: the complete dictionary """ current_coord_dict = self.get_pos() for key in current_coord_dict.keys(): if param_dict.get(key) is None: param_dict[key] = current_coord_dict[key] return param_dict def set_magnet_idle_state(self, magnet_idle=True): """ Set the magnet to couple/decouple to/from the control. @param bool magnet_idle: if True then magnet will be set to idle and each movement command will be ignored from the hardware file. If False the magnet will react on movement changes of any kind. @return bool: the actual state which was set in the magnet hardware. True = idle, decoupled from control False = Not Idle, coupled to control """ pass def get_magnet_idle_state(self): """ Retrieve the current state of the magnet, whether it is idle or not. @return bool: the actual state which was set in the magnet hardware. True = idle, decoupled from control False = Not Idle, coupled to control """ pass
tobiasgehring/qudi
hardware/sc_magnet/magnet.py
Python
gpl-3.0
62,977
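As a side note on the sample above: a minimal standalone sketch of the spherical/Cartesian conversion that its transform_coordinates method implements (same formulas, stripped of the nested-dict interface); numpy is assumed, as in the original:

import numpy as np

def rad_to_cart(rho, theta, phi):
    # same formulas as the 'rad' -> 'cart' branch of transform_coordinates
    return (rho * np.sin(theta) * np.cos(phi),
            rho * np.sin(theta) * np.sin(phi),
            rho * np.cos(theta))

def cart_to_rad(x, y, z):
    # same formulas as the 'cart' -> 'rad' branch of transform_coordinates
    rho = np.sqrt(x ** 2 + y ** 2 + z ** 2)
    theta = 0.0 if rho == 0 else np.arccos(z / rho)
    phi = 0.0 if (x == 0 and y == 0) else np.arctan2(y, x)
    if phi < 0:
        phi += 2 * np.pi  # keep phi in [0, 2*pi), as the hardware file does
    return rho, theta, phi

# round trip for a 0.5 T field at theta = pi/4, phi = pi/3
print(cart_to_rad(*rad_to_cart(0.5, np.pi / 4, np.pi / 3)))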
# -*- coding: utf-8 -*- # # codimension - graphics python two-way code editor and analyzer # Copyright (C) 2010-2017 Sergey Satskiy <sergey.satskiy@gmail.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """Provides the storage for the search environment""" import os.path from copy import deepcopy from .fileutils import loadJSON, saveJSON # The 'find' item is for the search in the buffer dialogue. Each item is a # dictionary of the following structure: # { 'what': <string>, # 'case': <bool>, 'word': <bool>, 'regexp': <bool> } # # The 'replace' item is for the replace in the buffer dialogue. Each item is a # dictionary of the following structure: # { 'what': <string>, 'with': <string>, # 'case': <bool>, 'word': <bool>, 'regexp': <bool> } # # The 'findinfiles' item is for a modal find in files dialogue box. Each item # is a dictionary of the following structure: # { 'what': <string>, # 'case': <bool>, 'word': <bool>, 'regexp': <bool>, # 'inproject': <bool>, 'inopened': <bool>, 'indir': <bool>, 'dir': <string>, # 'filter': <string> } _DEFAULT_SEARCH_HISTORY = { 'class': [], # [term, ...] 'function': [], # [term, ...] 'global': [], # [term, ...] 'findname': [], # [term, ...] 'findfile': [], # [term, ...] 'find': [], # [ {'term': , 'replace': , # 'cbCase': , 'cbWord': , 'cbRegexp': }, ... ] 'findinfiles': []} # [ {'term': , 'dir': , 'filters': , # 'cbCase': , 'cbWord': , 'cbRegexp': , # 'rbProject': , 'rbOpen': , 'rbDir': }, ... ] class SearchEnvironment: """Loads/stores/saves the search environment""" def __init__(self): self.__props = deepcopy(_DEFAULT_SEARCH_HISTORY) self.__seFileName = None # Default. Could be updated later self.__limit = 32 def reset(self): """Un-binds from the file system""" self.__props = deepcopy(_DEFAULT_SEARCH_HISTORY) self.__seFileName = None def setup(self, dirName): """Binds the parameters to a disk file""" # Just in case - flush the previous data if they were bound SearchEnvironment.save(self) dirName = os.path.realpath(dirName) if not dirName.endswith(os.path.sep): dirName += os.path.sep if not os.path.isdir(dirName): raise Exception('Directory name is expected for the search ' 'environment. 
The given ' + dirName + ' is not.') self.__seFileName = dirName + "searchenv.json" if os.path.exists(self.__seFileName): SearchEnvironment.load(self) def load(self): """Loads the saved search environment""" if self.__seFileName: default = deepcopy(_DEFAULT_SEARCH_HISTORY) self.__props = loadJSON(self.__seFileName, 'search environment', default) def save(self): """Saves the search environment into a file""" if self.__seFileName: saveJSON(self.__seFileName, self.__props, 'search environment') def __addToContainer(self, element, item): """Common implementation of adding a search item""" if item in self.__props[element]: self.__props[element].remove(item) self.__props[element].insert(0, item) if len(self.__props[element]) > self.__limit: self.__props[element] = self.__props[element][0:self.__limit] SearchEnvironment.save(self) def __setContainer(self, item, history): """Generic container setter which respects the limit""" if len(history) > self.__limit: self.__props[item] = history[0:self.__limit] else: self.__props[item] = history SearchEnvironment.save(self) def setLimit(self, newLimit): """Sets the new limit""" self.__limit = newLimit @property def findClassHistory(self): """Provides the find class history""" return self.__props['class'] @findClassHistory.setter def findClassHistory(self, history): self.__setContainer('class', history) def addToFindClassHistory(self, item): """Adds an item to the class history""" self.__addToContainer('class', item) @property def findFunctionHistory(self): """Provides the find function history""" return self.__props['function'] @findFunctionHistory.setter def findFunctionHistory(self, history): self.__setContainer('function', history) def addToFindFunctionHistory(self, item): """Adds an item to the function history""" self.__addToContainer('function', item) @property def findGlobalHistory(self): """Provides the find global history""" return self.__props['global'] @findGlobalHistory.setter def findGlobalHistory(self, history): self.__setContainer('global', history) def addToFindGlobalHistory(self, item): """Adds an item to the global history""" self.__addToContainer('global', item) @property def findNameHistory(self): """Provides the find name history""" return self.__props['findname'] @findNameHistory.setter def findNameHistory(self, history): self.__setContainer('findname', history) def addToFindNameHistory(self, item): """Adds an item to the name history""" self.__addToContainer('findname', item) @property def findFileHistory(self): """Provides the find file history""" return self.__props['findfile'] @findFileHistory.setter def findFileHistory(self, history): self.__setContainer('findfile', history) def addToFindFileHistory(self, item): """Adds an item to the file history""" self.__addToContainer('findfile', item) @property def findHistory(self): """Provides the find history""" return self.__props['find'] @findHistory.setter def findHistory(self, history): self.__setContainer('find', history) def addToFindHistory(self, item): """Adds an item to the file history""" self.__addToContainer('find', item) @property def findInFilesHistory(self): """Provides the find in files history""" return self.__props['findinfiles'] @findInFilesHistory.setter def findInFilesHistory(self, history): self.__setContainer('findinfiles', history) def addToFindInFilesHistory(self, item): """Adds an item to the file history""" self.__addToContainer('findinfiles', item)
SergeySatskiy/codimension
codimension/utils/searchenv.py
Python
gpl-3.0
7,260
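All of the history containers in the searchenv.py sample above follow the same move-to-front policy capped at a limit (default 32); a minimal sketch of that behaviour in isolation, with illustrative names:

def add_to_history(history, item, limit=32):
    # same policy as SearchEnvironment.__addToContainer: drop duplicates,
    # put the newest item first, then trim the list to the limit
    if item in history:
        history.remove(item)
    history.insert(0, item)
    del history[limit:]

terms = []
for term in ["foo", "bar", "foo"]:
    add_to_history(terms, term)
print(terms)  # ['foo', 'bar'] - 'foo' was moved back to the front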
""" This contains all error handling functions for the logistic Environment Module. """ class Error(Exception): """Base class for exceptions in this module.""" pass class ActionNotAList(Error): """Exception raised when goal is not plausible.""" def __str__(self): return "Actions passed is not a list or tuple!" class GoalNotPlausible(Error): """Exception raised when goal is not plausible.""" def __str__(self): return "Goal is NOT plausible!" class AirplaneMaxBoxExided(Error): """Exception raised for errors in the airplanes when a box is added.""" def __init__(self, obj): self.obj = obj def __str__(self): return "{0} max boxes exceeded!".format(self.obj) class BoxAlreadyAssigned(Error): """Exception raised for errors in the assignment of boxes.""" def __init__(self, name): self.name = name def __str__(self): return "{0} already assigned or not exist!".format(self.name) class AirplaneNotExist(Error): """Exception raised for errors in the assignment of airplanes.""" def __init__(self, name): self.name = name def __str__(self): return "{0} not exist!".format(self.name) class AirplaneAlreadyAssigned(Error): """Exception raised for errors in the assignment of airplanes.""" def __init__(self, name): self.name = name def __str__(self): return "{0} already assigned or not exist!".format(self.name) class LinkNotExist(Error): """Exception raised when two airport are not linked.""" def __init__(self, from_, to_): self.from_ = from_ self.to_ = to_ def __str__(self): return "Link from {0} to {1} not exist!".format(self.from_, self.to_)
MircoT/AI-Project-PlannerEnvironment
agents_dir/errorObjs.py
Python
mit
1,779
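A short usage sketch for the exception classes in the sample above; the import path is taken from the file path, and the airplane name is made up for illustration:

from agents_dir.errorObjs import AirplaneNotExist

try:
    raise AirplaneNotExist("plane_42")  # "plane_42" is a hypothetical name
except AirplaneNotExist as err:
    print(err)  # -> plane_42 does not exist!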
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals

import django_countries.fields
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]

    operations = [
        migrations.CreateModel(
            name="CountryAlias",
            fields=[
                ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)),
                (
                    "is_active",
                    models.BooleanField(
                        default=True, help_text="Whether this item is active, use this instead of deleting"
                    ),
                ),
                (
                    "created_on",
                    models.DateTimeField(help_text="When this item was originally created", auto_now_add=True),
                ),
                ("modified_on", models.DateTimeField(help_text="When this item was last modified", auto_now=True)),
                ("country", django_countries.fields.CountryField(max_length=2)),
                ("name", models.CharField(help_text="The name for our alias", max_length=128)),
                (
                    "created_by",
                    models.ForeignKey(
                        related_name="countries_countryalias_creations",
                        on_delete=models.PROTECT,
                        to=settings.AUTH_USER_MODEL,
                        help_text="The user which originally created this item",
                    ),
                ),
                (
                    "modified_by",
                    models.ForeignKey(
                        related_name="countries_countryalias_modifications",
                        on_delete=models.PROTECT,
                        to=settings.AUTH_USER_MODEL,
                        help_text="The user which last modified this item",
                    ),
                ),
            ],
            options={"abstract": False},
            bases=(models.Model,),
        )
    ]
rapidpro/ureport
ureport/countries/migrations/0001_initial.py
Python
agpl-3.0
2,161
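A hedged sketch of the model this initial migration would create, reconstructed from the migration fields and written as a plain Django model for illustration; the real project may derive it from a shared base class, so treat the class body below as an assumption rather than the project's actual model code:

from django.conf import settings
from django.db import models
from django_countries.fields import CountryField


class CountryAlias(models.Model):
    # Illustrative reconstruction from the migration above.
    is_active = models.BooleanField(
        default=True, help_text="Whether this item is active, use this instead of deleting")
    created_on = models.DateTimeField(auto_now_add=True,
                                      help_text="When this item was originally created")
    modified_on = models.DateTimeField(auto_now=True,
                                       help_text="When this item was last modified")
    country = CountryField(max_length=2)
    name = models.CharField(max_length=128, help_text="The name for our alias")
    created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.PROTECT,
                                   related_name="countries_countryalias_creations")
    modified_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.PROTECT,
                                    related_name="countries_countryalias_modifications")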
''' python src/train.py \ --train_dir=/raid/pengchong_data/tfmodel_test/ \ --dataset_dir=/raid/pengchong_data/Data/VOC/VOCdevkit/TFRecords/2007 \ --max_number_of_steps=100 \ --batch_size=2 ''' import os import tensorflow as tf from datasets import dataset_factory from nets import nets_factory, yolo_v2 from preprocessing import yolo_v2_preprocessing slim = tf.contrib.slim tf.app.flags.DEFINE_string( 'master', '', 'The address of the TensorFlow master to use.') tf.app.flags.DEFINE_string( 'train_dir', '/tmp/tfmodel/', 'Directory where checkpoints and event logs are written to.') tf.app.flags.DEFINE_integer('num_clones', 1, 'Number of model clones to deploy.') tf.app.flags.DEFINE_boolean('clone_on_cpu', False, 'Use CPUs to deploy clones.') tf.app.flags.DEFINE_integer('worker_replicas', 1, 'Number of worker replicas.') tf.app.flags.DEFINE_integer( 'num_ps_tasks', 0, 'The number of parameter servers. If the value is 0, then the parameters ' 'are handled locally by the worker.') tf.app.flags.DEFINE_integer( 'num_readers', 4, 'The number of parallel readers that read data from the dataset.') tf.app.flags.DEFINE_integer( 'num_preprocessing_threads', 4, 'The number of threads used to create the batches.') tf.app.flags.DEFINE_integer( 'log_every_n_steps', 10, 'The frequency with which logs are print.') tf.app.flags.DEFINE_integer( 'save_summaries_secs', 60, 'The frequency with which summaries are saved, in seconds.') tf.app.flags.DEFINE_integer( 'save_interval_secs', 600, 'The frequency with which the model is saved, in seconds.') tf.app.flags.DEFINE_integer( 'task', 0, 'Task id of the replica running the training.') ###################### # Optimization Flags # ###################### tf.app.flags.DEFINE_float( 'weight_decay', 0.0005, 'The weight decay on the model weights.') tf.app.flags.DEFINE_string( 'optimizer', 'rmsprop', 'The name of the optimizer, one of "adadelta", "adagrad", "adam",' '"ftrl", "momentum", "sgd" or "rmsprop".') tf.app.flags.DEFINE_float( 'adadelta_rho', 0.95, 'The decay rate for adadelta.') tf.app.flags.DEFINE_float( 'adagrad_initial_accumulator_value', 0.1, 'Starting value for the AdaGrad accumulators.') tf.app.flags.DEFINE_float( 'adam_beta1', 0.9, 'The exponential decay rate for the 1st moment estimates.') tf.app.flags.DEFINE_float( 'adam_beta2', 0.999, 'The exponential decay rate for the 2nd moment estimates.') tf.app.flags.DEFINE_float('opt_epsilon', 1.0, 'Epsilon term for the optimizer.') tf.app.flags.DEFINE_float('ftrl_learning_rate_power', -0.5, 'The learning rate power.') tf.app.flags.DEFINE_float( 'ftrl_initial_accumulator_value', 0.1, 'Starting value for the FTRL accumulators.') tf.app.flags.DEFINE_float( 'ftrl_l1', 0.0, 'The FTRL l1 regularization strength.') tf.app.flags.DEFINE_float( 'ftrl_l2', 0.0, 'The FTRL l2 regularization strength.') tf.app.flags.DEFINE_float( 'momentum', 0.9, 'The momentum for the MomentumOptimizer and RMSPropOptimizer.') tf.app.flags.DEFINE_float('rmsprop_decay', 0.9, 'Decay term for RMSProp.') ####################### # Learning Rate Flags # ####################### tf.app.flags.DEFINE_string( 'learning_rate_decay_type', 'exponential', 'Specifies how the learning rate is decayed. 
One of "fixed", "exponential",' ' or "polynomial"') tf.app.flags.DEFINE_float('learning_rate', 0.01, 'Initial learning rate.') tf.app.flags.DEFINE_float( 'end_learning_rate', 0.0001, 'The minimal end learning rate used by a polynomial decay learning rate.') tf.app.flags.DEFINE_float( 'label_smoothing', 0.0, 'The amount of label smoothing.') tf.app.flags.DEFINE_float( 'learning_rate_decay_factor', 0.94, 'Learning rate decay factor.') tf.app.flags.DEFINE_float( 'num_epochs_per_decay', 2.0, 'Number of epochs after which learning rate decays.') tf.app.flags.DEFINE_bool( 'sync_replicas', False, 'Whether or not to synchronize the replicas during training.') tf.app.flags.DEFINE_integer( 'replicas_to_aggregate', 1, 'The Number of gradients to collect before updating params.') tf.app.flags.DEFINE_float( 'moving_average_decay', None, 'The decay to use for the moving average.' 'If left as None, then moving averages are not used.') ####################### # Dataset Flags # ####################### tf.app.flags.DEFINE_string( 'dataset_name', 'voc_2007', 'The name of the dataset to load.') tf.app.flags.DEFINE_string( 'dataset_split_name', 'train', 'The name of the train/test split.') tf.app.flags.DEFINE_string( 'dataset_dir', None, 'The directory where the dataset files are stored.') tf.app.flags.DEFINE_integer( 'labels_offset', 0, 'An offset for the labels in the dataset. This flag is primarily used to ' 'evaluate the VGG and ResNet architectures which do not use a background ' 'class for the ImageNet dataset.') tf.app.flags.DEFINE_string( 'model_name', 'yolo_v2', 'The name of the architecture to train.') tf.app.flags.DEFINE_string( 'preprocessing_name', None, 'The name of the preprocessing to use. If left ' 'as `None`, then the model_name flag is used.') tf.app.flags.DEFINE_integer( 'batch_size', 32, 'The number of samples in each batch.') tf.app.flags.DEFINE_integer( 'num_classes', 20, 'The number of classes.') tf.app.flags.DEFINE_integer( 'train_image_size', (416, 416), 'Train image size') tf.app.flags.DEFINE_integer('max_number_of_steps', 10000, 'The maximum number of training steps.') ##################### # Fine-Tuning Flags # ##################### tf.app.flags.DEFINE_string( 'checkpoint_path', None, 'The path to a checkpoint from which to fine-tune.') tf.app.flags.DEFINE_string( 'checkpoint_exclude_scopes', None, 'Comma-separated list of scopes of variables to exclude when restoring ' 'from a checkpoint.') tf.app.flags.DEFINE_string( 'trainable_scopes', None, 'Comma-separated list of scopes to filter the set of variables to train.' 'By default, None would train all the variables.') tf.app.flags.DEFINE_boolean( 'ignore_missing_vars', False, 'When restoring a checkpoint would ignore missing variables.') FLAGS = tf.app.flags.FLAGS def inference_sequential(image_batch): network_fn = nets_factory.get_network_fn( name=FLAGS.model_name, num_classes=FLAGS.num_classes, is_training=True, weight_decay=FLAGS.weight_decay, num_anchors=5) net, end_points = network_fn(image_batch) box_coordinate, box_confidence, box_class_probs = yolo_v2.yolo_v2_head(net, FLAGS.num_classes, [[1, 2], [1, 3], [2, 1], [3, 1], [1, 1]], True) # preds = tf.reduce_max(box_class_probs, 4) # preds = tf.one_hot(tf.cast(preds, tf.int32), FLAGS.num_classes) # return preds return box_coordinate, box_confidence, box_class_probs # =========================================================================== # # Main training routine. 
# =========================================================================== # def main(_): with tf.Graph().as_default(): summaries = set(tf.get_collection(tf.GraphKeys.SUMMARIES)) global_step = tf.train.create_global_step() # Select the dataset. dataset = dataset_factory.get_dataset( FLAGS.dataset_name, FLAGS.dataset_split_name, FLAGS.dataset_dir) max_box_num_per_image = dataset_factory.get_box_num_per_image(FLAGS.dataset_name, FLAGS.dataset_split_name) provider = slim.dataset_data_provider.DatasetDataProvider( dataset, num_readers=FLAGS.num_readers, common_queue_capacity=20 * FLAGS.batch_size, common_queue_min=10 * FLAGS.batch_size) # Get input for network: image, labels, bboxes. [image, glabels, gbboxes, box_num] = provider.get(['image', 'object/label', 'object/bbox', 'box_num']) train_image_size = FLAGS.train_image_size image, gbboxes = yolo_v2_preprocessing.preprocess_data(image, glabels, gbboxes, train_image_size, max_box_num_per_image) image_batch, gbboxes_batch = tf.train.batch( [image, gbboxes], batch_size=FLAGS.batch_size, num_threads=FLAGS.num_preprocessing_threads, capacity=5 * FLAGS.batch_size) batch_queue = slim.prefetch_queue.prefetch_queue( [image_batch, gbboxes_batch], capacity=2) image_batch, gbboxes_batch = batch_queue.dequeue() summaries.add(tf.summary.image('batch image', image_batch)) print(gbboxes_batch) box_coordinate, box_confidence, box_class_probs = inference_sequential(image_batch) total_loss, confidence_loss, coordinate_loss, category_loss, xy_loss, wh_loss, objects_loss, no_objects_loss = yolo_v2.yolo_v2_loss( box_coordinate, box_confidence, box_class_probs, [[1, 2], [1, 3], [2, 1],[3, 1], [1, 1]], gbboxes_batch, num_classes=FLAGS.num_classes) summaries.add(tf.summary.scalar('loss_total', total_loss)) summaries.add(tf.summary.scalar('loss_confidence', confidence_loss)) summaries.add(tf.summary.scalar('loss_confidence_object', objects_loss)) summaries.add(tf.summary.scalar('loss_confidence_no_object', no_objects_loss)) summaries.add(tf.summary.scalar('loss_coordinate', coordinate_loss)) summaries.add(tf.summary.scalar('loss_coordinate_xy', xy_loss)) summaries.add(tf.summary.scalar('loss_coordinate_wh', wh_loss)) summaries.add(tf.summary.scalar('loss_category', category_loss)) # optimizer = tf.train.GradientDescentOptimizer(0.01) optimizer = tf.train.AdamOptimizer( learning_rate=FLAGS.learning_rate, beta1=FLAGS.adam_beta1, beta2=FLAGS.adam_beta2, epsilon=FLAGS.opt_epsilon) train_op = slim.learning.create_train_op(total_loss, optimizer) summaries |= set(tf.get_collection(tf.GraphKeys.SUMMARIES)) summary_op = tf.summary.merge(list(summaries), name='summary_op') sess_config = tf.ConfigProto() sess_config.gpu_options.allow_growth = True final_loss = slim.learning.train(train_op, logdir=FLAGS.train_dir, summary_op=summary_op, global_step=global_step, number_of_steps=FLAGS.max_number_of_steps, log_every_n_steps=FLAGS.log_every_n_steps, save_summaries_secs=FLAGS.save_summaries_secs, save_interval_secs=FLAGS.save_interval_secs, session_config=sess_config) print('Finished training. Last batch loss %f' % final_loss) if __name__ == '__main__': os.environ['CUDA_VISIBLE_DEVICES'] = '0' tf.app.run()
PaulChongPeng/YOLO2TensorFlow
src/train.py
Python
apache-2.0
11,468
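The learning-rate flags above name three decay schemes ('fixed', 'exponential', 'polynomial'). A hedged sketch of how such a selector is typically wired up with the TF 1.x decay helpers; _configure_learning_rate is an illustrative name and is not defined in this file:

import tensorflow as tf

def _configure_learning_rate(flags, num_samples_per_epoch, global_step):
    # Decay every `num_epochs_per_decay` epochs worth of steps.
    decay_steps = int(num_samples_per_epoch / flags.batch_size * flags.num_epochs_per_decay)
    if flags.learning_rate_decay_type == 'exponential':
        return tf.train.exponential_decay(flags.learning_rate, global_step, decay_steps,
                                          flags.learning_rate_decay_factor, staircase=True)
    elif flags.learning_rate_decay_type == 'fixed':
        return tf.constant(flags.learning_rate, name='fixed_learning_rate')
    elif flags.learning_rate_decay_type == 'polynomial':
        return tf.train.polynomial_decay(flags.learning_rate, global_step, decay_steps,
                                         flags.end_learning_rate, power=1.0, cycle=False)
    raise ValueError('learning_rate_decay_type [%s] was not recognized'
                     % flags.learning_rate_decay_type)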
""" A Cobbler Profile. A profile is a reference to a distribution, possibly some kernel options, possibly some Virt options, and some kickstart data. Copyright 2006-2008, Red Hat, Inc Michael DeHaan <mdehaan@redhat.com> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA """ import utils import item import time from cexceptions import * from utils import _ class Profile(item.Item): TYPE_NAME = _("profile") COLLECTION_TYPE = "profile" def make_clone(self): ds = self.to_datastruct() cloned = Profile(self.config) cloned.from_datastruct(ds) return cloned def clear(self,is_subobject=False): """ Reset this object. """ self.name = None self.uid = "" self.random_id = "" self.owners = self.settings.default_ownership self.distro = (None, '<<inherit>>')[is_subobject] self.enable_menu = (self.settings.enable_menu, '<<inherit>>')[is_subobject] self.kickstart = (self.settings.default_kickstart , '<<inherit>>')[is_subobject] self.kernel_options = ({}, '<<inherit>>')[is_subobject] self.kernel_options_post = ({}, '<<inherit>>')[is_subobject] self.ks_meta = ({}, '<<inherit>>')[is_subobject] self.template_files = ({}, '<<inherit>>')[is_subobject] self.virt_cpus = (1, '<<inherit>>')[is_subobject] self.virt_file_size = (self.settings.default_virt_file_size, '<<inherit>>')[is_subobject] self.virt_ram = (self.settings.default_virt_ram, '<<inherit>>')[is_subobject] self.repos = ([], '<<inherit>>')[is_subobject] self.depth = 1 self.virt_type = (self.settings.default_virt_type, '<<inherit>>')[is_subobject] self.virt_path = ("", '<<inherit>>')[is_subobject] self.virt_bridge = (self.settings.default_virt_bridge, '<<inherit>>')[is_subobject] self.dhcp_tag = ("default", '<<inherit>>')[is_subobject] self.mgmt_classes = ([], '<<inherit>>')[is_subobject] self.parent = '' self.server = "<<inherit>>" self.comment = "" self.ctime = 0 self.mtime = 0 self.name_servers = (self.settings.default_name_servers, '<<inherit>>')[is_subobject] self.redhat_management_key = "<<inherit>>" def from_datastruct(self,seed_data): """ Load this object's properties based on seed_data """ self.parent = self.load_item(seed_data,'parent','') self.name = self.load_item(seed_data,'name') self.owners = self.load_item(seed_data,'owners',self.settings.default_ownership) self.distro = self.load_item(seed_data,'distro') self.enable_menu = self.load_item(seed_data,'enable_menu', self.settings.enable_menu) self.kickstart = self.load_item(seed_data,'kickstart') self.kernel_options = self.load_item(seed_data,'kernel_options') self.kernel_options_post = self.load_item(seed_data,'kernel_options_post') self.ks_meta = self.load_item(seed_data,'ks_meta') self.template_files = self.load_item(seed_data,'template_files', {}) self.repos = self.load_item(seed_data,'repos', []) self.depth = self.load_item(seed_data,'depth', 1) self.dhcp_tag = self.load_item(seed_data,'dhcp_tag', 'default') self.server = self.load_item(seed_data,'server', '<<inherit>>') 
self.mgmt_classes = self.load_item(seed_data,'mgmt_classes', []) self.comment = self.load_item(seed_data,'comment','') self.ctime = self.load_item(seed_data,'ctime',0) self.mtime = self.load_item(seed_data,'mtime',0) self.name_servers = self.load_item(seed_data,'name_servers',[]) self.redhat_management_key = self.load_item(seed_data,'redhat_management_key', '<<inherit>>') # backwards compatibility if type(self.repos) != list: # ensure we are formatted correctly though if some repo # defs don't exist on this side, don't fail as we need # to convert everything -- cobbler check can report it self.set_repos(self.repos,bypass_check=True) self.set_parent(self.parent) # virt specific self.virt_ram = self.load_item(seed_data,'virt_ram',self.settings.default_virt_ram) self.virt_file_size = self.load_item(seed_data,'virt_file_size',self.settings.default_virt_file_size) self.virt_path = self.load_item(seed_data,'virt_path') self.virt_type = self.load_item(seed_data,'virt_type', self.settings.default_virt_type) self.virt_bridge = self.load_item(seed_data,'virt_bridge', self.settings.default_virt_bridge) self.virt_cpus = self.load_item(seed_data,'virt_cpus',1) # backwards compatibility -- convert string entries to dicts for storage if self.kernel_options != "<<inherit>>" and type(self.kernel_options) != dict: self.set_kernel_options(self.kernel_options) if self.kernel_options_post != "<<inherit>>" and type(self.kernel_options_post) != dict: self.set_kernel_options_post(self.kernel_options_post) if self.ks_meta != "<<inherit>>" and type(self.ks_meta) != dict: self.set_ksmeta(self.ks_meta) if self.repos != "<<inherit>>" and type(self.ks_meta) != list: self.set_repos(self.repos,bypass_check=True) self.set_enable_menu(self.enable_menu) self.set_owners(self.owners) self.set_mgmt_classes(self.mgmt_classes) self.set_template_files(self.template_files) self.uid = self.load_item(seed_data,'uid','') if self.uid == '': self.uid = self.config.generate_uid() self.random_id = self.load_item(seed_data,'random_id','') if self.random_id == '' or len(self.random_id) != 4: self.random_id = self.config.generate_random_id(4) return self def set_parent(self,parent_name): """ Instead of a --distro, set the parent of this object to another profile and use the values from the parent instead of this one where the values for this profile aren't filled in, and blend them together where they are hashes. Basically this enables profile inheritance. To use this, the object MUST have been constructed with is_subobject=True or the default values for everything will be screwed up and this will likely NOT work. So, API users -- make sure you pass is_subobject=True into the constructor when using this. """ if parent_name is None or parent_name == '': self.parent = '' return True if parent_name == self.name: # check must be done in two places as set_parent could be called before/after # set_name... raise CX(_("self parentage is weird")) found = self.config.profiles().find(name=parent_name) if found is None: raise CX(_("profile %s not found, inheritance not possible") % parent_name) self.parent = parent_name self.depth = found.depth + 1 return True def set_distro(self,distro_name): """ Sets the distro. This must be the name of an existing Distro object in the Distros collection. 
""" d = self.config.distros().find(name=distro_name) if d is not None: self.distro = distro_name self.depth = d.depth +1 # reset depth if previously a subprofile and now top-level return True raise CX(_("distribution not found")) def set_redhat_management_key(self,key): return utils.set_redhat_management_key(self,key) def set_name_servers(self,data): data = utils.input_string_or_list(data) self.name_servers = data return True def set_enable_menu(self,enable_menu): """ Sets whether or not the profile will be listed in the default PXE boot menu. This is pretty forgiving for YAML's sake. """ self.enable_menu = utils.input_boolean(enable_menu) return True def set_dhcp_tag(self,dhcp_tag): if dhcp_tag is None: dhcp_tag = "" self.dhcp_tag = dhcp_tag return True def set_server(self,server): if server is None or server == "": server = "<inherit>" self.server = server return True def set_kickstart(self,kickstart): """ Sets the kickstart. This must be a NFS, HTTP, or FTP URL. Or filesystem path. Minor checking of the URL is performed here. """ if kickstart == "" or kickstart is None: self.kickstart = "" return True if kickstart == "<<inherit>>": self.kickstart = kickstart return True if utils.find_kickstart(kickstart): self.kickstart = kickstart return True raise CX(_("kickstart not found: %s") % kickstart) def set_virt_cpus(self,num): return utils.set_virt_cpus(self,num) def set_virt_file_size(self,num): return utils.set_virt_file_size(self,num) def set_virt_ram(self,num): return utils.set_virt_ram(self,num) def set_virt_type(self,vtype): return utils.set_virt_type(self,vtype) def set_virt_bridge(self,vbridge): return utils.set_virt_bridge(self,vbridge) def set_virt_path(self,path): return utils.set_virt_path(self,path) def set_repos(self,repos,bypass_check=False): return utils.set_repos(self,repos,bypass_check) def get_parent(self): """ Return object next highest up the tree. """ if self.parent is None or self.parent == '': result = self.config.distros().find(name=self.distro) else: result = self.config.profiles().find(name=self.parent) return result def is_valid(self): """ A profile only needs a name and a distro. Kickstart info, as well as Virt info, are optional. (Though I would say provisioning without a kickstart is *usually* not a good idea). """ if self.parent is None or self.parent == '': # all values must be filled in if not inheriting from another profile if self.name is None: raise CX(_("no name specified")) if self.distro is None: raise CX(_("no distro specified")) else: # if inheriting, specifying distro is not allowed, and # name is required, but there are no other rules. 
if self.name is None: raise CX(_("no name specified")) if self.distro != "<<inherit>>": raise CX(_("cannot override distro when inheriting a profile")) return True def to_datastruct(self): """ Return hash representation for the serializer """ return { 'name' : self.name, 'owners' : self.owners, 'distro' : self.distro, 'enable_menu' : self.enable_menu, 'kickstart' : self.kickstart, 'kernel_options' : self.kernel_options, 'kernel_options_post' : self.kernel_options_post, 'virt_file_size' : self.virt_file_size, 'virt_ram' : self.virt_ram, 'virt_bridge' : self.virt_bridge, 'virt_cpus' : self.virt_cpus, 'ks_meta' : self.ks_meta, 'template_files' : self.template_files, 'repos' : self.repos, 'parent' : self.parent, 'depth' : self.depth, 'virt_type' : self.virt_type, 'virt_path' : self.virt_path, 'dhcp_tag' : self.dhcp_tag, 'server' : self.server, 'mgmt_classes' : self.mgmt_classes, 'comment' : self.comment, 'ctime' : self.ctime, 'mtime' : self.mtime, 'name_servers' : self.name_servers, 'uid' : self.uid, 'random_id' : self.random_id, 'redhat_management_key' : self.redhat_management_key } def printable(self): """ A human readable representaton """ buf = _("profile : %s\n") % self.name if self.distro == "<<inherit>>": buf = buf + _("parent : %s\n") % self.parent else: buf = buf + _("distro : %s\n") % self.distro buf = buf + _("comment : %s\n") % self.comment buf = buf + _("created : %s\n") % time.ctime(self.ctime) buf = buf + _("dhcp tag : %s\n") % self.dhcp_tag buf = buf + _("enable menu : %s\n") % self.enable_menu buf = buf + _("kernel options : %s\n") % self.kernel_options buf = buf + _("kickstart : %s\n") % self.kickstart buf = buf + _("ks metadata : %s\n") % self.ks_meta buf = buf + _("mgmt classes : %s\n") % self.mgmt_classes buf = buf + _("modified : %s\n") % time.ctime(self.mtime) buf = buf + _("name servers : %s\n") % self.name_servers buf = buf + _("owners : %s\n") % self.owners buf = buf + _("post kernel options : %s\n") % self.kernel_options_post buf = buf + _("redhat mgmt key : %s\n") % self.redhat_management_key buf = buf + _("repos : %s\n") % self.repos buf = buf + _("server : %s\n") % self.server buf = buf + _("template_files : %s\n") % self.template_files buf = buf + _("virt bridge : %s\n") % self.virt_bridge buf = buf + _("virt cpus : %s\n") % self.virt_cpus buf = buf + _("virt file size : %s\n") % self.virt_file_size buf = buf + _("virt path : %s\n") % self.virt_path buf = buf + _("virt ram : %s\n") % self.virt_ram buf = buf + _("virt type : %s\n") % self.virt_type return buf def remote_methods(self): return { 'name' : self.set_name, 'parent' : self.set_parent, 'profile' : self.set_name, 'distro' : self.set_distro, 'enable-menu' : self.set_enable_menu, 'enable_menu' : self.set_enable_menu, 'kickstart' : self.set_kickstart, 'kopts' : self.set_kernel_options, 'kopts-post' : self.set_kernel_options_post, 'kopts_post' : self.set_kernel_options_post, 'virt-file-size' : self.set_virt_file_size, 'virt_file_size' : self.set_virt_file_size, 'virt-ram' : self.set_virt_ram, 'virt_ram' : self.set_virt_ram, 'ksmeta' : self.set_ksmeta, 'template-files' : self.set_template_files, 'template_files' : self.set_template_files, 'repos' : self.set_repos, 'virt-path' : self.set_virt_path, 'virt_path' : self.set_virt_path, 'virt-type' : self.set_virt_type, 'virt_type' : self.set_virt_type, 'virt-bridge' : self.set_virt_bridge, 'virt_bridge' : self.set_virt_bridge, 'virt-cpus' : self.set_virt_cpus, 'virt_cpus' : self.set_virt_cpus, 'dhcp-tag' : self.set_dhcp_tag, 'dhcp_tag' : self.set_dhcp_tag, 'server' 
: self.set_server, 'owners' : self.set_owners, 'mgmt-classes' : self.set_mgmt_classes, 'mgmt_classes' : self.set_mgmt_classes, 'comment' : self.set_comment, 'name_servers' : self.set_name_servers, 'redhat_management_key' : self.set_redhat_management_key }
rubenk/cobbler
cobbler/item_profile.py
Python
gpl-2.0
18,189
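clear() initialises most attributes with the `(default, '<<inherit>>')[is_subobject]` idiom, which indexes a two-element tuple with a boolean: False selects the concrete default, True selects the inherit marker. A minimal illustration (pick_default is an illustrative name, not part of cobbler):

def pick_default(default, is_subobject):
    # bool indexes the tuple: False -> 0, True -> 1
    return (default, '<<inherit>>')[is_subobject]

print(pick_default({}, False))  # {}               -- top-level profile gets the real default
print(pick_default({}, True))   # '<<inherit>>'    -- sub-profile inherits from its parent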
def encrypt(data, key):
    """Shift (Caesar) encryption; assumes uppercase A-Z only."""
    data = data.upper()
    result = ""
    for char in data:
        char_position = ord(char)
        char_new_position = char_position + key
        if char_new_position > 90:  # wrap past 'Z'
            char_new_position -= 26
        result += chr(char_new_position)
    return result


def decrypt(data, key):
    """Shift (Caesar) decryption; assumes uppercase A-Z only."""
    data = data.upper()
    result = ""
    for char in data:
        char_position = ord(char)
        char_new_position = char_position - key
        if char_new_position < 65:  # wrap before 'A'
            char_new_position += 26
        result += chr(char_new_position)
    return result


def crack(data, alphabet_size):
    """Brute-force all keys and return every candidate plaintext."""
    results = []
    for key in range(alphabet_size):
        results.append(decrypt(data, key))
    return results
wilima/cryptography
cryptography/ciphers/shift.py
Python
mit
770
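A short usage example for these functions (they assume uppercase A-Z only):

ciphertext = encrypt("hello", 3)
print(ciphertext)                # KHOOR
print(decrypt(ciphertext, 3))    # HELLO
print(crack(ciphertext, 26)[3])  # HELLO -- brute force recovers it at key 3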
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import unittest import paddle.fluid as fluid import paddle import numpy as np class TestDataBalance(unittest.TestCase): def prepare_data(self): def fake_data_generator(): for n in range(self.total_ins_num): yield np.ones((3, 4)) * n, n # Prepare data with fluid.program_guard(fluid.Program(), fluid.Program()): reader = paddle.batch( fake_data_generator, batch_size=self.batch_size) feeder = fluid.DataFeeder( feed_list=[ fluid.layers.data( name='image', shape=[3, 4], dtype='float32'), fluid.layers.data( name='label', shape=[1], dtype='int64'), ], place=fluid.CPUPlace()) self.num_batches = fluid.recordio_writer.convert_reader_to_recordio_file( self.data_file_name, reader, feeder) def prepare_lod_data(self): def fake_data_generator(): for n in range(1, self.total_ins_num + 1): d1 = (np.ones((n, 3)) * n).astype('float32') d2 = (np.array(n).reshape((1, 1))).astype('int32') yield d1, d2 # Prepare lod data with fluid.program_guard(fluid.Program(), fluid.Program()): with fluid.recordio_writer.create_recordio_writer( filename=self.lod_data_file_name) as writer: eof = False generator = fake_data_generator() while (not eof): data_batch = [ np.array([]).reshape((0, 3)), np.array([]).reshape( (0, 1)) ] lod = [0] for _ in range(self.batch_size): try: ins = next(generator) except StopIteration: eof = True break for i, d in enumerate(ins): data_batch[i] = np.concatenate( (data_batch[i], d), axis=0) lod.append(lod[-1] + ins[0].shape[0]) if data_batch[0].shape[0] > 0: for i, d in enumerate(data_batch): t = fluid.LoDTensor() t.set(data_batch[i], fluid.CPUPlace()) if i == 0: t.set_lod([lod]) writer.append_tensor(t) writer.complete_append_tensor() def setUp(self): self.use_cuda = fluid.core.is_compiled_with_cuda() self.data_file_name = './data_balance_test.recordio' self.lod_data_file_name = './data_balance_with_lod_test.recordio' self.total_ins_num = 50 self.batch_size = 10 self.prepare_data() self.prepare_lod_data() def main(self): main_prog = fluid.Program() startup_prog = fluid.Program() with fluid.program_guard(main_prog, startup_prog): data_reader = fluid.layers.io.open_files( filenames=[self.data_file_name], shapes=[[-1, 3, 4], [-1, 1]], lod_levels=[0, 0], dtypes=['float32', 'int64']) if self.use_cuda: data_reader = fluid.layers.double_buffer(data_reader) image, label = fluid.layers.read_file(data_reader) place = fluid.CUDAPlace(0) if self.use_cuda else fluid.CPUPlace() exe = fluid.Executor(place) exe.run(startup_prog) build_strategy = fluid.BuildStrategy() build_strategy.enable_data_balance = True parallel_exe = fluid.ParallelExecutor( use_cuda=self.use_cuda, main_program=main_prog, build_strategy=build_strategy) if (parallel_exe.device_count > self.batch_size): print("WARNING: Unittest TestDataBalance skipped. 
\ For the result is not correct when device count \ is larger than batch size.") exit(0) fetch_list = [image.name, label.name] data_appeared = [False] * self.total_ins_num while (True): try: image_val, label_val = parallel_exe.run(fetch_list, return_numpy=True) except fluid.core.EOFException: break ins_num = image_val.shape[0] broadcasted_label = np.ones( (ins_num, 3, 4)) * label_val.reshape((ins_num, 1, 1)) self.assertEqual(image_val.all(), broadcasted_label.all()) for l in label_val: self.assertFalse(data_appeared[l[0]]) data_appeared[l[0]] = True for i in data_appeared: self.assertTrue(i) def main_lod(self): main_prog = fluid.Program() startup_prog = fluid.Program() with fluid.program_guard(main_prog, startup_prog): data_reader = fluid.layers.io.open_files( filenames=[self.lod_data_file_name], shapes=[[-1, 3], [-1, 1]], lod_levels=[1, 0], dtypes=['float32', 'int32']) ins, label = fluid.layers.read_file(data_reader) place = fluid.CUDAPlace(0) if self.use_cuda else fluid.CPUPlace() exe = fluid.Executor(place) exe.run(startup_prog) build_strategy = fluid.BuildStrategy() build_strategy.enable_data_balance = True parallel_exe = fluid.ParallelExecutor( use_cuda=self.use_cuda, main_program=main_prog, build_strategy=build_strategy) if parallel_exe.device_count > self.batch_size: print("WARNING: Unittest TestDataBalance skipped. \ For the result is not correct when device count \ is larger than batch size.") exit(0) fetch_list = [ins.name, label.name] data_appeared = [False] * self.total_ins_num while (True): try: ins_tensor, label_tensor = parallel_exe.run( fetch_list, return_numpy=False) except fluid.core.EOFException: break ins_val = np.array(ins_tensor) label_val = np.array(label_tensor) ins_lod = ins_tensor.lod()[0] self.assertEqual(ins_val.shape[1], 3) self.assertEqual(label_val.shape[1], 1) self.assertEqual(len(ins_lod) - 1, label_val.shape[0]) for i in range(0, len(ins_lod) - 1): ins_elem = ins_val[ins_lod[i]:ins_lod[i + 1]][:] label_elem = label_val[i][0] self.assertEqual(ins_elem.all(), label_elem.all()) self.assertFalse(data_appeared[int(label_elem - 1)]) data_appeared[int(label_elem - 1)] = True for i in data_appeared: self.assertTrue(i) def test_all(self): self.main() self.main_lod() if __name__ == '__main__': unittest.main()
QiJune/Paddle
python/paddle/fluid/tests/unittests/test_data_balance.py
Python
apache-2.0
8,145
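The assertions in main() rebuild each expected image by broadcasting its label over the (3, 4) image shape, which works because the fake generator yields np.ones((3, 4)) * n together with label n. The broadcast step in isolation, with illustrative values:

import numpy as np

# Every pixel of an image equals its label, so broadcasting the label back
# over (3, 4) must reproduce the image exactly.
label_val = np.array([[5], [7]])                       # shape (ins_num, 1)
ins_num = label_val.shape[0]
image_val = np.ones((ins_num, 3, 4)) * label_val.reshape((ins_num, 1, 1))

broadcasted_label = np.ones((ins_num, 3, 4)) * label_val.reshape((ins_num, 1, 1))
print(np.array_equal(image_val, broadcasted_label))    # True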
import datetime import boto.ec2 import boto.ec2.cloudwatch import boto.ec2.autoscale import boto.ses from boto.ec2.autoscale import LaunchConfiguration, AutoScalingGroup from boto.ec2.autoscale.tag import Tag import boto.utils from juliabox.plugins.compute_ec2 import CompEC2 from juliabox.jbox_util import LoggerMixin class Cluster(LoggerMixin): @staticmethod def get_spot_price(inst_type, minutes=60): conn = Cluster._ec2() end = datetime.datetime.utcnow() start = end - datetime.timedelta(minutes=minutes) next_token = None avzone_pricevals = {} avzone_pricestats = {} def median(lst): lst = sorted(lst) if len(lst) < 1: return None if len(lst) %2 == 1: return lst[((len(lst)+1)/2)-1] else: return float(sum(lst[(len(lst)/2)-1:(len(lst)/2)+1]))/2.0 def add_price(az, price): if az in avzone_pricevals: pricevals = avzone_pricevals[az] else: avzone_pricevals[az] = pricevals = [] pricevals.append(price) while True: prices = conn.get_spot_price_history(instance_type=inst_type, start_time=start.isoformat(), end_time=end.isoformat(), next_token=next_token) for p in prices: add_price(p.availability_zone, p.price) next_token = prices.next_token if (next_token is None) or (len(next_token) == 0): break for avzone, prices in avzone_pricevals.iteritems(): avzone_pricestats[avzone] = { 'count': len(prices), 'min': min(prices), 'avg': sum(prices)/float(len(prices)), 'median': median(prices), 'max': max(prices) } return avzone_pricestats @staticmethod def terminate_by_placement_group(gname): conn = Cluster._ec2() instances = conn.get_only_instances(filters={"placement-group-name": gname, "instance-state-name": "running"}) conn.terminate_instances(instance_ids=[i.id for i in instances]) @staticmethod def get_placement_group(gname): existing = Cluster.get_placement_groups(gname) return existing if (existing is None) else existing[0] @staticmethod def get_placement_groups(gname=None): conn = Cluster._ec2() try: existing = conn.get_all_placement_groups(gname) except boto.exception.EC2ResponseError as ex: #print("\t%s" % (repr(ex),)) return None if len(existing) == 0: return None return existing @staticmethod def create_placement_group(gname): if Cluster.get_placement_group(gname) is None: conn = Cluster._ec2() return conn.create_placement_group(gname, strategy='cluster') return True @staticmethod def delete_placement_group(gname): pgrp = Cluster.get_placement_group(gname) if pgrp is not None: pgrp.delete() Cluster.log_info("Deleted placement group %s", gname) else: Cluster.log_info("Placement group %s does not exist", gname) @staticmethod def get_launch_config(lconfig_name): auto_scale_conn = Cluster._autoscale() configs = auto_scale_conn.get_all_launch_configurations(names=[lconfig_name]) if len(configs) > 0: return configs[0] return None @staticmethod def create_launch_config(lconfig_name, image_id, inst_type, key_name, security_groups, spot_price=0, user_data_file=None, user_data=None, block_dev_mappings=None, ebs_optimized=False, overwrite=False): existing_config = Cluster.get_launch_config(lconfig_name) if existing_config is not None: if overwrite: existing_config.delete() Cluster.log_info("Deleted launch config %s to overwrite new config", lconfig_name) else: Cluster.log_error("Launch config %s already exists.", lconfig_name) raise Exception("Launch configuration already exists") auto_scale_conn = Cluster._autoscale() if user_data is None: if user_data_file is not None: with open(user_data_file, 'r') as udf: user_data = udf.read() lconfig = LaunchConfiguration() lconfig.instance_type = inst_type lconfig.name = 
lconfig_name lconfig.image_id = image_id lconfig.key_name = key_name lconfig.security_groups = security_groups lconfig.user_data = user_data if spot_price > 0: lconfig.spot_price = spot_price if block_dev_mappings is not None: lconfig.block_device_mappings = block_dev_mappings if ebs_optimized: lconfig.ebs_optimized = True auto_scale_conn.create_launch_configuration(lconfig) Cluster.log_info("Created launch configuration %s", lconfig.name) @staticmethod def delete_launch_config(lconfig_name): existing_config = Cluster.get_launch_config(lconfig_name) if existing_config is not None: existing_config.delete() Cluster.log_info("Deleted launch config %s", lconfig_name) else: Cluster.log_info("Launch config %s does not exist", lconfig_name) @staticmethod def create_autoscale_group(gname, lconfig_name, placement_group, size, zones=None): existing_group = CompEC2._get_autoscale_group(gname) if existing_group is not None: Cluster.log_error("Autoscale group %s already exists!", gname) return None tags = [Tag(key='Name', value=gname, propagate_at_launch=True, resource_id=gname)] if zones is None: zones = [x.name for x in Cluster._ec2().get_all_zones()] Cluster.log_info("zones: %r", zones) ag = AutoScalingGroup(group_name=gname, availability_zones=zones, launch_config=lconfig_name, placement_group=placement_group, tags=tags, desired_capacity=0, min_size=0, max_size=size) conn = Cluster._autoscale() return conn.create_auto_scaling_group(ag) @staticmethod def delete_autoscale_group(gname, force=False): existing_group = CompEC2._get_autoscale_group(gname) if existing_group is not None: existing_group.delete(force_delete=force) Cluster.log_error("Autoscale group %s deleted (forced=%r)", gname, force) else: Cluster.log_info("Autoscale group %s does not exist", gname) return None # @staticmethod # def launch_into_placement_group(gname, ami_name, key, inst_type, num_inst, sec_grp, spot_price=None): # conn = CloudHost.connect_ec2() # # ami = CloudHost.get_image(ami_name) # if ami is None: # CloudHost.log_error("Image with name %s not found.", ami_name) # return None # # ami_id = ami.id # # if spot_price is None: # resev = conn.run_instances(ami_id, min_count=num_inst, max_count=num_inst, # key_name=key, instance_type=inst_type, security_groups=[sec_grp], # placement=CloudHost.REGION, placement_group=gname) # else: # resev = conn.request_spot_instances(spot_price, ami_id, count=num_inst, # launch_group=gname, # key_name=key, instance_type=inst_type, security_groups=[sec_grp], # placement=CloudHost.REGION, placement_group=gname) # return resev.id # # # @staticmethod # # def get_spot_request(gname): # # conn = CloudHost.connect_ec2() # # conn.get_all_spot_instance_requests() # # @staticmethod # def wait_for_placement_group(gname, num_inst): # if Cluster.get_placement_group(gname) is None: # return False, -1 # count = len(CloudHost.get_public_addresses_by_placement_group(gname)) # return (num_inst == count), count # @staticmethod # def get_public_hostnames_by_tag(tag, value): # conn = CompEC2._connect_ec2() # instances = conn.get_only_instances(filters={"tag:"+tag: value, "instance-state-name": "running"}) # return [i.public_dns_name for i in instances] # # @staticmethod # def get_private_hostnames_by_tag(tag, value): # conn = CompEC2._connect_ec2() # instances = conn.get_only_instances(filters={"tag:"+tag: value, "instance-state-name": "running"}) # return [i.private_dns_name for i in instances] @staticmethod def get_public_hostnames_by_placement_group(gname): conn = Cluster._ec2() instances = 
conn.get_only_instances(filters={"placement-group-name": gname, "instance-state-name": "running"}) return [i.public_dns_name for i in instances] @staticmethod def get_public_ips_by_placement_group(gname): conn = Cluster._ec2() instances = conn.get_only_instances(filters={"placement-group-name": gname, "instance-state-name": "running"}) return [i.ip_address for i in instances] @staticmethod def get_private_hostnames_by_placement_group(gname): conn = Cluster._ec2() instances = conn.get_only_instances(filters={"placement-group-name": gname, "instance-state-name": "running"}) return [i.private_dns_name for i in instances] @staticmethod def get_private_ips_by_placement_group(gname): conn = Cluster._ec2() instances = conn.get_only_instances(filters={"placement-group-name": gname, "instance-state-name": "running"}) return [i.private_ip_address for i in instances] @staticmethod def _ec2(): return CompEC2._connect_ec2() @staticmethod def _autoscale(): return CompEC2._connect_autoscale() @staticmethod def get_autoscale_group(gname): return CompEC2._get_autoscale_group(gname) @staticmethod def get_autoscaled_instances(gname=None): return CompEC2.get_all_instances(gname)
mdpradeep/JuliaBox
engine/src/juliabox/plugins/compute_ec2/awscluster.py
Python
mit
10,721
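get_spot_price aggregates the spot-price history per availability zone using the nested median helper. The same median logic as a standalone function, ported to integer division (//) so the sketch also runs on Python 3, with a quick check:

def median(lst):
    # Same logic as the helper nested inside get_spot_price above.
    lst = sorted(lst)
    if len(lst) < 1:
        return None
    if len(lst) % 2 == 1:
        return lst[((len(lst) + 1) // 2) - 1]
    return float(sum(lst[(len(lst) // 2) - 1:(len(lst) // 2) + 1])) / 2.0

print(median([0.12, 0.30, 0.18]))        # 0.18
print(median([0.12, 0.30, 0.18, 0.20]))  # 0.19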
# -*- coding: utf-8 -*-

import six


def isnum(data):
    return isinstance(data, six.integer_types + (float,))


def isucode(data):
    return isinstance(data, six.text_type)


def ucode(data, *args, **kwargs):
    if isinstance(data, six.binary_type):
        return data.decode(*args, **kwargs)
    return data
atzm/amazonas
amazonas/util/compat.py
Python
bsd-2-clause
315
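A short usage sketch of these Python 2/3 compatibility helpers:

print(isnum(3), isnum(3.5), isnum("3"))     # True True False
print(isucode(u"text"), isucode(b"bytes"))  # True False
print(ucode(b"caf\xc3\xa9", "utf-8"))       # café (bytes decoded to text)
print(ucode(u"café"))                       # café (already text, returned unchanged)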
"""empty message Revision ID: 6d8e9e4138bf Revises: 445667ce6268 Create Date: 2016-03-03 10:36:03.205829 """ # revision identifiers, used by Alembic. revision = '6d8e9e4138bf' down_revision = '445667ce6268' from alembic import op import app import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('provas', sa.Column('data_inicio', sa.DateTime(), nullable=True)) op.add_column('provas', sa.Column('tempo_execucao', sa.Integer(), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('provas', 'tempo_execucao') op.drop_column('provas', 'data_inicio') ### end Alembic commands ###
Maethorin/concept2
migrations/versions/6d8e9e4138bf_.py
Python
mit
760
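This revision is applied with the usual Alembic workflow. One way to invoke it programmatically, assuming an alembic.ini is present in the working directory (the project may instead wrap this through its own migration tooling):

from alembic.config import main as alembic_main

# Equivalent to running `alembic upgrade 6d8e9e4138bf` on the command line;
# `alembic downgrade 445667ce6268` reverses it.
alembic_main(argv=["upgrade", "6d8e9e4138bf"])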
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """An in-process, local XLA client in Python, supporting AOT compilation.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import enum # pylint: disable=g-bad-import-order import inspect import itertools import os import numpy as np from tensorflow.compiler.xla import xla_data_pb2 from tensorflow.compiler.xla.python import pywrap_xla as c_api # Most functions are snake_case for consistency with other modules, whereas # method names of ComputationBuilder and LocalComputation are CamelCase for # consistency with XLA. # pylint: disable=invalid-name _OP_METADATA_FIELDS = [ 'op_type', 'op_name', 'source_file', 'source_line', ] OpMetadata = collections.namedtuple('OpMetadata', _OP_METADATA_FIELDS) def OpMetadataToProto(pyobj): proto = xla_data_pb2.OpMetadata() for field in _OP_METADATA_FIELDS: attr = getattr(pyobj, field) if attr is not None: setattr(proto, field, attr) return proto def CurrentSourceInfoMetadata(op_type=None, op_name=None, skip_frames=1): """Helper for use in source mapping that returns an OpMetadata object.""" full_filename, lineno = inspect.stack()[skip_frames][1:3] filename = os.path.basename(full_filename) return OpMetadata( op_type=op_type, op_name=op_name, source_file=filename, source_line=lineno) class PaddingType(enum.Enum): VALID = 1 SAME = 2 def _convert_padding_type_to_pad_values(padding_type, lhs_dims, rhs_dims, window_strides): """Maps PaddingType (VALID or SAME) to pad values (list of pairs of ints).""" if padding_type == PaddingType.VALID: return [(0, 0)] * len(window_strides) out_shape = np.ceil(np.true_divide(lhs_dims, window_strides)).astype(int) pad_sizes = [max((out_size - 1) * stride + filter_size - in_size, 0) for out_size, stride, filter_size, in_size in zip(out_shape, window_strides, rhs_dims, lhs_dims)] return [(pad_size // 2, pad_size - pad_size // 2) for pad_size in pad_sizes] _UNARY_OPS = [ 'Not', 'Abs', 'Exp', 'Floor', 'Round', 'Ceil', 'Log', 'Sign', 'Cos', 'Sin', 'Tanh', 'SqrtF32', 'SquareF32', 'IsFinite', 'ReciprocalF32', 'Neg', 'Sort', ] _BINARY_OPS = [ 'Eq', 'Ne', 'Ge', 'Gt', 'Lt', 'Le', 'Add', 'Sub', 'Mul', 'Div', 'Rem', 'Max', 'Min', 'And', 'Or', 'Pow', ] XLA_ELEMENT_TYPE_TO_DTYPE = { xla_data_pb2.PRED: np.dtype('bool'), xla_data_pb2.S8: np.dtype('int8'), xla_data_pb2.S16: np.dtype('int16'), xla_data_pb2.S32: np.dtype('int32'), xla_data_pb2.S64: np.dtype('int64'), xla_data_pb2.U8: np.dtype('uint8'), xla_data_pb2.U16: np.dtype('uint16'), xla_data_pb2.U32: np.dtype('uint32'), xla_data_pb2.U64: np.dtype('uint64'), xla_data_pb2.F16: np.dtype('float16'), xla_data_pb2.F32: np.dtype('float32'), xla_data_pb2.F64: np.dtype('float64'), xla_data_pb2.C64: np.dtype('complex64'), xla_data_pb2.TUPLE: np.dtype(np.object), } # Note the conversion on the key. 
Numpy has a known issue wherein dtype hashing # doesn't work as expected (https://github.com/numpy/numpy/issues/7242). Thus, # when keying by dtype in this dict, we use the string form of dtypes. DTYPE_TO_XLA_ELEMENT_TYPE = {str(dt): et for et, dt in XLA_ELEMENT_TYPE_TO_DTYPE.items()} def dtype_to_etype(dtype): """Convenience function for reading DTYPE_TO_XLA_ELEMENT_TYPE.""" return DTYPE_TO_XLA_ELEMENT_TYPE[str(np.dtype(dtype))] class LocalBuffer(object): """Represents a handle to data owned by XLA. The referent is ready for use in executing a local, compiled Computation. On XLA platforms involving a device (e.g. GPU), this means the referent is in device memory. """ def __init__(self, c_local_shaped_buffer): self.c_local_shaped_buffer = c_local_shaped_buffer self._delete = c_api.DeleteLocalShapedBuffer @staticmethod def from_py(npval, layout_fn=None): npval = require_numpy_array_layout(npval) if layout_fn: shape = Shape.from_numpy(npval) shape = shape.map_leaves(layout_fn) else: shape = None return LocalBuffer(c_api.LocalShapedBuffer.FromLiteral(npval, shape)) def to_py(self): return self.c_local_shaped_buffer.ToLiteral() def delete(self): if self.c_local_shaped_buffer is not None: self._delete(self.c_local_shaped_buffer) self.c_local_shaped_buffer = None def is_deleted(self): return self.c_local_shaped_buffer is None def __del__(self): self.delete() class Shape(object): """XLA shape. Represents an XLA shape by a corresponding Python/Numpy type and a list of dimensions, which are themselves Shapes in case this one represents an XLA tuple. """ def __init__(self, np_dtype, dimensions, minor_to_major=None): assert isinstance(dimensions, tuple) self.np_dtype = np_dtype self._dimensions = dimensions self._minor_to_major = minor_to_major self._check_minor_to_major() def __eq__(self, other): # pylint: disable=protected-access return (self.np_dtype == other.np_dtype and self._dimensions == other._dimensions and self._minor_to_major == other._minor_to_major) def __repr__(self): return ('xla_client.Shape(np_dtype={!r}, dimensions={!r}, ' 'minor_to_major={!r})').format(self.np_dtype, self._dimensions, self._minor_to_major) def element_type(self): return DTYPE_TO_XLA_ELEMENT_TYPE[str(self.np_dtype)] def is_tuple(self): return self.element_type() == xla_data_pb2.TUPLE def dimensions(self): if self.is_tuple(): raise ValueError('Tuple shape has no dimensions') return self._dimensions def minor_to_major(self): return self._minor_to_major def tuple_shapes(self): if not self.is_tuple(): raise ValueError('Shape is not a tuple shape') return self._dimensions def rank(self): return len(self.dimensions()) def map_leaves(self, f): """Map f over each leaf-level array subshape. Args: f: The function to apply. Whenever f returns None, the identity is applied instead. Returns: A new Shape with the mapped leaves. 
""" if self.is_tuple(): children = tuple(child.map_leaves(f) for child in self.tuple_shapes()) return Shape(np.dtype('O'), children) else: mapped = f(self) return self if mapped is None else mapped def _check_minor_to_major(self): mtm = self._minor_to_major if self.is_tuple(): assert mtm is None, self if mtm is not None: assert self.rank() == len(mtm), self assert sorted(mtm) == range(len(mtm)), self def update_minor_to_major(self, minor_to_major): if not isinstance(minor_to_major, tuple): raise TypeError('minor_to_major must be a tuple') updated = Shape(self.np_dtype, tuple(self.dimensions()), minor_to_major) updated._check_minor_to_major() # pylint: disable=protected-access return updated @staticmethod def from_numpy(npval): def convert(npval): if isinstance(npval, tuple): return Shape(np.dtype('O'), tuple(convert(elt) for elt in npval)) else: return Shape(npval.dtype, np.shape(npval)) return convert(require_numpy_array_layout(npval)) def _wrap_shape(shape_info): dtype, dims = shape_info element_type = DTYPE_TO_XLA_ELEMENT_TYPE[str(dtype)] if element_type == xla_data_pb2.TUPLE: dims = tuple(_wrap_shape(subshape_info) for subshape_info in dims) return Shape(dtype, dims) def _wrap_data_handle(handle): cdh = xla_data_pb2.ComputationDataHandle() cdh.handle = handle return cdh def _unwrap_data_handle(handle_proto): return handle_proto.handle def _unwrap_data_handles(handle_protos): return [_unwrap_data_handle(cdh) for cdh in handle_protos] def require_numpy_array_layout(value): if isinstance(value, tuple): return tuple(require_numpy_array_layout(x) for x in value) else: return np.require(value, requirements=['C', 'A']) class CompileOptions(object): """Python object for XLA compile options. These options can be passed to the 'compile' step when using a local XLA client. """ def __init__(self): self.generate_hlo_graph = None self.dump_optimized_hlo_proto_to = None self.dump_per_pass_hlo_proto_to = None self.hlo_profile = False def transfer_to_infeed(value, replica_number=None): """Transfers the given value into the XLA infeed queue. XLA's infeed queue is a single queue that feeds the "XLA virtual machine" with a totally ordered stream of values. This is dequeued from XLA computations via the Infeed() operation. Args: value: the value that the caller would like to enqueue into the XLA infeed queue replica_number: the replica number to infeed the value to -- if not provided, then the default replica (trivially replica 0) is used. """ if replica_number is None: c_api.TransferToInfeedLocal(require_numpy_array_layout(value)) else: c_api.TransferToInfeedLocalReplica( require_numpy_array_layout(value), replica_number) def transfer_from_outfeed(shape, replica_number=None): """Transfers a literal of the given shape from replica_number's outfeed. Args: shape: The shape of the value to transfer from outfeed. replica_number: The replica number ordinal to transfer the outfeed value from. (Each replica has a distinct outfeed queue.) Returns: The literal value that is produced from the outfeed queue. """ return c_api.TransferFromOutfeedLocalReplica(shape, replica_number or 0) class LocalComputation(object): """Python wrapper for a local XLA Computation. A LocalComputation can be executed if it is compiled. Otherwise, it can still be used as a Computation where required by the ComputationBuilder methods. """ def __init__(self, c_local_computation, is_compiled): self.c_local_computation = c_local_computation self.is_compiled = is_compiled # Ensure a reference to C-based destructor for use in __del__. 
if is_compiled: assert isinstance(c_local_computation, c_api.CompiledLocalComputation) self._delete = c_api.DeleteCompiledLocalComputation else: assert isinstance(c_local_computation, c_api.LocalComputation) self._delete = c_api.DeleteLocalComputation def Compile(self, argument_shapes=(), compile_options=None, layout_fn=None): """Compiles an un-compiled local computation. Local computations are the result of a "LocalComputationBuild'ing" process -- they start in uncompiled form, and via a call to Compile() turn into a compiled local computation. Raises: ValueError: if this is already a compiled local computation. Arguments: argument_shapes: parameter shapes -- they are first laid out by layout_fn if layout_fn is provided. Otherwise, the default layout for those shapes will be used. compile_options: options to use for compilation, includes an optional laid out result shape for the computation. layout_fn: lambda that is used to lay out the argument/result shapes. Returns: A newly *compiled* local computation instance. """ if self.is_compiled: raise ValueError('Attempt to compile a compiled local XLA computation.') if layout_fn: argument_shapes = [ shape.map_leaves(layout_fn) for shape in argument_shapes ] result_shape = _wrap_shape(self.c_local_computation.GetReturnValueShape()) result_shape = result_shape.map_leaves(layout_fn) compile_options = compile_options or CompileOptions() compile_options.result_shape = result_shape return LocalComputation( self.c_local_computation.Compile(argument_shapes, compile_options), is_compiled=True) def CompileWithExampleArguments(self, arguments=(), compile_options=None, layout_fn=None): return self.Compile( argument_shapes=[Shape.from_numpy(arg) for arg in arguments], compile_options=compile_options, layout_fn=layout_fn) def Execute(self, arguments=(), layout_fn=None): """Execute with Python values as arguments and return value.""" if not self.is_compiled: raise ValueError('Cannot execute an uncompiled local XLA computation.') argument_shapes = [Shape.from_numpy(arg) for arg in arguments] if layout_fn: argument_shapes = [ shape.map_leaves(layout_fn) for shape in argument_shapes ] else: argument_shapes = [None for shape in argument_shapes] arguments = tuple(map(require_numpy_array_layout, arguments)) return self.c_local_computation.Execute(arguments, argument_shapes) def ExecuteWithLocalBuffers(self, arguments=()): """Execute with LocalBuffer arguments and return value.""" if not self.is_compiled: raise ValueError('Cannot execute an uncompiled local XLA computation.') arguments = tuple(arguments) if any(arg.is_deleted() for arg in arguments): raise ValueError('Executing with deleted local buffer argument') return LocalBuffer( self.c_local_computation.ExecuteWithShapedBuffers( [arg.c_local_shaped_buffer for arg in arguments])) def __del__(self): self._delete(self.c_local_computation) class ComputationBuilder(object): """XLA computation builder. Enqueues XLA ops in sequence and in order to build a LocalComputation, which in turn can be compiled into a CompiledLocalComputation, which in turn can be locally executed. """ # The methods of this class map 1-to-1 onto the XLA C++ # computation builder API. Therefore, there's no need to laboriously list # arguments and return values for every method, especially where it's obvious. 
# # pylint: disable=g-doc-return-or-yield # pylint: disable=g-doc-args def __init__(self, name): self._client = c_api.LocalComputationBuilder(name.encode('utf8')) self._parameter_numbering = itertools.count() def Build(self): return LocalComputation(self._client.Build(), is_compiled=False) def SetOpMetadata(self, op_metadata): """Set metadata for operations that are about to be enqueued.""" self._client.SetOpMetadata(op_metadata) def ClearOpMetadata(self): """Clear metadata for operations that are about to be enqueued.""" self._client.ClearOpMetadata() def Infeed(self, shape): """Enqueues an infeed op onto the computation. Infeed operations dequeue data of the given shape from the device's infeed queue for subsequent use in the computation. Returns: A ComputationDataHandle message. """ return _wrap_data_handle(self._client.Infeed(shape)) def Outfeed(self, operand): """Enqueues an outfeed op onto the computation. Outfeed operations enqueue data, using the given operand, onto the XLA outfeed queue for subsequent dequeue via the client API. """ self._client.Outfeed( _unwrap_data_handle(operand), self.GetShape(operand), ''.encode('utf-8')) def Constant(self, value): """Enqueues a constant op onto the computation. Args: value: value for the constant, as a np.array with an explicit dtype set to one of the supported types. Returns: A ComputationDataHandle message. """ value = require_numpy_array_layout(value) return _wrap_data_handle(self._client.ConstantLiteral(value)) def ConstantF32Scalar(self, value): """Convenience method to enqueue a scalar F32 constant op. Args: value: a floating-point number. Returns: A ComputationDataHandle message. """ return self.Constant(np.array(value, dtype=np.float32)) def ConstantF64Scalar(self, value): """Convenience method to enqueue a scalar F32 constant op. Args: value: a floating-point number. Returns: A ComputationDataHandle message. """ return self.Constant(np.array(value, dtype=np.float64)) def ConstantS32Scalar(self, value): """Convenience method to enqueue a scalar S32 constant op. Args: value: a floating-point number. Returns: A ComputationDataHandle message. """ return self.Constant(np.array(value, dtype=np.int32)) def ConstantS64Scalar(self, value): """Convenience method to enqueue a scalar S64 constant op. Args: value: a floating-point number. Returns: A ComputationDataHandle message. """ return self.Constant(np.array(value, dtype=np.int64)) def ConstantPredScalar(self, value): """Convenience method to enqueue a scalar PRED constant op. Args: value: a boolean value. Returns: A ComputationDataHandle message. """ return self.Constant(np.array(value, dtype=np.bool)) def ParameterWithShape(self, shape, name=None, parameter_num=None): """Enqueues a Parameter op onto the computation, given a shape. Args: shape: the parameter's shape as a Shape object. name: optional string name for the parameter. parameter_num: parameter number in the computation function. If None, the next linear parameter number is used. The default value capability can be used for auto-numbering. If you're using auto-numbering for some parameters, use it for *all* parameters to avoid clashes. Returns: A ComputationDataHandle message. """ if name is None: name = '' if parameter_num is None: parameter_num = next(self._parameter_numbering) return _wrap_data_handle( self._client.Parameter(parameter_num, shape, name.encode('utf8'))) def ParameterFromNumpy(self, value, name=None, parameter_num=None): """Enqueues a Parameter op onto the computation. 
Args: value: a Numpy array, or a nested tuple thereof, from which the shape is inferred. name: as in ParameterWithShape. parameter_num: as in ParameterWithShape. Returns: A ComputationDataHandle message. """ return self.ParameterWithShape( Shape.from_numpy(value), name=name, parameter_num=parameter_num) def Broadcast(self, operand, sizes): """Enqueues a broadcast operation onto the computation. Args: operand: the operand ComputationDataHandle to broadcast. sizes: an iterable of broadcast sizes. Returns: A ComputationDataHandle representing the added broadcast op. """ return _wrap_data_handle( self._client.Broadcast(_unwrap_data_handle(operand), sizes)) def Concatenate(self, operands, dimension): """Enqueues a concatenate operation onto the computation. Args: operands: the operands to concatenate. dimension: the dimension in which to perform the concatenation. Returns: A ComputationDataHandle representing the added concatenate op. """ return _wrap_data_handle( self._client.ConcatInDim(_unwrap_data_handles(operands), dimension)) def ConvertElementType(self, operand, new_element_type): """Enqueues an element type conversion operation onto the computation. Args: operand: the operand to convert. new_element_type: the target primitive type. Returns: A ComputationDataHandle representing the added conversion op. """ return _wrap_data_handle( self._client.ConvertElementType( _unwrap_data_handle(operand), new_element_type)) def GetShape(self, operand): return _wrap_shape(self._client.GetShape(_unwrap_data_handle(operand))) def GetReturnValueShape(self): return _wrap_shape(self._client.GetReturnValueShape()) def GetComputationStats(self): raise NotImplementedError() def Pad(self, operand, padding_value, padding_config): """Enqueues a Pad operation onto the computation. Args: operand: ComputationDataHandle representing the array to pad. padding_value: ComputationDataHandle representing the scalar pad value. padding_config: either an xla_data_pb2.PaddingConfig or a list of integer triples (edge_padding_low, edge_padding_high, interior_padding) representing the configuration of the padding operation. Returns: A ComputationDataHandle representing the added Pad op. """ if not isinstance(padding_config, xla_data_pb2.PaddingConfig): padding_config = GetPaddingConfigFromTriples(padding_config) return _wrap_data_handle( self._client.Pad(_unwrap_data_handle(operand), _unwrap_data_handle(padding_value), padding_config)) def Reshape(self, operand, dimensions, new_sizes): """Enqueues a reshape op onto the computation. Args: operand: ComputationDataHandle representing the array to be reshaped. dimensions: sequence of integers encoding the order in which dimensions are collapsed or None, in which case dimensions are flattened in order. new_sizes: sequence of integers encoding the new dimension sizes (shape). Returns: A ComputationDataHandle representing the added Reshape op. """ if dimensions is None: ndim = len(self.GetShape(operand).dimensions()) dimensions = tuple(range(ndim)) return _wrap_data_handle( self._client.Reshape( _unwrap_data_handle(operand), dimensions, new_sizes)) def CrossReplicaSum(self, operand): """CrossReplicaSum op. Args: operand: the operand to sum across replica instances. Returns: A ComputationDataHandle that has the sum of the value among all replicas. 
""" return _wrap_data_handle( self._client.CrossReplicaSum(_unwrap_data_handle(operand))) def Collapse(self, operand, dimensions): """Collapse op.""" return _wrap_data_handle( self._client.Collapse(_unwrap_data_handle(operand), dimensions)) def Trans(self, operand): """Specialized matrix transpose op.""" return _wrap_data_handle( self._client.Transpose(_unwrap_data_handle(operand), [1, 0])) def Transpose(self, operand, permutation): """Transpose op.""" return _wrap_data_handle( self._client.Transpose(_unwrap_data_handle(operand), permutation)) def Rev(self, operand, dimensions): """Rev op.""" return _wrap_data_handle( self._client.Rev(_unwrap_data_handle(operand), dimensions)) def Clamp(self, min, operand, max): # pylint: disable=redefined-builtin """Clamp op.""" return _wrap_data_handle( self._client.Clamp(_unwrap_data_handle(min), _unwrap_data_handle(operand), _unwrap_data_handle(max))) def SelectAndScatter(self, operand, select, window_dimensions, window_strides, padding, source, init_value, scatter): """Select and scatter op, used by the gradient of ReduceWindow. Args: operand: ComputationDataHandle for array of dimension N and type T over which the windows slide. select: Computation of type (T, T) -> Pred to apply to the elements of each window to indicate which element is selected. window_dimensions: sequence of N integers for dimensions of the window. window_strides: sequence of N integers for the strides of the window. padding: PaddingType representing either 'SAME' or 'VALID ' padding. source: ComputationDataHandle for array of type T with values to scatter. init_value: ComputationDataHandle of scalar type T for initial out value. scatter: Computation of type (T, T) -> T to apply to each scatter source element with its destination element. Returns: A ComputationDataHandle representing the added SelectAndScatter op. """ pads = _convert_padding_type_to_pad_values( padding, self.GetShape(operand).dimensions(), window_dimensions, window_strides) return _wrap_data_handle( self._client.SelectAndScatterWithGeneralPadding( _unwrap_data_handle(operand), select.c_local_computation, window_dimensions, window_strides, pads, _unwrap_data_handle(source), _unwrap_data_handle(init_value), scatter.c_local_computation)) def Select(self, pred, on_true, on_false): """Element-wise selection op. Constructs an output array from elements of two input arrays, based on the values of a predicate array. """ return _wrap_data_handle( self._client.Select( _unwrap_data_handle(pred), _unwrap_data_handle(on_true), _unwrap_data_handle(on_false))) def Slice(self, operand, start_indices, limit_indices, strides=None): """Enqueues a slice operation onto the computation. Args: operand: ComputationDataHandle for the N dimensional array to be sliced. start_indices: iterable of N integers containing the starting indices of the slice for each dimension. limit_indices: iterable of N integers containing the ending indices (exclusive) of the slice for each dimension. strides: optional iterable of N integers containing the stride sizes for each dimension. Returns: A ComputationDataHandle representing the added Slice op. """ if strides is None: start_indices = list(start_indices) strides = [1] * len(start_indices) return _wrap_data_handle( self._client.Slice( _unwrap_data_handle(operand), start_indices, limit_indices, strides)) def SliceInDim(self, operand, start_index, limit_index, stride, dimno): """Enqueues a slice-in-dimension operation onto the computation. 
Args: operand: ComputationDataHandle for the N dimensional array to be sliced. start_index: an integer containing the start index of the slice. limit_index: an integer containing the end index of the slice. stride: an integer containing the stride size for the slice. dimno: an integer indicating the dimension along which to slice. Returns: A ComputationDataHandle representing the added Slice op. """ return _wrap_data_handle( self._client.SliceInDim( _unwrap_data_handle(operand), start_index, limit_index, stride, dimno)) def DynamicSlice(self, operand, start_indices, slice_sizes): """Enqueues a slice op with dynamic start indices onto the computation. Args: operand: ComputationDataHandle for the N dimensional array to be sliced. start_indices: ComputationDataHandle for the 1D array of N integers containing the starting indices of the slice. slice_sizes: iterable of N integers containing the slice sizes in each dimension. Returns: A ComputationDataHandle representing the added DynamicSlice op. """ return _wrap_data_handle( self._client.DynamicSlice( _unwrap_data_handle(operand), _unwrap_data_handle(start_indices), slice_sizes)) def DynamicUpdateSlice(self, operand, update, start_indices): """Enqueues a dynamic update slice operation onto the computation. Args: operand: ComputationDataHandle for the N dimensional array to be updated. update: N dimensional array comprising the slice update. start_indices: Rank-1 array of N integers comprising the starting indices of the slice along each dimension. Returns: A ComputationDataHandle representing the added DynamicUpdateSlice op. """ return _wrap_data_handle( self._client.DynamicUpdateSlice( _unwrap_data_handle(operand), _unwrap_data_handle(update), _unwrap_data_handle(start_indices))) def Tuple(self, *ops): """Enqueues a tuple operation onto the computation. Args: ops: a sequence of tuple operands (each a ComputationDataHandle). Returns: A ComputationDataHandle representing the added Tuple op. """ return _wrap_data_handle(self._client.Tuple(_unwrap_data_handles(ops))) def GetTupleElement(self, tup, index): """Enqueues a 'get tuple element' operation onto the computation. Args: tup: the tuple operand (a ComputationDataHandle). index: numeric index to select from the tuple. Returns: A ComputationDataHandle representing the added GetTupleElement op. """ return _wrap_data_handle( self._client.GetTupleElement(_unwrap_data_handle(tup), index)) def Call(self, computation_to_apply, operands): """Enqueues a call operation onto the computation. Args: computation_to_apply: a Computation object. operands: an iterable of ComputationDataHandle. The number and types of operands must match the arity of computation_to_apply. Returns: A ComputationDataHandle representing the added call op. """ return _wrap_data_handle( self._client.Call(computation_to_apply.c_local_computation, _unwrap_data_handles(operands))) def Map(self, operands, computation_to_apply, dimensions, static_operands=()): """Enqueues a map operation onto the computation. Args: operands: an iterable of ComputationDataHandle. computation_to_apply: a Computation object. dimensions: dimensions over which to apply map the function. static_operands: auxiliary arguments passed to the applied computation. Returns: A ComputationDataHandle representing the added Map op. 
""" return _wrap_data_handle( self._client.Map( _unwrap_data_handles(operands), computation_to_apply.c_local_computation, dimensions, _unwrap_data_handles(static_operands))) def Reduce(self, operand, init_value, computation_to_apply, dimensions): """Enqueues a reduction operation onto the computation. Args: operand: reduction operand (ComputationDataHandle). init_value: reduction initial value (ComputationDataHandle). computation_to_apply: a Computation object - binary reduction function. dimensions: sequence of dimensions (integers) to reduce on. Returns: A ComputationDataHandle representing the added Reduce op. """ return _wrap_data_handle( self._client.Reduce( _unwrap_data_handle(operand), _unwrap_data_handle(init_value), computation_to_apply.c_local_computation, dimensions)) def ReduceWindow(self, operand, init_value, computation_to_apply, window_dimensions, window_strides, padding): """Enqueues a windowed reduction operation onto the computation. Args: operand: reduction operand (ComputationDataHandle). init_value: reduction initial value (ComputationDataHandle). computation_to_apply: a binary reduction function (Computation). window_dimensions: dimensions of window (sequence of integers). window_strides: strides for window (sequence of integers). padding: PaddingType representing either 'SAME' or 'VALID' padding. Returns: A ComputationDataHandle representing the added ReduceWindow op. """ pads = _convert_padding_type_to_pad_values( padding, self.GetShape(operand).dimensions(), window_dimensions, window_strides) return _wrap_data_handle( self._client.ReduceWindowWithGeneralPadding( _unwrap_data_handle(operand), _unwrap_data_handle(init_value), computation_to_apply.c_local_computation, window_dimensions, window_strides, pads)) def RngNormal(self, mu, sigma, dims): """Enqueues an RngNormal operation onto the computation. Args: mu: A ComputationDataHandle to an F32 scalar specifying the mean. sigma: A ComputationDataHandle to an F32 scalar specifying the standard deviation. dims: A 1D array-like of nonnegative integers specifying the dimensions. Returns: a ComputationDataHandle to the generated array of F32 values. """ shape = Shape(self.GetShape(mu).np_dtype, dims) return _wrap_data_handle( self._client.RngNormal( _unwrap_data_handle(mu), _unwrap_data_handle(sigma), shape)) def RngUniform(self, a, b, dims): """Enqueues an RngUniform operation onto the computation. Args: a: a ComputationDataHandle to an F32, S32, or U32 scalar (consistent with the type of b) specifying the low end of the interval [a, b) over which values are generated. b: a ComputationDataHandle to an F32, S32, or U32 scalar (consistent with the type of a) specifying the high end of the interval [a, b) over which values are generated. dims: A 1D array-like of nonnegative integers specifying the dimensions. Returns: a ComputationDataHandle to the generated array of values with the same numeric type (F32, S32, or U32) as the arguments a and b. """ shape = Shape(self.GetShape(a).np_dtype, dims) return _wrap_data_handle( self._client.RngUniform( _unwrap_data_handle(a), _unwrap_data_handle(b), shape)) def While(self, cond, body, init): """Enqueues a While operation onto the computation. Args: cond: a Computation for the loop condition, which has type T -> PRED body: a Computation for the loop body, which has type T -> T init: a ComputationDataHandle for the initial parameter, which has type T Returns: a ComputationDataHandle representing the While operation. 
""" return _wrap_data_handle( self._client.While(cond.c_local_computation, body.c_local_computation, _unwrap_data_handle(init))) def Conditional(self, pred, true_operand, true_computation, false_operand, false_computation): """Enqueues a Conditional operation onto the computation. Args: predicate: a ComputationDataHandle to test, which has scalar type PRED true_operand: a ComputationDataHandle of type T_0 true_computation: a Computation to apply to true_operand, type T_0 -> S false_operand: a ComputationDatahandle of type T_1 false_computation: a Computation to apply to false_operand, type T_1 -> S Returns: a ComputationDataHandle representing the Conditional operation. """ return _wrap_data_handle( self._client.Conditional( _unwrap_data_handle(pred), _unwrap_data_handle(true_operand), true_computation.c_local_computation, _unwrap_data_handle(false_operand), false_computation.c_local_computation)) def IsConstant(self, operand, num_parameters=0): """Enqueues an IsConstant operation onto the computation. Args: operand: a ComputationDataHandle to test. num_parameters: optional int, number of computation parameters to treat as constant (default 0). Returns: bool indicating whether `operand` is a compile-time constant, meaning its value does not depend on parameters with index greater than or equal to `num_parameters`. """ return self._client.IsConstant(_unwrap_data_handle(operand), num_parameters) def Dot(self, lhs, rhs): """Enqueues a dot operation onto the computation. Args: lhs: ComputationDataHandle for the rank 1 or rank 2 left-hand-side array. rhs: ComputationDataHandle for the rank 1 or rank 2 right-hand-side array. Returns: a ComputationDataHandle representing the Dot operation. """ return _wrap_data_handle( self._client.Dot(_unwrap_data_handle(lhs), _unwrap_data_handle(rhs))) def DotGeneral(self, lhs, rhs, dimension_numbers): """Enqueues a general dot operation onto the computation. Args: lhs: ComputationDataHandle for the left-hand-side array. rhs: ComputationDataHandle for the right-hand-side array. dimension_numbers: either an xla_data_pb2.DotDimensionNumbers or a nested tuple ((lhs_contract, rhs_contract), (lhs_batch, rhs_batch)) of lists of integers representing the dimensions to treat as contracting dimensions and batch dimensions on each input operand. Returns: a ComputationDataHandle representing the DotGeneral operation. """ if not isinstance(dimension_numbers, xla_data_pb2.DotDimensionNumbers): dimension_numbers = GetDotDimensionsFromLists(dimension_numbers) return _wrap_data_handle( self._client.DotGeneral( _unwrap_data_handle(lhs), _unwrap_data_handle(rhs), dimension_numbers)) def Conv(self, lhs, rhs, window_strides, padding): """Enqueues a Conv operation onto the computation. Args: lhs: ComputationDataHandle for the rank N+2 array of inputs. rhs: ComputationDataHandle for the rank N+2 array of kernel weights. window_strides: length-N array-like of integer kernel strides. padding: PaddingType representing either 'SAME' or 'VALID' padding. Returns: a ComputationDataHandle representing the Conv operation. 
""" pads = _convert_padding_type_to_pad_values( padding, self.GetShape(lhs).dimensions()[2:], self.GetShape(rhs).dimensions()[2:], window_strides) dimension_numbers = self._GetConvDimensionNumbers(len(window_strides)) return _wrap_data_handle( self._client.ConvGeneralDilated(_unwrap_data_handle(lhs), _unwrap_data_handle(rhs), window_strides, pads, (), (), dimension_numbers)) def ConvWithGeneralPadding(self, lhs, rhs, window_strides, padding, lhs_dilation, rhs_dilation): """Enqueues a ConvWithGeneralPadding operation onto the computation. Args: lhs: ComputationDataHandle for the rank N+2 array of inputs. rhs: ComputationDataHandle for the rank N+2 array of kernel weights. window_strides: length-N array-like of kernel strides. padding: length-N array-like of pairs of integers of (low, high) padding. lhs_dilation: length-N array-like of dilation factors. rhs_dilation: length-N array-like of dilation factors. Returns: A ComputationdataHandle representing the added ConvWithGeneralPadding op. """ dimension_numbers = self._GetConvDimensionNumbers(len(window_strides)) return _wrap_data_handle( self._client.ConvGeneralDilated(_unwrap_data_handle(lhs), _unwrap_data_handle(rhs), window_strides, padding, lhs_dilation, rhs_dilation, dimension_numbers)) def _GetConvDimensionNumbers(self, num_spatial_dims): """Create ConvolutionDimensionNumbers proto for convolutions.""" nd = num_spatial_dims dimension_numbers = xla_data_pb2.ConvolutionDimensionNumbers() dimension_numbers.input_batch_dimension = 0 dimension_numbers.input_feature_dimension = 1 dimension_numbers.output_batch_dimension = 0 dimension_numbers.output_feature_dimension = 1 dimension_numbers.kernel_output_feature_dimension = 0 dimension_numbers.kernel_input_feature_dimension = 1 dimension_numbers.input_spatial_dimensions.extend(range(2, 2 + nd)) dimension_numbers.kernel_spatial_dimensions.extend(range(2, 2 + nd)) dimension_numbers.output_spatial_dimensions.extend(range(2, 2 + nd)) return dimension_numbers def _forward_methods_to_local_builder(): """Forward remaining ComputationBuilder methods to the C API. Set up methods, corresponding to unary and binary XLA operations, whose calls are forwarded in a boilerplate manner to the underlying LocalComputationBuilder C-extension API. """ def forward_to_local_builder_with_handles(target_method, is_binop=False): """Generate a forwarding method that wraps/unwraps data handles.""" def forward(self, *args, **kwargs): unwrapped_args = [_unwrap_data_handle(arg) for arg in args] if is_binop and len(unwrapped_args) < 3: unwrapped_args.append(kwargs.get('broadcast_dimensions', ())) return _wrap_data_handle( target_method( self._client, # pylint: disable=protected-access *unwrapped_args)) return forward for method_name in _UNARY_OPS: forward = forward_to_local_builder_with_handles( getattr(c_api.LocalComputationBuilder, method_name)) forward.__name__ = method_name setattr(ComputationBuilder, method_name, forward) for method_name in _BINARY_OPS: forward = forward_to_local_builder_with_handles( getattr(c_api.LocalComputationBuilder, method_name), is_binop=True) forward.__name__ = method_name setattr(ComputationBuilder, method_name, forward) _forward_methods_to_local_builder() def initialize_replica_count(replica_count): """Initializes the desired replica count to use on XLA service init. Args: replica_count: number of replicas that are desired for set up during XLA initialization. Raises: A runtime exception if the XLA service has already been initialized. 
""" c_api.InitializeReplicaCount(replica_count) def get_replica_count(): """Returns the current replica count used for the XLA service. Note: this will return a value whether the XLA service has been initialized yet or not. """ return c_api.GetReplicaCount() def GetPaddingConfigFromTriples(triples): """Create PaddingConfig proto from list of triples of integers.""" padding_config = xla_data_pb2.PaddingConfig() for lo, hi, interior in triples: dimension = padding_config.dimensions.add() dimension.edge_padding_low = lo dimension.edge_padding_high = hi dimension.interior_padding = interior return padding_config def GetDotDimensionsFromLists(dimension_numbers): (lhs_contract, rhs_contract), (lhs_batch, rhs_batch) = dimension_numbers dot_dims_proto = xla_data_pb2.DotDimensionNumbers() dot_dims_proto.lhs_contracting_dimensions.extend(lhs_contract) dot_dims_proto.rhs_contracting_dimensions.extend(rhs_contract) dot_dims_proto.lhs_batch_dimensions.extend(lhs_batch) dot_dims_proto.rhs_batch_dimensions.extend(rhs_batch) return dot_dims_proto
eaplatanios/tensorflow
tensorflow/compiler/xla/python/xla_client.py
Python
apache-2.0
43,463
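The tail of xla_client.py above wires the unary and binary XLA ops onto ComputationBuilder by generating forwarding methods at import time rather than writing each wrapper out by hand. The snippet below is a standalone, self-contained sketch of that setattr-based forwarding pattern; every class and method name in it is illustrative, not taken from the XLA client.

# Standalone sketch of the forwarding pattern used by _forward_methods_to_local_builder:
# wrapper methods are generated once and attached to the builder class with setattr.
# _ToyClient / ToyBuilder are illustrative stand-ins, not XLA classes.
class _ToyClient(object):
    def Neg(self, x):
        return -x
    def Add(self, x, y):
        return x + y

class ToyBuilder(object):
    def __init__(self):
        self._client = _ToyClient()

def _forward_methods_to_toy_builder():
    def make_forward(method_name):
        def forward(self, *args):
            # Delegate to the underlying client object, as the real builder does.
            return getattr(self._client, method_name)(*args)
        forward.__name__ = method_name
        return forward
    for name in ('Neg', 'Add'):
        setattr(ToyBuilder, name, make_forward(name))

_forward_methods_to_toy_builder()

builder = ToyBuilder()
assert builder.Neg(3) == -3 and builder.Add(2, 5) == 7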
# -*- coding: utf-8 -*- # Generated by Django 1.11.6 on 2017-11-13 09:33 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('questionnaires', '0001_initial'), ] operations = [ migrations.RenameModel( old_name='AttentionRelatedCognitiveErrors', new_name='AttentionRelatedCognitiveError', ), ]
warrenatmindset/DjangoFlowApp
questionnaires/migrations/0002_auto_20171113_0933.py
Python
mit
440
from django.contrib.staticfiles.storage import StaticFilesStorage from pipeline.storage import PipelineMixin from storages.backends.s3boto import S3BotoStorage class S3PipelineStorage(PipelineMixin, S3BotoStorage): pass class PipelineStorage(PipelineMixin, StaticFilesStorage): pass
smallmultiples/smu-storage
__init__.py
Python
mit
292
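Since the module above only defines storage classes, a usage sketch may help: the classes are meant to be referenced from Django settings. The dotted module path and the bucket name below are assumptions made for illustration, not values from the repository.

# settings.py fragment (illustrative; the import path and bucket name are assumptions)
STATICFILES_STORAGE = 'smu_storage.S3PipelineStorage'  # or PipelineStorage for local static files
AWS_STORAGE_BUCKET_NAME = 'example-static-bucket'      # django-storages S3 setting, hypothetical value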
from typing import Optional, Callable from slack_sdk.socket_mode.request import SocketModeRequest class AsyncWebSocketMessageListener(Callable): async def __call__( client: "AsyncBaseSocketModeClient", # noqa: F821 message: dict, raw_message: Optional[str] = None, ): # noqa: F821 raise NotImplementedError() class AsyncSocketModeRequestListener(Callable): async def __call__( client: "AsyncBaseSocketModeClient", # noqa: F821 request: SocketModeRequest, ): # noqa: F821 raise NotImplementedError()
slackhq/python-slackclient
slack_sdk/socket_mode/async_listeners.py
Python
mit
580
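The two classes above only pin down the call signatures for Socket Mode listeners; a concrete listener is simply an async callable with the same shape. The sketch below follows common slack_sdk usage (acknowledge the envelope, then inspect the payload); the acknowledgement call and the listener-registration attribute should be verified against the installed slack_sdk version.

from slack_sdk.socket_mode.request import SocketModeRequest
from slack_sdk.socket_mode.response import SocketModeResponse

async def handle_request(client, request: SocketModeRequest):
    # Acknowledge the envelope first so Slack does not re-deliver it (common slack_sdk usage).
    await client.send_socket_mode_response(SocketModeResponse(envelope_id=request.envelope_id))
    print(request.type, request.payload)

# Registration, shown commented out (attribute name as used by slack_sdk's Socket Mode clients):
# client.socket_mode_request_listeners.append(handle_request)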
import os import subprocess from collections import defaultdict from datetime import datetime import csv from csv import DictReader import math from glob import glob # Data locations loc_train = "../data/train.csv" loc_test = "../data/test.csv" loc_labels = "../data/trainLabels.csv" loc_best = "test.pred2.csv" # best submission loc_model_prefix = "../data/" loc_preds_prefix = "../data/" # Will be created loc_test_vw = "../data/test.vw" loc_train_vw = "../data/train_y33.vw" loc_train_vw_temp = "../data/train_yn_temp.vw" # used for relabelling loc_kaggle_submission = "test.pred.vw.csv" def load_data(loc_csv, nrows=0): with open(loc_csv) as fd: reader = csv.reader(fd) header = reader.next()[1:] labels = [] for row in reader: row = map(int, row) labels.append(row[1:]) return labels def to_vw(loc_csv, loc_out, y, y_nr=33, train=True): print("\nConverting %s"%loc_csv) with open(loc_out,"wb") as outfile: distribution = 0 for linenr, row in enumerate( DictReader(open(loc_csv,"rb")) ): hash_features = ["x35","x91","x94","x95","x34","x4","x65","x64","x61","x3"] yes_no_features = ["x92","x93","x101","x103","x130","x102","x10","x11","x12","x13","x14","x25","x24","x26","x32","x33","x30","x31","x141","x140","x142","x45","x44","x43","x42","x41","x2","x1","x55","x56","x57","x129","x128","x127","x126","x105","x63","x62","x87","x86","x85","x116","x117","x115","x104","x74","x75","x72","x73","x71"] pos_features = ["x23","x22","x113","x114","x53","x54","x138","x139"] float_features = ["x70","x77","x96","x97","x98","x99","x107","x135","x100","x137","x132","x19","x16","x29","x28","x36","x37","x38","x39","x122","x144","x145","x47","x40","x110","x119","x60","x120","x121","x123","x124","x125","x59","x52","x50","x7","x6","x8","x9","x40","x144","x145","x122","x39","x38","x37","x36"] n_h = "" n_b = "" n_p = "" n_f = "" n_r = "" for k in row: if k is not "id": if k in hash_features: n_h += " %s_%s"%(k,row[k]) elif k in yes_no_features: n_b += " %s_%s"%(k,row[k]) elif k in pos_features: n_p += " %s_%s"%(k,row[k]) elif k in float_features and row[k] is not "": n_f += " %s_%s"%(k,row[k]) elif k in float_features and row[k] is "": n_f += " %s_%s"%(k,row[k]) else: n_r += " %s_%s"%(k,row[k]) if train: label = y[linenr][y_nr-1] if label == 1: distribution += 1 else: label = -1 else: label = 1 id = row["id"] outfile.write("%s '%s |h%s |b%s |p%s |f%s |r%s\n"%(label,id,n_h,n_b,n_p,n_f,n_r) ) if linenr % 100000 == 0: print("%s\t%s"%(linenr,distribution)) print(distribution) def relabel_vw(loc_vw, loc_out, loc_labels, y, y_i = 0): print("Relabelling to dataset %s..."%loc_out) start = datetime.now() with open(loc_out,"wb") as outfile: for e, line in enumerate( open( loc_vw, "rb") ): if y[e][y_i-1] == 0: new_id = -1 else: new_id = 1 outfile.write( "%s %s\n"%(new_id," ".join(line.strip().split()[1:])) ) print("\ncompleted in :( %s\n"%(str(datetime.now()-start))) def sigmoid(x): return 1 / (1 + math.exp(-x)) def to_kaggle(loc_preds, loc_best_sub, loc_out_sub, y_nr): preds = {} for e, line in enumerate( open(loc_preds,"rb") ): preds[line.strip().split()[1]] = sigmoid(float(line.strip().split()[0])) with open(loc_out_sub,"wb") as outfile: for e, line in enumerate( open(loc_best_sub,"rb") ): row = line.strip().split(",") if e == 0: outfile.write(line) elif "y"+str(y_nr)+"," not in line: outfile.write(line) else: outfile.write("%s,%s\n"%(row[0],preds[row[0].replace("_y"+str(y_nr),"")])) print("Finished writing Kaggle submission: %s"%loc_out_sub) if __name__ == "__main__": #Load labels, remove the id y = load_data(loc_labels) #Create train 
set for label y33, and a test set with dummy labels to_vw(loc_train, loc_train_vw, y, y_nr=33, train=True) to_vw(loc_test, loc_test_vw, y, train=False) #Train and test VW now #Add the VW predictions to our best submission file #to_kaggle("preds_y33.p.txt", loc_best, loc_kaggle_submission, y_nr=33)
timpalpant/KaggleTSTextClassification
others/tradeshift.py
Python
gpl-3.0
4,544
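Because to_vw() above assembles its output with a fairly dense format string, here is a small standalone illustration of the Vowpal Wabbit line it emits: label, tag, then one namespace per feature group. The feature values are made up for illustration.

# Illustration of the VW line format written by to_vw():
# "<label> '<id> |h ... |b ... |p ... |f ... |r ..."
label, row_id = 1, "12345"
namespaces = {
    "h": " x35_a1b2 x91_ff00",   # hashed/categorical features (values made up)
    "b": " x92_YES x93_NO",      # yes/no features
    "p": " x23_3 x22_0",         # position-like features
    "f": " x70_0.25 x77_1.5",    # float features
    "r": " x1_misc",             # everything else
}
vw_line = "%s '%s |h%s |b%s |p%s |f%s |r%s" % (
    label, row_id, namespaces["h"], namespaces["b"],
    namespaces["p"], namespaces["f"], namespaces["r"])
print(vw_line)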
#!/usr/bin/env python """ Manipulates MacOS alias records. """ from classicbox.alias.record import Extra from classicbox.alias.record import print_alias_record from classicbox.alias.record import read_alias_record from classicbox.alias.record import write_alias_record from classicbox.io import BytesIO import sys def main(args): # Path to a file that contains an alias record. # # This is equivalent to the contents of an 'alis' resource, # which is the primary resource contained in an alias file (command, alias_record_file_filepath) = args if alias_record_file_filepath == '-': alias_record = None else: with open(alias_record_file_filepath, 'rb') as input: alias_record = read_alias_record(input) if command == 'info': print_alias_record(alias_record) elif command == 'test_read_write': output = BytesIO() write_alias_record(output, alias_record) verify_matches(output, alias_record_file_filepath, alias_record) elif command == 'test_read_write_no_extras': alias_record['extras'] = [] output = BytesIO() write_alias_record(output, alias_record) output.seek(0) alias_record_no_extras = read_alias_record(output) if alias_record_no_extras['extras'] == []: print 'OK' else: print 'Expected empty extras.' elif command == 'test_write_custom_matching': test_write_custom_matching(alias_record_file_filepath, alias_record) else: sys.exit('Unrecognized command: %s' % command) return def test_write_custom_matching(alias_record_file_filepath, alias_record): # "AppAlias.rsrc.dat" output = BytesIO() write_alias_record(output, { 'alias_kind': 0, 'volume_name': 'Boot', 'volume_created': 3431272487, 'parent_directory_id': 542, 'file_name': 'app', 'file_number': 543, # NOTE: Can't get file_created reliably from hfsutil CLI 'file_created': 3265652246, 'file_type': 'APPL', 'file_creator': 'AQt7', 'nlvl_from': 1, 'nlvl_to': 1, 'extras': [ Extra(0, 'parent_directory_name', 'B'), Extra(1, 'directory_ids', [542, 541, 484]), Extra(2, 'absolute_path', 'Boot:AutQuit7:A:B:app'), Extra(0xFFFF, 'end', None) ] }) if alias_record_file_filepath != '-': verify_matches(output, alias_record_file_filepath, alias_record) def verify_matches(output, alias_record_file_filepath, alias_record): actual_output = output.getvalue() with open(alias_record_file_filepath, 'rb') as file: expected_output = file.read() matches = (actual_output == expected_output) if matches: print 'OK' else: print ' Expected: ' + repr(expected_output) print ' Actual: ' + repr(actual_output) print print_alias_record(alias_record) # ------------------------------------------------------------------------------ if __name__ == '__main__': main(sys.argv[1:])
davidfstr/ClassicBox
alias_record.py
Python
gpl-2.0
3,197
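For readers who want to call the classicbox helpers directly rather than through the command wrappers above, a minimal hedged sketch follows; the file name is the example name mentioned in the script's own comments and is assumed to exist on disk as a raw 'alis' resource dump.

from classicbox.alias.record import read_alias_record, print_alias_record

with open('AppAlias.rsrc.dat', 'rb') as f:  # assumed dump of an 'alis' resource
    record = read_alias_record(f)
print_alias_record(record)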
from django.test import TestCase from django.utils import timezone from rest_framework_json_api.utils import format_relation_name from rest_framework_json_api.serializers import ResourceIdentifierObjectSerializer from example.models import Blog, Entry, Author class TestResourceIdentifierObjectSerializer(TestCase): def setUp(self): self.blog = Blog.objects.create(name='Some Blog', tagline="It's a blog") self.entry = Entry.objects.create( blog=self.blog, headline='headline', body_text='body_text', pub_date=timezone.now(), mod_date=timezone.now(), n_comments=0, n_pingbacks=0, rating=3 ) for i in range(1,6): name = 'some_author{}'.format(i) self.entry.authors.add( Author.objects.create(name=name, email='{}@example.org'.format(name)) ) def test_data_in_correct_format_when_instantiated_with_blog_object(self): serializer = ResourceIdentifierObjectSerializer(instance=self.blog) expected_data = {'type': format_relation_name('Blog'), 'id': str(self.blog.id)} assert serializer.data == expected_data def test_data_in_correct_format_when_instantiated_with_entry_object(self): serializer = ResourceIdentifierObjectSerializer(instance=self.entry) expected_data = {'type': format_relation_name('Entry'), 'id': str(self.entry.id)} assert serializer.data == expected_data def test_deserialize_primitive_data_blog(self): initial_data = { 'type': format_relation_name('Blog'), 'id': str(self.blog.id) } serializer = ResourceIdentifierObjectSerializer(data=initial_data, model_class=Blog) self.assertTrue(serializer.is_valid(), msg=serializer.errors) assert serializer.validated_data == self.blog def test_data_in_correct_format_when_instantiated_with_queryset(self): qs = Author.objects.all() serializer = ResourceIdentifierObjectSerializer(instance=qs, many=True) type_string = format_relation_name('Author') author_pks = Author.objects.values_list('pk', flat=True) expected_data = [{'type': type_string, 'id': str(pk)} for pk in author_pks] assert serializer.data == expected_data def test_deserialize_many(self): type_string = format_relation_name('Author') author_pks = Author.objects.values_list('pk', flat=True) initial_data = [{'type': type_string, 'id': str(pk)} for pk in author_pks] serializer = ResourceIdentifierObjectSerializer(data=initial_data, model_class=Author, many=True) self.assertTrue(serializer.is_valid(), msg=serializer.errors) print(serializer.data)
hnakamur/django-rest-framework-json-api
example/tests/test_serializers.py
Python
bsd-2-clause
2,803
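The assertions above all reduce to one fact about JSON:API resource identifier objects: the serializer emits a type string plus a stringified primary key. A standalone illustration of that shape is below; the exact type string in the real project depends on format_relation_name and the JSON:API formatting settings, so 'blogs' here is only illustrative.

blog_pk = 1
resource_identifier = {'type': 'blogs', 'id': str(blog_pk)}  # 'blogs' is an illustrative type string
assert set(resource_identifier) == {'type', 'id'}
print(resource_identifier)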
#-*- coding:utf-8 -*-
'''
    Linear Aggregation
    NOTE: x_label should be `m * n` dataset.
'''
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression

def linear_aggregation(dataset, x_test):
    y_label = np.array([float(i) for i in dataset])
    x_label = np.linspace(1, y_label.size, y_label.size)
    model = LinearRegression()
    model.fit(x_label.reshape(-1, 1), y_label)
    # scikit-learn expects 2-D feature arrays: predict the requested extrapolation
    # point explicitly, then the fitted line over the training range for plotting.
    y_next = model.predict(np.array([[x_test]]))[0]
    print("predicted value at x = %s: %s" % (x_test, y_next))
    y_test = model.predict(x_label.reshape(-1, 1))
    plt.figure(figsize = (8, 4))
    plt.plot(x_label, y_label, label = "origin", color = "red", linewidth = 2)
    plt.plot(x_label, y_test, label = "predict", color = "blue", linewidth = 2)
    min_y = min(y_test.min(), y_label.min())
    max_y = max(y_test.max(), y_label.max())
    plt.ylim(min_y - 0.5 * abs(min_y), max_y + 0.5 * abs(min_y))
    plt.xlabel("X")
    plt.ylabel("Y")
    plt.legend()
    plt.title("Linear-Aggregation")
    plt.show()

dataset = [u'0.7965', u'0.8072', u'0.8342', u'0.8244', u'0.8000', u'0.8025',
           u'0.8119', u'0.8172', u'0.8109', u'0.8095', u'0.8161', u'0.8009',
           u'0.7911', u'0.7780', u'0.8197', u'0.8101', u'0.8280', u'0.8408',
           u'0.8390']
linear_aggregation(dataset, len(dataset) + 1)
Justontheway/data-science
python/regression/LinearAggregation.py
Python
apache-2.0
1,234
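As a quick standalone cross-check of the fit performed above: with a single feature, LinearRegression reduces to an ordinary least-squares line, so numpy.polyfit on the same points should give the same slope and intercept (the values below are the first five entries of the script's dataset).

import numpy as np

# polyfit with degree 1 returns (slope, intercept), highest power first.
y = np.array([0.7965, 0.8072, 0.8342, 0.8244, 0.8000])
x = np.arange(1, y.size + 1)
slope, intercept = np.polyfit(x, y, 1)
print(slope, intercept, slope * (y.size + 1) + intercept)  # extrapolated next value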
# Copyright 2021 UW-IT, University of Washington # SPDX-License-Identifier: Apache-2.0 class TermNotStarted(Exception): pass
uw-it-aca/canvas-analytics
data_aggregator/exceptions.py
Python
apache-2.0
131
# -*- coding: utf-8 -*- ############################################################################## # # ______ Releasing children from poverty _ # / ____/___ ____ ___ ____ ____ ___________(_)___ ____ # / / / __ \/ __ `__ \/ __ \/ __ `/ ___/ ___/ / __ \/ __ \ # / /___/ /_/ / / / / / / /_/ / /_/ (__ |__ ) / /_/ / / / / # \____/\____/_/ /_/ /_/ .___/\__,_/____/____/_/\____/_/ /_/ # /_/ # in Jesus' name # # Copyright (C) 2018 Compassion CH (http://www.compassion.ch) # @author: Emanuel Cino <ecino@compassion.ch> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## # pylint: disable=C8101 { 'name': 'Compassion Events Website', 'version': '10.0.2.0.0', 'category': 'Other', 'author': 'Compassion CH', 'license': 'AGPL-3', 'website': 'https://github.com/CompassionCH/compassion-modules/tree/10.0', 'depends': [ 'website_compassion', 'crm_compassion', 'event', 'partner_communication_switzerland', 'payment_ogone_compassion', 'survey_phone' ], 'external_dependencies': { 'python': ['magic'], }, 'data': [ 'security/ir.model.access.csv', 'security/access_rules.xml', 'data/event_type.xml', 'data/event_registration_stage.xml', 'data/event_registration_task.xml', 'data/product.xml', 'data/group_visit_emails.xml', 'data/communication_config.xml', 'data/survey.xml', 'data/event_message_subtype.xml', 'views/event_compassion_open_wizard.xml', 'views/event_compassion_view.xml', 'views/event_event_view.xml', 'views/event_registration_view.xml', 'views/registration_stage_view.xml', 'views/registration_task_view.xml', 'views/event_faq_view.xml', 'views/res_vaccine_view.xml', 'views/advocate_details.xml', 'views/event_info_party_wizard.xml', 'views/event_flight_view.xml', 'views/event_type_view.xml', 'templates/assets.xml', 'templates/event_page.xml', 'templates/events_list.xml', 'templates/event_registration.xml', 'templates/participants_list.xml', 'templates/participant_page.xml', 'templates/donation_result.xml', 'templates/event_faq.xml', 'templates/group_visit_step2.xml', 'templates/group_visit_medical_info.xml', 'templates/group_visit_practical_information.xml', 'templates/group_visit_party_invitation.xml', 'templates/robots.xml', 'wizards/event_registration_communication_wizard.xml', ], 'demo': [ 'demo/crm_event_demo.xml' ], 'development_status': 'Beta', 'installable': True, 'auto_install': False, }
ecino/compassion-switzerland
website_event_compassion/__manifest__.py
Python
agpl-3.0
3,522
#!/usr/bin/python import picamera import RPi.GPIO as GPIO from LocalVariables import takepicture camera = picamera.PiCamera() def TakePicture(): camera.capture('image.jpg') print('Picture Taken')
Multipixelone/BlindRemote
TakePicture.py
Python
gpl-3.0
207
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('app', '0007_remove_post_post_num'), ] operations = [ migrations.RemoveField( model_name='post', name='submitter', ), migrations.RemoveField( model_name='thread', name='submitter', ), ]
paramsingh/backpage
app/migrations/0008_auto_20150117_2249.py
Python
mit
457
#-*- encoding:utf-8 -*- ''' Created on Dec 1, 2014 @author: letian ''' import networkx as nx from Segmentation import Segmentation import numpy as np import math class TextRank4Sentence(object): def __init__(self, stop_words_file = None, delimiters='?!;?!。;…\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences = None self.words_no_filter = None # 2维列表 self.words_no_stop_words = None self.words_all_filters = None self.graph = None self.key_sentences = None def train(self, text, lower = False, speech_tag_filter=True, source = 'no_stop_words', sim_func = 'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences = [] (self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); # - # print self.sentences if source == 'no_filter': source = self.words_no_filter elif source == 'all_filters': source = self.words_all_filters else: source = self.words_no_stop_words sim_func = self._get_similarity_standard sentences_num = len(source) self.graph = np.zeros((sentences_num, sentences_num)) for x in xrange(sentences_num): for y in xrange(x, sentences_num): similarity = sim_func(source[x], source[y]) self.graph[x, y] = similarity self.graph[y, x] = similarity # for x in xrange(sentences_num): # row_sum = np.sum(self.graph[x, :]) # if row_sum > 0: # self.graph[x, :] = self.graph[x, :] / row_sum # print self.graph nx_graph = nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) # this is a dict sorted_scores = sorted(scores.items(), key = lambda item: item[1], reverse=True) # print sorted_scores for index, _ in sorted_scores: self.key_sentences.append(self.sentences[index]) # print '\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2 =self._gen_vectors(word_list1, word_list2) # print vector1, vector2 vector3 = [vector1[x]*vector2[x] for x in xrange(len(vector1))] vector4 = [1 for num in vector3 if num > 0.] co_occur_num = sum(vector4) # print co_occur_num if co_occur_num == 0.: return 0. denominator = math.log(float(len(word_list1))) + math.log(float(len(word_list2))) # 分母 if denominator == 0.: return 0. 
return co_occur_num / denominator def _gen_vectors(self, word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words = list(set(word_list1 + word_list2)) vector1 = [float(word_list1.count(word)) for word in words] vector2 = [float(word_list2.count(word)) for word in words] return vector1, vector2 def get_key_sentences(self, num = 6, sentence_min_len = 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result = [] count = 0 for sentence in self.key_sentences: if count >= num: break if len(sentence) >= sentence_min_len: result.append(sentence) count += 1 return result if __name__ == '__main__': import codecs # text = codecs.open('../text/03.txt', 'r', 'utf-8').read() text = "这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。" tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True, source = 'all_filters') print '\n'.join(tr4s.get_key_sentences(num=1)) print '\n'.join(tr4s.sentences) for wl in tr4s.words_no_filter: print '[', ', \''.join(wl), ']' print for wl in tr4s.words_no_stop_words: print '[', ', \''.join(wl), ']' print for wl in tr4s.words_all_filters: print '[', ', \''.join(wl), ']'
MSC19950601/TextRank4ZH
textrank4zh/TextRank4Sentence.py
Python
mit
6,656
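The heart of TextRank4Sentence above is _get_similarity_standard; stripped of its vector bookkeeping, it counts the word types two sentences share and normalises by the log of the two sentence lengths. A compact Python 3 equivalent:

import math

def similarity(words1, words2):
    # Number of word types that occur in both sentences.
    co_occur = len(set(words1) & set(words2))
    if co_occur == 0:
        return 0.0
    # Same normalisation as the class above: log(len(s1)) + log(len(s2)).
    denominator = math.log(len(words1)) + math.log(len(words2))
    if denominator == 0.0:
        return 0.0
    return co_occur / denominator

print(similarity(['the', 'hotel', 'is', 'nice'], ['the', 'hotel', 'opens', 'late']))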
# mysql/reflection.py # Copyright (C) 2005-2017 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php import re from ... import log, util from ... import types as sqltypes from .enumerated import _EnumeratedValues, SET from .types import DATETIME, TIME, TIMESTAMP class ReflectedState(object): """Stores raw information about a SHOW CREATE TABLE statement.""" def __init__(self): self.columns = [] self.table_options = {} self.table_name = None self.keys = [] self.fk_constraints = [] self.ck_constraints = [] @log.class_logger class MySQLTableDefinitionParser(object): """Parses the results of a SHOW CREATE TABLE statement.""" def __init__(self, dialect, preparer): self.dialect = dialect self.preparer = preparer self._prep_regexes() def parse(self, show_create, charset): state = ReflectedState() state.charset = charset for line in re.split(r'\r?\n', show_create): if line.startswith(' ' + self.preparer.initial_quote): self._parse_column(line, state) # a regular table options line elif line.startswith(') '): self._parse_table_options(line, state) # an ANSI-mode table options line elif line == ')': pass elif line.startswith('CREATE '): self._parse_table_name(line, state) # Not present in real reflection, but may be if # loading from a file. elif not line: pass else: type_, spec = self._parse_constraints(line) if type_ is None: util.warn("Unknown schema content: %r" % line) elif type_ == 'key': state.keys.append(spec) elif type_ == 'fk_constraint': state.fk_constraints.append(spec) elif type_ == 'ck_constraint': state.ck_constraints.append(spec) else: pass return state def _parse_constraints(self, line): """Parse a KEY or CONSTRAINT line. :param line: A line of SHOW CREATE TABLE output """ # KEY m = self._re_key.match(line) if m: spec = m.groupdict() # convert columns into name, length pairs spec['columns'] = self._parse_keyexprs(spec['columns']) return 'key', spec # FOREIGN KEY CONSTRAINT m = self._re_fk_constraint.match(line) if m: spec = m.groupdict() spec['table'] = \ self.preparer.unformat_identifiers(spec['table']) spec['local'] = [c[0] for c in self._parse_keyexprs(spec['local'])] spec['foreign'] = [c[0] for c in self._parse_keyexprs(spec['foreign'])] return 'fk_constraint', spec # CHECK constraint m = self._re_ck_constraint.match(line) if m: spec = m.groupdict() return 'ck_constraint', spec # PARTITION and SUBPARTITION m = self._re_partition.match(line) if m: # Punt! return 'partition', line # No match. return (None, line) def _parse_table_name(self, line, state): """Extract the table name. :param line: The first line of SHOW CREATE TABLE """ regex, cleanup = self._pr_name m = regex.match(line) if m: state.table_name = cleanup(m.group('name')) def _parse_table_options(self, line, state): """Build a dictionary of all reflected table-level options. :param line: The final line of SHOW CREATE TABLE output. 
""" options = {} if not line or line == ')': pass else: rest_of_line = line[:] for regex, cleanup in self._pr_options: m = regex.search(rest_of_line) if not m: continue directive, value = m.group('directive'), m.group('val') if cleanup: value = cleanup(value) options[directive.lower()] = value rest_of_line = regex.sub('', rest_of_line) for nope in ('auto_increment', 'data directory', 'index directory'): options.pop(nope, None) for opt, val in options.items(): state.table_options['%s_%s' % (self.dialect.name, opt)] = val def _parse_column(self, line, state): """Extract column details. Falls back to a 'minimal support' variant if full parse fails. :param line: Any column-bearing line from SHOW CREATE TABLE """ spec = None m = self._re_column.match(line) if m: spec = m.groupdict() spec['full'] = True else: m = self._re_column_loose.match(line) if m: spec = m.groupdict() spec['full'] = False if not spec: util.warn("Unknown column definition %r" % line) return if not spec['full']: util.warn("Incomplete reflection of column definition %r" % line) name, type_, args = spec['name'], spec['coltype'], spec['arg'] try: col_type = self.dialect.ischema_names[type_] except KeyError: util.warn("Did not recognize type '%s' of column '%s'" % (type_, name)) col_type = sqltypes.NullType # Column type positional arguments eg. varchar(32) if args is None or args == '': type_args = [] elif args[0] == "'" and args[-1] == "'": type_args = self._re_csv_str.findall(args) else: type_args = [int(v) for v in self._re_csv_int.findall(args)] # Column type keyword options type_kw = {} if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)): if type_args: type_kw['fsp'] = type_args.pop(0) for kw in ('unsigned', 'zerofill'): if spec.get(kw, False): type_kw[kw] = True for kw in ('charset', 'collate'): if spec.get(kw, False): type_kw[kw] = spec[kw] if issubclass(col_type, _EnumeratedValues): type_args = _EnumeratedValues._strip_values(type_args) if issubclass(col_type, SET) and '' in type_args: type_kw['retrieve_as_bitwise'] = True type_instance = col_type(*type_args, **type_kw) col_kw = {} # NOT NULL col_kw['nullable'] = True # this can be "NULL" in the case of TIMESTAMP if spec.get('notnull', False) == 'NOT NULL': col_kw['nullable'] = False # AUTO_INCREMENT if spec.get('autoincr', False): col_kw['autoincrement'] = True elif issubclass(col_type, sqltypes.Integer): col_kw['autoincrement'] = False # DEFAULT default = spec.get('default', None) if default == 'NULL': # eliminates the need to deal with this later. default = None comment = spec.get('comment', None) if comment is not None: comment = comment.replace("\\\\", "\\").replace("''", "'") col_d = dict(name=name, type=type_instance, default=default, comment=comment) col_d.update(col_kw) state.columns.append(col_d) def _describe_to_create(self, table_name, columns): """Re-format DESCRIBE output as a SHOW CREATE TABLE string. DESCRIBE is a much simpler reflection and is sufficient for reflecting views for runtime use. This method formats DDL for columns only- keys are omitted. :param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples. SHOW FULL COLUMNS FROM rows must be rearranged for use with this function. 
""" buffer = [] for row in columns: (name, col_type, nullable, default, extra) = \ [row[i] for i in (0, 1, 2, 4, 5)] line = [' '] line.append(self.preparer.quote_identifier(name)) line.append(col_type) if not nullable: line.append('NOT NULL') if default: if 'auto_increment' in default: pass elif (col_type.startswith('timestamp') and default.startswith('C')): line.append('DEFAULT') line.append(default) elif default == 'NULL': line.append('DEFAULT') line.append(default) else: line.append('DEFAULT') line.append("'%s'" % default.replace("'", "''")) if extra: line.append(extra) buffer.append(' '.join(line)) return ''.join([('CREATE TABLE %s (\n' % self.preparer.quote_identifier(table_name)), ',\n'.join(buffer), '\n) ']) def _parse_keyexprs(self, identifiers): """Unpack '"col"(2),"col" ASC'-ish strings into components.""" return self._re_keyexprs.findall(identifiers) def _prep_regexes(self): """Pre-compile regular expressions.""" self._re_columns = [] self._pr_options = [] _final = self.preparer.final_quote quotes = dict(zip(('iq', 'fq', 'esc_fq'), [re.escape(s) for s in (self.preparer.initial_quote, _final, self.preparer._escape_identifier(_final))])) self._pr_name = _pr_compile( r'^CREATE (?:\w+ +)?TABLE +' r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($' % quotes, self.preparer._unescape_identifier) # `col`,`col2`(32),`col3`(15) DESC # # Note: ASC and DESC aren't reflected, so we'll punt... self._re_keyexprs = _re_compile( r'(?:' r'(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)' r'(?:\((\d+)\))?(?=\,|$))+' % quotes) # 'foo' or 'foo','bar' or 'fo,o','ba''a''r' self._re_csv_str = _re_compile(r'\x27(?:\x27\x27|[^\x27])*\x27') # 123 or 123,456 self._re_csv_int = _re_compile(r'\d+') # `colname` <type> [type opts] # (NOT NULL | NULL) # DEFAULT ('value' | CURRENT_TIMESTAMP...) # COMMENT 'comment' # COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT) # STORAGE (DISK|MEMORY) self._re_column = _re_compile( r" " r"%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +" r"(?P<coltype>\w+)" r"(?:\((?P<arg>(?:\d+|\d+,\d+|" r"(?:'(?:''|[^'])*',?)+))\))?" r"(?: +(?P<unsigned>UNSIGNED))?" r"(?: +(?P<zerofill>ZEROFILL))?" r"(?: +CHARACTER SET +(?P<charset>[\w_]+))?" r"(?: +COLLATE +(?P<collate>[\w_]+))?" r"(?: +(?P<notnull>(?:NOT )?NULL))?" r"(?: +DEFAULT +(?P<default>" r"(?:NULL|'(?:''|[^'])*'|[\w\(\)]+" r"(?: +ON UPDATE [\w\(\)]+)?)" r"))?" r"(?: +(?P<autoincr>AUTO_INCREMENT))?" r"(?: +COMMENT +'(?P<comment>(?:''|[^'])*)')?" r"(?: +COLUMN_FORMAT +(?P<colfmt>\w+))?" r"(?: +STORAGE +(?P<storage>\w+))?" r"(?: +(?P<extra>.*))?" r",?$" % quotes ) # Fallback, try to parse as little as possible self._re_column_loose = _re_compile( r' ' r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +' r'(?P<coltype>\w+)' r'(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?' r'.*?(?P<notnull>(?:NOT )NULL)?' % quotes ) # (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))? # (`col` (ASC|DESC)?, `col` (ASC|DESC)?) # KEY_BLOCK_SIZE size | WITH PARSER name self._re_key = _re_compile( r' ' r'(?:(?P<type>\S+) )?KEY' r'(?: +%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?' r'(?: +USING +(?P<using_pre>\S+))?' r' +\((?P<columns>.+?)\)' r'(?: +USING +(?P<using_post>\S+))?' r'(?: +KEY_BLOCK_SIZE *[ =]? *(?P<keyblock>\S+))?' r'(?: +WITH PARSER +(?P<parser>\S+))?' r'(?: +COMMENT +(?P<comment>(\x27\x27|\x27([^\x27])*?\x27)+))?' 
r',?$' % quotes ) # CONSTRAINT `name` FOREIGN KEY (`local_col`) # REFERENCES `remote` (`remote_col`) # MATCH FULL | MATCH PARTIAL | MATCH SIMPLE # ON DELETE CASCADE ON UPDATE RESTRICT # # unique constraints come back as KEYs kw = quotes.copy() kw['on'] = 'RESTRICT|CASCADE|SET NULL|NOACTION' self._re_fk_constraint = _re_compile( r' ' r'CONSTRAINT +' r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +' r'FOREIGN KEY +' r'\((?P<local>[^\)]+?)\) REFERENCES +' r'(?P<table>%(iq)s[^%(fq)s]+%(fq)s' r'(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +' r'\((?P<foreign>[^\)]+?)\)' r'(?: +(?P<match>MATCH \w+))?' r'(?: +ON DELETE (?P<ondelete>%(on)s))?' r'(?: +ON UPDATE (?P<onupdate>%(on)s))?' % kw ) # CONSTRAINT `CONSTRAINT_1` CHECK (`x` > 5)' # testing on MariaDB 10.2 shows that the CHECK constraint # is returned on a line by itself, so to match without worrying # about parenthesis in the expresion we go to the end of the line self._re_ck_constraint = _re_compile( r' ' r'CONSTRAINT +' r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +' r'CHECK +' r'\((?P<sqltext>.+)\),?' % kw ) # PARTITION # # punt! self._re_partition = _re_compile(r'(?:.*)(?:SUB)?PARTITION(?:.*)') # Table-level options (COLLATE, ENGINE, etc.) # Do the string options first, since they have quoted # strings we need to get rid of. for option in _options_of_type_string: self._add_option_string(option) for option in ('ENGINE', 'TYPE', 'AUTO_INCREMENT', 'AVG_ROW_LENGTH', 'CHARACTER SET', 'DEFAULT CHARSET', 'CHECKSUM', 'COLLATE', 'DELAY_KEY_WRITE', 'INSERT_METHOD', 'MAX_ROWS', 'MIN_ROWS', 'PACK_KEYS', 'ROW_FORMAT', 'KEY_BLOCK_SIZE'): self._add_option_word(option) self._add_option_regex('UNION', r'\([^\)]+\)') self._add_option_regex('TABLESPACE', r'.*? STORAGE DISK') self._add_option_regex( 'RAID_TYPE', r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+') _optional_equals = r'(?:\s*(?:=\s*)|\s+)' def _add_option_string(self, directive): regex = (r'(?P<directive>%s)%s' r"'(?P<val>(?:[^']|'')*?)'(?!')" % (re.escape(directive), self._optional_equals)) self._pr_options.append(_pr_compile( regex, lambda v: v.replace("\\\\", "\\").replace("''", "'") )) def _add_option_word(self, directive): regex = (r'(?P<directive>%s)%s' r'(?P<val>\w+)' % (re.escape(directive), self._optional_equals)) self._pr_options.append(_pr_compile(regex)) def _add_option_regex(self, directive, regex): regex = (r'(?P<directive>%s)%s' r'(?P<val>%s)' % (re.escape(directive), self._optional_equals, regex)) self._pr_options.append(_pr_compile(regex)) _options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY', 'PASSWORD', 'CONNECTION') def _pr_compile(regex, cleanup=None): """Prepare a 2-tuple of compiled regex and callable.""" return (_re_compile(regex), cleanup) def _re_compile(regex): """Compile a string to regex, I and UNICODE.""" return re.compile(regex, re.I | re.UNICODE)
Haynie-Research-and-Development/jarvis
deps/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/reflection.py
Python
gpl-2.0
16,703
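MySQLTableDefinitionParser above is essentially a bundle of pre-compiled regular expressions applied line by line to SHOW CREATE TABLE output. The standalone snippet below is a heavily simplified stand-in (not the dialect's actual pattern) showing the basic idea for a column line: capture the backtick-quoted name, the type keyword, and any parenthesised argument.

import re

# Simplified illustration only; the real _re_column handles charsets, defaults,
# comments, quoting rules and much more.
line = "  `name` varchar(32) NOT NULL DEFAULT '',"
m = re.match(r"\s*`(?P<name>[^`]+)`\s+(?P<coltype>\w+)(?:\((?P<arg>[^)]*)\))?", line)
print(m.group('name'), m.group('coltype'), m.group('arg'))  # -> name varchar 32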
print ("Welcome To Aboyun App") print ("We are here to help you have a healthy pregnancy. :) ") firsttrimester=["Nausea","Morning Sickness", "strange food cravings", "Unusual tiredness"] secondtrimester=["swelling of feet or hands","dizziness","skin changes"] thirdtrimester=["false labour contractions,back ache","bleeding"] trimester=input("Enter your trimester?") if trimester=="1": print("Congratulations! You are in your first trimester") print("You should be experiencing the following symptoms;" ) print (firsttrimester) elif trimester=="2": print("Way to go! You are in your second trimester") print("You should be experiencing the following symptoms;" ) print(secondtrimester) elif trimester=="3": print("You are in your third trimester. Baby's almost here") print("You should be experiencing the following symptoms;" ) print(thirdtrimester) else: print ("I'm not sure")
markessien/aboyun
aboyun.py
Python
mit
891
"""Handles all processes to clouds. The :py:class:`WorkerManager` class is a :py:class:`jacket.manager.Manager` that handles RPC calls relating to creating instances. It is responsible for building a disk image, launching it via the underlying virtualization driver, responding to calls to check its state, attaching persistent storage, and terminating it. """ import functools from oslo_config import cfg from oslo_log import log as logging import oslo_messaging as messaging from jacket.compute.cloud import manager as com_manager from jacket.compute import exception as com_exception from jacket import exception from jacket.i18n import _LE from jacket import manager from jacket import rpc from jacket.storage.volume import manager as vol_manager from jacket.storage.backup import manager as bak_manager CONF = cfg.CONF LOG = logging.getLogger(__name__) get_notifier = functools.partial(rpc.get_notifier, service='worker') wrap_exception = functools.partial(com_exception.wrap_exception, get_notifier=get_notifier) class WorkerManager(manager.Manager): """Manages the running instances from creation to destruction.""" RPC_API_VERSION = '1.0' target = messaging.Target(version="1.0") def __init__(self, *args, **kwargs): """Load configuration options and connect to the cloud.""" super(WorkerManager, self).__init__(service_name="worker", *args, **kwargs) self.compute_manager = com_manager.ComputeManager() backend = None if CONF.enabled_backends: for backend in CONF.enabled_backends: break self.storage_manager = vol_manager.VolumeManager(service_name=backend) self.backup_manager = bak_manager.BackupManager() self.additional_endpoints.append(self.compute_manager) self.additional_endpoints.append(self.storage_manager) self.additional_endpoints.append(self.backup_manager) self.compute_driver = self.compute_manager.driver self.storage_driver = self.storage_manager.storage_driver # use storage manage rpc version # self.RPC_API_VERSION = self.storage_manager.RPC_API_VERSION def init_host(self): """Initialization for a standalone cloud service.""" # super(WorkerManager, self).init_host() # jacket init host TODO self.compute_manager.init_host() self.storage_manager.init_host() self.backup_manager.init_host() def cleanup_host(self): # super(WorkerManager, self).cleanup_host() # jacket cleanup host TODO self.compute_manager.cleanup_host() self.storage_manager.cleanup_host() def pre_start_hook(self): # super(WorkerManager, self).pre_start_hook() # jacket pre_start_hook TODO self.compute_manager.pre_start_hook() self.storage_manager.pre_start_hook() def post_start_hook(self): # super(WorkerManager, self).post_start_hook() # jacket post_start_hook TODO self.compute_manager.post_start_hook() self.storage_manager.post_start_hook() def reset(self): # super(WorkerManager, self).reset() # jacket post_start_hook TODO self.compute_manager.reset() self.storage_manager.reset() self.backup_manager.reset() def _require_driver_support(self, driver, method): if not hasattr(driver, method): driver_name = driver.__class__.__name__ LOG.error(_LE("driver %s not support method %s"), driver_name, method) raise exception.DriverNotSupported() @wrap_exception() def sub_flavor_detail(self, context): self._require_driver_support(self.compute_driver, 'sub_flavor_detail') return self.compute_driver.sub_flavor_detail(context) @wrap_exception() def sub_vol_type_detail(self, context): self._require_driver_support(self.storage_driver, 'sub_vol_type_detail') return self.storage_driver.sub_vol_type_detail(context) def image_sync(self, context, image, 
flavor=None, image_sync=None, ret_volume=False): self._require_driver_support(self.compute_manager, 'image_sync') return self.compute_manager.image_sync(context, image, flavor, image_sync, ret_volume=ret_volume)
HybridF5/jacket
jacket/worker/manager.py
Python
apache-2.0
4,440
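WorkerManager above repeatedly uses the same guard-then-delegate shape: _require_driver_support checks that the pluggable driver actually implements a method before the call is forwarded to it. Below is a minimal standalone sketch of that pattern; all names are illustrative, not taken from the jacket code base.

class DriverNotSupported(Exception):
    pass

class MinimalDriver(object):
    def sub_flavor_detail(self, context):
        return [{'name': 'tiny', 'vcpus': 1}]  # illustrative payload

class Manager(object):
    def __init__(self, driver):
        self.driver = driver

    def _require_driver_support(self, driver, method):
        # Fail with a clear error instead of an AttributeError deep in the call.
        if not hasattr(driver, method):
            raise DriverNotSupported("%s does not implement %s"
                                     % (driver.__class__.__name__, method))

    def sub_flavor_detail(self, context):
        self._require_driver_support(self.driver, 'sub_flavor_detail')
        return self.driver.sub_flavor_detail(context)

print(Manager(MinimalDriver()).sub_flavor_detail(context=None))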
from setuptools import setup, find_packages def readme(): with open('./README.rst') as f: return f.read() setup(name='pInteServ', version='0.131', description='Module and cli for client/server directory sync.', long_description=readme(), url='https://github.com/paulcrook726/PiCloud.git', author='Paul Crook', author_email='paulcrook726@gmail.com', license='GNU', entry_points={ 'console_scripts': ['picli = pytp.client_cli:main', 'piserver = pytp.server:main'], }, packages=find_packages(), classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python :: 3.5', 'Topic :: Internet :: File Transfer Protocol (FTP)', ], install_requires=[ 'pynacl',] )
paulcrook726/pInteServ
src/setup.py
Python
gpl-2.0
848
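The entry_points block above is what turns the package into two command-line tools: after installation, 'picli' invokes pytp.client_cli.main() and 'piserver' invokes pytp.server.main(). The stub below is a hypothetical stand-in for such a main(), shown only to make the wiring concrete; it is not the project's actual implementation.

# pytp/client_cli.py (hypothetical illustrative stub, not the real module)
import argparse

def main():
    parser = argparse.ArgumentParser(prog='picli',
                                     description='Client for directory sync (illustrative stub).')
    parser.add_argument('path', help='directory to sync')
    args = parser.parse_args()
    print('would sync %s' % args.path)

if __name__ == '__main__':
    main()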
""" Django settings for HydaiNoWebsite project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '5u2#4zdiaxujgi#mig*#3z!%+*=_q1m*yw6=4x2t1493f2idem' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'index', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'HydaiNoWebsite.urls' WSGI_APPLICATION = 'HydaiNoWebsite.wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ STATIC_ROOT = os.path.join(BASE_DIR, 'static') STATIC_URL = '/static/'
hydai/HydaiNoWebsite
HydaiNoWebsite/settings.py
Python
mit
2,126
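The two SECURITY WARNING comments in the settings module above flag that the hard-coded `SECRET_KEY` and `DEBUG = True` are only acceptable for development. One common way to harden such a settings file is to read both from the environment with development fallbacks, sketched below; the variable names `DJANGO_SECRET_KEY` and `DJANGO_DEBUG` are a convention, not part of this project.

```python
import os

# Development fallbacks; override both in production via the environment.
SECRET_KEY = os.environ.get(
    'DJANGO_SECRET_KEY',
    '5u2#4zdiaxujgi#mig*#3z!%+*=_q1m*yw6=4x2t1493f2idem')
DEBUG = os.environ.get('DJANGO_DEBUG', 'true').lower() == 'true'
```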
from .tools import tools_bp
from .heritability import heritability_bp
from .indel_primer import indel_primer_bp
AndersenLab/CeNDR
base/views/tools/__init__.py
Python
mit
112
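The package `__init__.py` above re-exports three objects whose `_bp` suffix suggests Flask blueprints. Assuming that is what they are, a minimal sketch of how such blueprints are typically registered on an application is shown below; the `create_app` factory and the `url_prefix` values are assumptions for illustration, not taken from the CeNDR code base.

```python
# Illustrative only: registering blueprints exposed by a package like this.
from flask import Flask

from base.views.tools import tools_bp, heritability_bp, indel_primer_bp


def create_app():
    app = Flask(__name__)
    app.register_blueprint(tools_bp, url_prefix='/tools')
    app.register_blueprint(heritability_bp, url_prefix='/tools/heritability')
    app.register_blueprint(indel_primer_bp, url_prefix='/tools/indel_primer')
    return app
```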
import numpy as np import cv2 from matplotlib import pyplot as plt # This function looks for contours (consecutive points) that could potentially be the pupil and narrows contours by area size and location within the frame def getContours(image): global mask # uses opencv function findContours to find contours _,contours, hierarchy = cv2.findContours(image, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE) cnts = sorted(contours, key = cv2.contourArea, reverse = True)[:10] mainContour = None mainMoments = None contourCentreX = None contourCentreY = None maxArea = 0.0 # loops through all contours detected and narrows possible pupil by area size and location for c in cnts: area = cv2.contourArea(c) if area > maxArea and area > 600 and area < 5000: #ensure the correct contour is detected M_2 = cv2.moments(c) cX = int(M_2['m10']/M_2['m00']) cY = int(M_2['m01']/M_2['m00']) if cX >= topLeftCornerX and cY >= topLeftCornerY and cX <= bottomRightCornerX and cY <= bottomRightCornerY: maxArea = area mainContour = c M = cv2.moments(c) contourCentreX = int(M['m10']/M['m00']) contourCentreY = int(M['m01']/M['m00']) return contourCentreX, contourCentreY, mainContour # This function looks for contours (consecutive points) that could potentially be the pupil and narrows contours by area size and location within the frame. Copy method by used for separate video def getContoursVideo(image): global mask _,contours, hierarchy = cv2.findContours(image, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE) cnts = sorted(contours, key = cv2.contourArea, reverse = True)[:10] mainContour = None mainMoments = None contourCentreX = None contourCentreY = None maxArea = 0.0 # loops through all contours detected and narrows possible pupil by area size and location for c in cnts: area = cv2.contourArea(c) if area > maxArea and area > 600 and area < 12000: #ensure the correct contour is detected M_2 = cv2.moments(c) cX = int(M_2['m10']/M_2['m00']) cY = int(M_2['m01']/M_2['m00']) if cX >= topLeftCornerX and cY >= topLeftCornerY and cX <= bottomRightCornerX and cY <= bottomRightCornerY: maxArea = area mainContour = c M = cv2.moments(c) contourCentreX = int(M['m10']/M['m00']) contourCentreY = int(M['m01']/M['m00']) return contourCentreX, contourCentreY, mainContour # This function looks for contours (consecutive points) that could potentially be the glint and narrows contours by area size and location to pupil. 
def getContoursCorneal(image):
    global mask
    _, contours, hierarchy = cv2.findContours(image, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
    cnts = sorted(contours, key=cv2.contourArea, reverse=True)[:len(contours)]
    mainContour = None
    mainMoments = None
    contourCentreX = None
    contourCentreY = None
    contourList = []
    maxArea = 0.0
    prevDist = 1000.0
    # print " "
    for c in cnts:
        area = cv2.contourArea(c)
        M = cv2.moments(c)
        if M['m00'] == 0:
            M['m00'] = 1
        cX = int(M['m10'] / M['m00'])
        cY = int(M['m01'] / M['m00'])
        # ensure the correct contour is detected
        if area > maxArea and area < 250 and abs(cpX - cX) < 50 and abs(cpY - cY) < 50 and cY >= cpY:
            deltaX = abs(cpX - cX)
            deltaY = abs(cpY - cY)
            # Euclidean distance to the pupil centre (** is exponentiation; ^ would be bitwise XOR)
            dist = np.sqrt(deltaX**2 + deltaY**2)
            if dist < prevDist:
                prevDist = dist
                contourList.append(c)
                maxArea = area
                mainContour = c
                M = cv2.moments(c)
                contourCentreX = int(M['m10'] / M['m00'])
                contourCentreY = int(M['m01'] / M['m00'])
    contourImg = np.zeros((470, 620), np.uint8)
    contourImg = cv2.cvtColor(contourImg, cv2.COLOR_GRAY2BGR)
    cv2.drawContours(contourImg, contourList, -1, (0, 0, 255), 3)
    return contourCentreX, contourCentreY, mainContour

# This function looks for contours (consecutive points) that could potentially be the glint and narrows
# contours by area size and distance from the pupil. Copy of the method used for the separate video path
def getContoursCornealVideo(image):
    global mask
    _, contours, hierarchy = cv2.findContours(image, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
    cnts = sorted(contours, key=cv2.contourArea, reverse=True)[:len(contours)]
    mainContour = None
    mainMoments = None
    contourCentreX = None
    contourCentreY = None
    contourList = []
    maxArea = 0.0
    for c in cnts:
        area = cv2.contourArea(c)
        M = cv2.moments(c)
        if M['m00'] == 0:
            M['m00'] = 1
        cX = int(M['m10'] / M['m00'])
        cY = int(M['m01'] / M['m00'])
        # ensure the correct contour is detected
        if area > maxArea and area < 150 and abs(cpX - cX) < 100 and abs(cpY - cY) < 100:
            contourList.append(c)
            maxArea = area
            mainContour = c
            M = cv2.moments(c)
            contourCentreX = int(M['m10'] / M['m00'])
            contourCentreY = int(M['m01'] / M['m00'])
    contourImg = np.zeros((470, 620), np.uint8)
    contourImg = cv2.cvtColor(contourImg, cv2.COLOR_GRAY2BGR)
    cv2.drawContours(contourImg, contourList, -1, (0, 0, 255), 3)
    return contourCentreX, contourCentreY, mainContour

# Input is 2 thresholded images: 1. for the pupil, 2. for the corneal glint. This function sets the
# boundaries to search for the pupil and runs both findContours functions, for pupil and glint
def edgeDetectionAlgorithm(pupilThreshold, cornealThreshold):
    global isPupilDetected
    global isCornealDetected
    global cpX
    global cpY
    global topLeftCornerX
    global topLeftCornerY
    global bottomRightCornerX
    global bottomRightCornerY
    cpX = None
    cpY = None
    ccX = None
    ccY = None
    cp = None
    cc = None
    isPupilDetected = 0
    isCornealDetected = 0
    successfullyDetected = True
    edgeLimit = 4
    w, h = pupilThreshold.shape
    topLeftCornerX = h / edgeLimit
    topLeftCornerY = w / edgeLimit
    bottomRightCornerX = h / edgeLimit * (edgeLimit - 1)
    bottomRightCornerY = w / edgeLimit * (edgeLimit - 1)
    #--------------------- Detect pupil ----------------------------------#
    cpX, cpY, cp = getContours(pupilThreshold)
    if cpX is None or cpY is None:  # check if pupil has been detected
        successfullyDetected = False
        cv2.waitKey(1)
    else:
        #--------------------- Detect corneal --------------------------------#
        # Need thresholded image for corneal detection
        ccX, ccY, cc = getContoursCorneal(cornealThreshold)
        if ccX is None or ccY is None:  # check if glint has been detected
            successfullyDetected = False
            cv2.waitKey(1)
    return cpX, cpY, cp, ccX, ccY, cc, successfullyDetected

# Input is 2 thresholded images: 1. for the pupil, 2. for the corneal glint. This function sets the
# boundaries to search for the pupil and runs both findContours functions, for pupil and glint.
# This copy is specific to video
def edgeDetectionAlgorithmVideo(pupilThreshold, cornealThreshold):
    global isPupilDetected
    global isCornealDetected
    global cpX
    global cpY
    global topLeftCornerX
    global topLeftCornerY
    global bottomRightCornerX
    global bottomRightCornerY
    cpX = None
    cpY = None
    ccX = None
    ccY = None
    cp = None
    cc = None
    isPupilDetected = 0
    isCornealDetected = 0
    successfullyDetected = True
    edgeLimit = 8
    #------------ Sets boundary for detecting pupil -----------------#
    w, h = pupilThreshold.shape
    topLeftCornerX = h / edgeLimit
    topLeftCornerY = w / edgeLimit
    bottomRightCornerX = h / edgeLimit * (edgeLimit - 1)
    bottomRightCornerY = w / edgeLimit * (edgeLimit - 1)
    #--------------------- Detect pupil ------------------------------#
    cpX, cpY, cp = getContoursVideo(pupilThreshold)
    if cpX is None or cpY is None:  # check if pupil has been detected
        successfullyDetected = False
        print("pupil not detected")
        cv2.waitKey(1)
    else:
        #--------------------- Detect corneal ----------------------------#
        # Need thresholded image for corneal detection
        ccX, ccY, cc = getContoursCornealVideo(cornealThreshold)
        if ccX is None or ccY is None:  # check if glint has been detected
            successfullyDetected = False
            print("corneal not detected")
            cv2.waitKey(1)
    return cpX, cpY, cp, ccX, ccY, cc, successfullyDetected
Qwertycal/19520-Eye-Tracker
Filtering/edgeDetection.py
Python
gpl-2.0
8,928
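The distance test in `getContoursCorneal` above depends on `**` being used for squaring; in Python `^` is bitwise XOR, which silently produces wrong distances for integer deltas. A quick check of the difference, and an equivalent one-liner:

```python
import numpy as np

deltaX, deltaY = 3, 4
print(np.sqrt(deltaX ** 2 + deltaY ** 2))  # 5.0, the intended Euclidean distance
print(deltaX ^ 2, deltaY ^ 2)              # 1 6  -> bitwise XOR, not squaring
print(np.hypot(deltaX, deltaY))            # 5.0, equivalent and overflow-safe
```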
import traceback
from datetime import datetime


class Fixture(object):
    def __init__(self, data):
        """Takes a dict converted from the JSON response by the API and
        wraps the fixture data within an object.

        :param data: The fixture data from the API's response.
        :type data: dict
        """
        self._home_team_ep = data['_links']['homeTeam']['href']
        self._away_team_ep = data['_links']['awayTeam']['href']
        self._competition_ep = data['_links']['competition']['href']

        self.date = datetime.strptime(data['date'], '%Y-%m-%dT%H:%M:%SZ')
        self.status = data['status']
        self.matchday = data['matchday']

        self.home_team = data['homeTeamName']
        self.home_team_id = int(self._home_team_ep.split("/")[-1])
        self.away_team = data['awayTeamName']
        self.away_team_id = int(self._away_team_ep.split("/")[-1])
        self.competition_id = int(self._competition_ep.split("/")[-1])

        if data['result']['goalsHomeTeam'] is not None:
            self.result = {
                'home_team_goals': data['result']['goalsHomeTeam'],
                'away_team_goals': data['result']['goalsAwayTeam'],
            }
            if 'halfTime' in data['result']:
                ht = data['result']['halfTime']
                self.result['half_time'] = {
                    'home_team_goals': ht['goalsHomeTeam'],
                    'away_team_goals': ht['goalsAwayTeam']
                }
        else:
            self.result = None

        if data['odds']:
            self.odds = {
                'home_win': float(data['odds']['homeWin']),
                'draw': float(data['odds']['draw']),
                'away_win': float(data['odds']['awayWin'])
            }
        else:
            self.odds = None
xozzo/pyfootball
pyfootball/models/fixture.py
Python
mit
1,806
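The Fixture constructor above documents, implicitly, the JSON shape it expects. A usage sketch with a hand-built payload is shown below; the values and URLs are made up to match the fields the constructor reads, only the structure matters.

```python
from pyfootball.models.fixture import Fixture

# Hand-built payload mirroring the fields the constructor reads (illustrative values).
data = {
    '_links': {
        'homeTeam': {'href': 'http://example.org/v1/teams/57'},
        'awayTeam': {'href': 'http://example.org/v1/teams/61'},
        'competition': {'href': 'http://example.org/v1/competitions/426'},
    },
    'date': '2017-08-12T14:00:00Z',
    'status': 'FINISHED',
    'matchday': 1,
    'homeTeamName': 'Arsenal FC',
    'awayTeamName': 'Chelsea FC',
    'result': {'goalsHomeTeam': 2, 'goalsAwayTeam': 1,
               'halfTime': {'goalsHomeTeam': 1, 'goalsAwayTeam': 0}},
    'odds': {'homeWin': '2.10', 'draw': '3.40', 'awayWin': '3.75'},
}

fixture = Fixture(data)
print(fixture.home_team, fixture.home_team_id)   # Arsenal FC 57
print(fixture.result['half_time'])
print(fixture.odds['home_win'])                  # 2.1
```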
from codecs import open import os on_rtd = os.environ.get('READTHEDOCS') == 'True' from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(here, 'README.rst')) as f: long_description = f.read() if on_rtd: requirements = ['psutil', 'xlrd>=1.0.0'] else: requirements = ['numpy_groupies>=0.9.6', 'pyqtgraph>=0.10', 'h5py>=2.6.0', 'igor', 'matplotlib>=2.0.0', 'scikit-learn>=0.17.1', 'xlrd>=1.0.0', 'psutil', 'scikit-image>=0.12.3', 'scipy>=0.17.1', 'numpy>=1.11.0', 'ipywidgets>=5.2.2', 'ipython>=5.1.0'] setup( name='pycroscopy', version='0.0.53', description='A suite of Python libraries for high performance scientific computing of microscopy data.', long_description=long_description, classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Cython', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: Scientific/Engineering :: Chemistry', 'Topic :: Scientific/Engineering :: Physics', 'Topic :: Scientific/Engineering :: Information Analysis', ], keywords='scientific microscopy data analysis', packages=find_packages(exclude='tests'), url='http://github.com/pycroscopy/pycroscopy', license='MIT', author='S. Somnath, C. R. Smith, N. Laanait', author_email='pycroscopy@gmail.com', # I don't remember how to do this correctly!!!. NL install_requires=requirements, # package_data={'sample':['dataset_1.dat']} test_suite='nose.collector', tests_require='Nose', dependency='', dependency_links=[''], include_package_data=True, # If there are data files included in your packages that need to be # installed, specify them here. If using Python 2.6 or less, then these # have to be included in MANIFEST.in as well. # package_data={ # 'sample': ['package_data.dat'], # }, # Although 'package_data' is the preferred approach, in some case you may # need to place data files outside of your packages. See: # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa # In this case, 'data_file' will be installed into '<sys.prefix>/my_data' # data_files=[('my_data', ['data/data_file'])], # To provide executable scripts, use entry points in preference to the # "scripts" keyword. Entry points provide cross-platform support and allow # pip to create the appropriate form of executable for the target platform. # entry_points={ # 'console_scripts': [ # 'sample=sample:main', # ], # }, )
anugrah-saxena/pycroscopy
setup.py
Python
mit
3,107
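The `on_rtd` switch in the setup.py above is the usual trick for keeping the heavy compiled stack out of Read the Docs builds, which only need enough installed to import the package and render docstrings. An alternative to branching on the environment is to publish the light set as the default and expose the heavy stack as an extra; a sketch follows, where the package name and the extra name `full` are assumptions for illustration, not something pycroscopy defines.

```python
from setuptools import setup, find_packages

setup(
    name='example-package',            # illustrative, not pycroscopy's real metadata
    version='0.0.1',
    packages=find_packages(),
    install_requires=['psutil', 'xlrd>=1.0.0'],
    extras_require={
        # `pip install example-package[full]` pulls in the heavy scientific stack
        'full': ['numpy>=1.11.0', 'scipy>=0.17.1', 'h5py>=2.6.0',
                 'matplotlib>=2.0.0', 'scikit-learn>=0.17.1'],
    },
)
```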
# # Generated by dumpDCWorkflow.py written by Sebastien Bigaret # Original workflow id/title: OIEStudentApplicationWorkflow/OIEStudentApplicationWorkflow # Date: 2008/05/28 15:48:39.623 GMT-5 # # WARNING: this dumps does NOT contain any scripts you might have added to # the workflow, IT IS YOUR RESPONSABILITY TO MAKE BACKUPS FOR THESE SCRIPTS. # # The following scripts have been detected and should be backed up: # - pre_declineFromFacultyReview (External Method) # - pre_manageDeadlines (External Method) # - pre_holdForFAIncomplete (External Method) # - pre_archive (External Method) # - pre_withdraw (External Method) # - pre_addToWaitlist (External Method) # - pre_assignSeat (External Method) # - post_archive (External Method) # - post_sendForDirectorReview (External Method) # - pre_sendForFacultyReview (External Method) # - post_addToWaitlist (External Method) # - post_decline (External Method) # - post_assertReadyForConditionalAdmit (External Method) # - pre_sendForProgramManagerReview (External Method) # - pre_waitForPrintedMaterials (External Method) # - pre_admitConditionally (External Method) # - pre_sendForDirectorReview (External Method) # - post_manageDeadlines (External Method) # - post_sendForProgramManagerReview (External Method) # - post_submit (External Method) # - post_waitForPrintedMaterials (External Method) # - pre_submit (External Method) # - post_withdraw (External Method) # - pre_decline (External Method) # - pre_addComment (External Method) # - post_sendForFacultyReview (External Method) # - post_declineFromFacultyReview (External Method) # - post_assignSeat (External Method) # - post_recheckForFAHold (External Method) # - post_holdForFAIncomplete (External Method) # - post_addComment (External Method) # - pre_facultyApproves (External Method) # - pre_approveForFA (External Method) # - post_admitConditionally (External Method) # - attempt_transition_to_FAHeld (External Method) # - pre_assertReadyForConditionalAdmit (External Method) # - post_approveForFA (External Method) # - pre_recheckForFAHold (External Method) # - post_facultyApproves (External Method) # """ Programmatically create a workflow type. 
""" __version__ = "$Id: dumpDCWorkflow.py 25723 2006-07-04 08:41:22Z b_mathieu $" from Products.CMFCore.WorkflowTool import addWorkflowFactory from Products.DCWorkflow.DCWorkflow import DCWorkflowDefinition from Products.PythonScripts.PythonScript import PythonScript from Products.ExternalMethod.ExternalMethod import ExternalMethod def setup_OIEStudentApplicationWorkflow(wf): """Setup the workflow """ wf.setProperties(title='OIEStudentApplicationWorkflow') for s in ('FAHeldIncomplete', 'archived', 'conditionallyAdmitted', 'deadlineManagement', 'declined', 'facApprovedNeedsProgramManagerReview', 'facultyReview', 'incomplete', 'needsDirectorReview', 'needsProgramManagerReview', 'private', 'readyForConditionalAdmit', 'seatAssigned', 'waitingForPrintMaterials', 'waitlist', 'withdrawn'): wf.states.addState(s) for t in ('addComment', 'addToWaitlist', 'admitConditionally', 'approveForFA', 'archive', 'assertReadyForConditionalAdmit', 'assignSeat', 'decline', 'declineFromFacultyReview', 'facultyApproves', 'holdForFAIncomplete', 'manageDeadlines', 'recheckForFAHold', 'sendForDirectorReview', 'sendForFacultyReview', 'sendForProgramManagerReview', 'submit', 'waitForPrintedMaterials', 'withdraw'): wf.transitions.addTransition(t) for v in ('action', 'actor', 'comments', 'review_history', 'time'): wf.variables.addVariable(v) for l in ('uwosh_oie_worklist_archived', 'uwosh_oie_worklist_conditionallyadmitted', 'uwosh_oie_worklist_deadlinemanagement', 'uwosh_oie_worklist_declined', 'uwosh_oie_worklist_facapprovedneedsprogrammanagerreview', 'uwosh_oie_worklist_facultyreview', 'uwosh_oie_worklist_faheldincomplete', 'uwosh_oie_worklist_incomplete', 'uwosh_oie_worklist_needsdirectorreview', 'uwosh_oie_worklist_needsprogrammanagerreview', 'uwosh_oie_worklist_private', 'uwosh_oie_worklist_readyforconditionaladmit', 'uwosh_oie_worklist_seatassigned', 'uwosh_oie_worklist_waitingforprintmaterials', 'uwosh_oie_worklist_waitlist', 'uwosh_oie_worklist_withdrawn'): wf.worklists.addWorklist(l) for p in ('list', 'Modify portal content', 'View', 'Access contents information', 'UWOshOIE: Review OIE Application', 'UWOshOIE: Modify revisable fields', 'UWOshOIE: Modify Financial Aid fields', 'UWOshOIE: Modify Office Use Only fields', 'UWOshOIE: Modify normal fields', 'Undo changes', 'List undoable changes'): wf.addManagedPermission(p) # Initial State wf.states.setInitialState('private') # State Initialization sdef = wf.states['FAHeldIncomplete'] sdef.setProperties(title='Financial Aid Held Incomplete', description='You will only study abroad if financial aid is available; you do not have a seat on this study abroad program. 
This application can not be processed further until you release your Financial Aid HOLD.', transitions=('addComment', 'approveForFA', 'decline', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['archived'] sdef.setProperties(title='Archived', description='Archived applications are stored indefinitely and cannot be altered.', transitions=('addComment',)) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, []) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, []) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, []) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, []) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, []) sdef.setPermission('UWOshOIE: Modify normal fields', 0, []) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['conditionallyAdmitted'] sdef.setProperties(title='Conditionally Admitted', description='You have been conditionally admitted but you do not yet have a seat on a study abroad program. ----- For most programs, seats are assigned in the order in which qualified applicants have completed their applications to this point PROVIDED the applicant ALSO completes all remaining requirements by the Friday following the application deadline. 
Therefore, applicants on the waiting list may receive seats if applicants who have been conditionally admitted have not completed all requirements. ----- For competitive programs, seats are assigned to the most qualified applicants provided STEP III has also been completed by the Friday following the application deadline.', transitions=('addComment', 'decline', 'manageDeadlines', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['deadlineManagement'] sdef.setProperties(title='Deadline Management', description='You have been conditionally admitted but you do not yet have a seat on a study abroad program. ----- For most programs, seats are assigned in the order in which qualified applicants have completed their applications to this point PROVIDED the applicant ALSO completes all remaining requirements by the Friday following the application deadline. Therefore, applicants on the waiting list may receive seats if applicants who have been conditionally admitted have not completed all requirements. 
----- For competetive programs, seats are assigned to the most qualified applicants provided STEP III has also been completed by the Friday following the application deadline.', transitions=('addComment', 'assignSeat', 'decline', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEFacReview', 'UWOshOIEFrontLineAdvisor']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['declined'] sdef.setProperties(title='Declined', description='The study abroad application has been declined.', transitions=('addComment', 'archive')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, []) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, []) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, []) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, []) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, []) sdef.setPermission('UWOshOIE: Modify normal fields', 0, []) sdef.setPermission('Undo changes', 1, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 1, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['facApprovedNeedsProgramManagerReview'] sdef.setProperties(title='Faculty Approved and Now Needs Program Manager Review', description='The faculty reviewer has reviewed this application. 
Your application is pending further review by the OIE Program Manager; you do not yet have a seat on this study abroad program.', transitions=('addComment', 'addToWaitlist', 'assertReadyForConditionalAdmit', 'decline', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['facultyReview'] sdef.setProperties(title='Faculty Review', description='Your application is pending faculty review; you do not yet have a seat on this study abroad program.', transitions=('addComment', 'declineFromFacultyReview', 'facultyApproves', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEFacReview']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = 
wf.states['incomplete'] sdef.setProperties(title='Incomplete', description='You have completed part of the application process but must submit additional materials to the Office of International Education. This application cannot be processed further until these materials have been submitted; you do not yet have a seat on this study abroad program.', transitions=('addComment', 'decline', 'holdForFAIncomplete', 'recheckForFAHold', 'waitForPrintedMaterials', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEFrontLineAdvisor']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['needsDirectorReview'] sdef.setProperties(title='Needs Director Review', description='Your application is pending review by the OIE Director; you do not yet have a seat on this study abroad program.', transitions=('addComment', 'decline', 'sendForProgramManagerReview', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEDirector']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 
'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['needsProgramManagerReview'] sdef.setProperties(title='Needs Program Manager Review', description='Your application is pending review by the OIE Program Manager; you do not yet have a seat on this study abroad program.', transitions=('addComment', 'addToWaitlist', 'assertReadyForConditionalAdmit', 'decline', 'sendForFacultyReview', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['private'] sdef.setProperties(title='Private', description='The Office of International Education does not currently have access to your application. You MUST submit your application using the purple "SUBMIT APP" button below.', transitions=('addComment', 'submit', 'withdraw')) sdef.setPermission('list', 0, ['Owner']) sdef.setPermission('Modify portal content', 0, ['Owner']) sdef.setPermission('View', 0, ['Owner']) sdef.setPermission('Access contents information', 0, ['Owner']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, []) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, []) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, []) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['Owner']) sdef.setPermission('Undo changes', 1, []) sdef.setPermission('List undoable changes', 1, []) sdef = wf.states['readyForConditionalAdmit'] sdef.setProperties(title='Ready For Conditionally Admit', description='The OIE is sorting applications by completion date & time in order to determine conditional admission. 
You do not yet have a seat on this study abroad program.', transitions=('addComment', 'admitConditionally', 'decline', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['seatAssigned'] sdef.setProperties(title='Seat Assigned', description='The application process is complete and a seat has been assigned on a study abroad program. 
Welcome to the program!', transitions=('addComment', 'archive', 'decline', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEFrontLineAdvisor']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['waitingForPrintMaterials'] sdef.setProperties(title='Waiting For Printed Materials', description='You have completed part of the application process but must submit additional materials to the Office of International Education. 
This application cannot be processed further until these materials have been submitted; you do not yet have a seat on this study abroad program.', transitions=('addComment', 'decline', 'sendForDirectorReview', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEFrontLineAdvisor']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['waitlist'] sdef.setProperties(title='Waitlist', description='You are on the waiting list. 
You do not yet have a seat on this study abroad program.', transitions=('addComment', 'assertReadyForConditionalAdmit', 'decline', 'withdraw')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, ['UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, ['UWOshOIEDirector', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Modify normal fields', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef = wf.states['withdrawn'] sdef.setProperties(title='Withdrawn', description='You have withdrawn your on-line study abroad application only. You must officially withdraw from the study abroad program by contacting the OIE IN WRITING. 
See your email in-box for details.', transitions=('addComment', 'archive')) sdef.setPermission('list', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Modify portal content', 0, []) sdef.setPermission('View', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('Access contents information', 0, ['Owner', 'UWOshOIEDirector', 'UWOshOIEFacReview', 'UWOshOIEFinAid', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('UWOshOIE: Review OIE Application', 0, []) sdef.setPermission('UWOshOIE: Modify revisable fields', 0, []) sdef.setPermission('UWOshOIE: Modify Financial Aid fields', 0, []) sdef.setPermission('UWOshOIE: Modify Office Use Only fields', 0, []) sdef.setPermission('UWOshOIE: Modify normal fields', 0, []) sdef.setPermission('Undo changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) sdef.setPermission('List undoable changes', 0, ['UWOshOIEDirector', 'UWOshOIEFrontLineAdvisor', 'UWOshOIEProgramManager']) # Transition Initialization tdef = wf.transitions['addComment'] tdef.setProperties(title='Add a Comment', description='Add Comment', new_state_id='', trigger_type=1, script_name='pre_addComment', after_script_name='post_addComment', actbox_name='addComment', actbox_url='', actbox_category='workflow', props={'guard_roles': 'Owner; UWOshOIEFinAid; UWOshOIEExtReview; UWOshOIEFacReview; UWOshOIEFrontLineAdvisor; UWOshOIEProgramManager; UWOshOIEDirector'}, ) tdef = wf.transitions['addToWaitlist'] tdef.setProperties(title='Add this Student to the Waitlist', description='Add To Waitlist', new_state_id='waitlist', trigger_type=1, script_name='pre_addToWaitlist', after_script_name='post_addToWaitlist', actbox_name='addToWaitlist', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEProgramManager'}, ) tdef = wf.transitions['admitConditionally'] tdef.setProperties(title='Admit this Student Conditionally', description='Admit Conditionally', new_state_id='conditionallyAdmitted', trigger_type=1, script_name='pre_admitConditionally', after_script_name='post_admitConditionally', actbox_name='admitConditionally', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEProgramManager'}, ) tdef = wf.transitions['approveForFA'] tdef.setProperties(title='Approve for Financial Aid', description='Approve For Fin Aid', new_state_id='waitingForPrintMaterials', trigger_type=1, script_name='pre_approveForFA', after_script_name='post_approveForFA', actbox_name='approveForFA', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEFrontLineAdvisor; UWOshOIEProgramManager; UWOshOIEFinAid'}, ) tdef = wf.transitions['archive'] tdef.setProperties(title='Archive the Application', description='Archive App', new_state_id='archived', trigger_type=1, script_name='pre_archive', after_script_name='post_archive', actbox_name='archive', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEFrontLineAdvisor; UWOshOIEProgramManager; UWOshOIEDirector'}, ) tdef = wf.transitions['assertReadyForConditionalAdmit'] tdef.setProperties(title='Assert Applicant Is Ready to be Conditionally Admitted', description='Assert App For Con. 
Admit', new_state_id='readyForConditionalAdmit', trigger_type=1, script_name='pre_assertReadyForConditionalAdmit', after_script_name='post_assertReadyForConditionalAdmit', actbox_name='assertReadyForConditionalAdmit', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEProgramManager'}, ) tdef = wf.transitions['assignSeat'] tdef.setProperties(title='Assign a Seat to the Applicant and Complete the Application.', description='Assign Seat', new_state_id='seatAssigned', trigger_type=1, script_name='pre_assignSeat', after_script_name='post_assignSeat', actbox_name='assignSeat', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEFrontLineAdvisor'}, ) tdef = wf.transitions['decline'] tdef.setProperties(title='Decline This Application', description='Decline', new_state_id='declined', trigger_type=1, script_name='pre_decline', after_script_name='post_decline', actbox_name='decline', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEFrontLineAdvisor; UWOshOIEProgramManager; UWOshOIEDirector'}, ) tdef = wf.transitions['declineFromFacultyReview'] tdef.setProperties(title='Decline This Application From Faculty Review', description='Faculty Decline', new_state_id='declined', trigger_type=1, script_name='pre_declineFromFacultyReview', after_script_name='post_declineFromFacultyReview', actbox_name='declinedFromFacultyReview', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEFacReview; UWOshOIEFrontLineAdvisor; UWOshOIEProgramManager; UWOshOIEDirector'}, ) tdef = wf.transitions['facultyApproves'] tdef.setProperties(title='Approve This Application From Faculty Review', description='Faculty Approve', new_state_id='facApprovedNeedsProgramManagerReview', trigger_type=1, script_name='pre_facultyApproves', after_script_name='post_facultyApproves', actbox_name='facultyApproves', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEFacReview; UWOshOIEFrontLineAdvisor; UWOshOIEProgramManager; UWOshOIEDirector'}, ) tdef = wf.transitions['holdForFAIncomplete'] tdef.setProperties(title='Hold For Financial Aid - incomplete', description='Hold For Fin Aid', new_state_id='FAHeldIncomplete', trigger_type=1, script_name='pre_holdForFAIncomplete', after_script_name='post_holdForFAIncomplete', actbox_name='holdForFAIncomplete', actbox_url='', actbox_category='workflow', props=None, ) tdef = wf.transitions['manageDeadlines'] tdef.setProperties(title='Begin Managing Deadlines For This Application', description='Manage Deadlines', new_state_id='deadlineManagement', trigger_type=1, script_name='pre_manageDeadlines', after_script_name='post_manageDeadlines', actbox_name='manageDeadlines', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEFrontLineAdvisor; UWOshOIEProgramManager; UWOshOIEDirector'}, ) tdef = wf.transitions['recheckForFAHold'] tdef.setProperties(title='Check If HOLD Has Been Set for Financial Aid', description='Recheck For FA Hold', new_state_id='incomplete', trigger_type=1, script_name='pre_recheckForFAHold', after_script_name='post_recheckForFAHold', actbox_name='recheckForFAHold', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEFrontLineAdvisor'}, ) tdef = wf.transitions['sendForDirectorReview'] tdef.setProperties(title='Send to OIE Director For Review', description='Send For Director Rev', new_state_id='needsDirectorReview', trigger_type=1, script_name='pre_sendForDirectorReview', after_script_name='post_sendForDirectorReview', 
actbox_name='sendForDirectorReview', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEFrontLineAdvisor'}, ) tdef = wf.transitions['sendForFacultyReview'] tdef.setProperties(title='Send to Faculty For Review', description='Send For Fac Rev', new_state_id='facultyReview', trigger_type=1, script_name='pre_sendForFacultyReview', after_script_name='post_sendForFacultyReview', actbox_name='sendForFacultyReview', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEProgramManager'}, ) tdef = wf.transitions['sendForProgramManagerReview'] tdef.setProperties(title='Send To OIE Program Manager For Review', description='Send For Pgm Mgr Rev', new_state_id='needsProgramManagerReview', trigger_type=1, script_name='pre_sendForProgramManagerReview', after_script_name='post_sendForProgramManagerReview', actbox_name='sendForProgramManagerReview', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEDirector'}, ) tdef = wf.transitions['submit'] tdef.setProperties(title='Submit Application To OIE', description='Submit App', new_state_id='incomplete', trigger_type=1, script_name='pre_submit', after_script_name='post_submit', actbox_name='submit', actbox_url='', actbox_category='workflow', props={'guard_roles': 'Owner'}, ) tdef = wf.transitions['waitForPrintedMaterials'] tdef.setProperties(title='Wait For Printed Materials From Student', description='Wait For Printed Materials', new_state_id='waitingForPrintMaterials', trigger_type=1, script_name='pre_waitForPrintedMaterials', after_script_name='post_waitForPrintedMaterials', actbox_name='waitForPrintedMaterials', actbox_url='', actbox_category='workflow', props={'guard_roles': 'UWOshOIEFrontLineAdvisor'}, ) tdef = wf.transitions['withdraw'] tdef.setProperties(title='Withdraw This Application From Consideration', description='Withdraw App', new_state_id='withdrawn', trigger_type=1, script_name='pre_withdraw', after_script_name='post_withdraw', actbox_name='withdraw', actbox_url='', actbox_category='workflow', props={'guard_roles': 'Owner'}, ) # State Variable wf.variables.setStateVar('review_state') # Variable Initialization vdef = wf.variables['action'] vdef.setProperties(description='The last transition', default_value='', default_expr='transition/getId|nothing', for_catalog=0, for_status=1, update_always=1, props=None) vdef = wf.variables['actor'] vdef.setProperties(description='The ID of the user who performed the last transition', default_value='', default_expr='user/getId', for_catalog=0, for_status=1, update_always=1, props=None) vdef = wf.variables['comments'] vdef.setProperties(description='Comments about the last transition', default_value='', default_expr="python:state_change.kwargs.get('comment','')", for_catalog=0, for_status=1, update_always=1, props=None) vdef = wf.variables['review_history'] vdef.setProperties(description='Provides access to workflow history', default_value='', default_expr='state_change/getHistory', for_catalog=0, for_status=0, update_always=0, props={'guard_permissions': 'Request review; Review portal content; View'}) vdef = wf.variables['time'] vdef.setProperties(description='Time of the last transition', default_value='', default_expr='state_change/getDateTime', for_catalog=0, for_status=1, update_always=1, props=None) # Worklist Initialization ldef = wf.worklists['uwosh_oie_worklist_archived'] ldef.setProperties(description='', actbox_name='archived (%(count)d)', actbox_url='%(portal_url)s/search?review_state=archived', actbox_category='global', 
props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'archived'}) ldef = wf.worklists['uwosh_oie_worklist_conditionallyadmitted'] ldef.setProperties(description='', actbox_name='conditionallyAdmitted (%(count)d)', actbox_url='%(portal_url)s/search?review_state=conditionallyAdmitted', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'conditionallyAdmitted'}) ldef = wf.worklists['uwosh_oie_worklist_deadlinemanagement'] ldef.setProperties(description='', actbox_name='', actbox_url='', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'deadlineManagement'}) ldef = wf.worklists['uwosh_oie_worklist_declined'] ldef.setProperties(description='', actbox_name='declined (%(count)d)', actbox_url='%(portal_url)s/search?review_state=declined', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'declined'}) ldef = wf.worklists['uwosh_oie_worklist_facapprovedneedsprogrammanagerreview'] ldef.setProperties(description='', actbox_name='facApprovedNeedsProgramManagerReview (%(count)d)', actbox_url='%(portal_url)s/search?review_state=facApprovedNeedsProgramManagerReview', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'facApprovedNeedsProgramManagerReview'}) ldef = wf.worklists['uwosh_oie_worklist_facultyreview'] ldef.setProperties(description='', actbox_name='facultyReview (%(count)d)', actbox_url='%(portal_url)s/search?review_state=facultyReview', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'facultyReview'}) ldef = wf.worklists['uwosh_oie_worklist_faheldincomplete'] ldef.setProperties(description='', actbox_name='FAHeldIncomplete (%(count)d)', actbox_url='%(portal_url)s/search?review_state=FAHeldIncomplete', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'FAHeldIncomplete'}) ldef = wf.worklists['uwosh_oie_worklist_incomplete'] ldef.setProperties(description='', actbox_name='incomplete (%(count)d)', actbox_url='%(portal_url)s/search?review_state=incomplete', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'incomplete'}) ldef = wf.worklists['uwosh_oie_worklist_needsdirectorreview'] ldef.setProperties(description='', actbox_name='', actbox_url='', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'needsDirectorReview'}) ldef = wf.worklists['uwosh_oie_worklist_needsprogrammanagerreview'] ldef.setProperties(description='', actbox_name='', actbox_url='', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'needsProgramManagerReview'}) ldef = wf.worklists['uwosh_oie_worklist_private'] ldef.setProperties(description='', actbox_name='private (%(count)d)', actbox_url='%(portal_url)s/search?review_state=private', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'private'}) ldef = wf.worklists['uwosh_oie_worklist_readyforconditionaladmit'] ldef.setProperties(description='', actbox_name='', actbox_url='', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 
'readyForConditionalAdmit'}) ldef = wf.worklists['uwosh_oie_worklist_seatassigned'] ldef.setProperties(description='', actbox_name='', actbox_url='', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'seatAssigned'}) ldef = wf.worklists['uwosh_oie_worklist_waitingforprintmaterials'] ldef.setProperties(description='', actbox_name='waitingForPrintMaterials (%(count)d)', actbox_url='%(portal_url)s/search?review_state=waitingForPrintMaterials', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'waitingForPrintMaterials'}) ldef = wf.worklists['uwosh_oie_worklist_waitlist'] ldef.setProperties(description='', actbox_name='waitlist (%(count)d)', actbox_url='%(portal_url)s/search?review_state=waitlist', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'waitlist'}) ldef = wf.worklists['uwosh_oie_worklist_withdrawn'] ldef.setProperties(description='', actbox_name='withdrawn (%(count)d)', actbox_url='%(portal_url)s/search?review_state=withdrawn', actbox_category='global', props={'guard_permissions': 'UWOshOIE: Review OIE Application', 'var_match_review_state': 'withdrawn'}) # Script Initialization wf.scripts._setObject('attempt_transition_to_FAHeld', ExternalMethod('attempt_transition_to_FAHeld', 'attempt_transition_to_FAHeld', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'attempt_transition_to_FAHeld')) wf.scripts._setObject('post_addComment', ExternalMethod('post_addComment', 'post_addComment', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_addComment')) wf.scripts._setObject('post_addToWaitlist', ExternalMethod('post_addToWaitlist', 'post_addToWaitlist', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_addToWaitlist')) wf.scripts._setObject('post_admitConditionally', ExternalMethod('post_admitConditionally', 'post_admitConditionally', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_admitConditionally')) wf.scripts._setObject('post_approveForFA', ExternalMethod('post_approveForFA', 'post_approveForFA', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_approveForFA')) wf.scripts._setObject('post_archive', ExternalMethod('post_archive', 'post_archive', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_archive')) wf.scripts._setObject('post_assertReadyForConditionalAdmit', ExternalMethod('post_assertReadyForConditionalAdmit', 'post_assertReadyForConditionalAdmit', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_assertReadyForConditionalAdmit')) wf.scripts._setObject('post_assignSeat', ExternalMethod('post_assignSeat', 'post_assignSeat', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_assignSeat')) wf.scripts._setObject('post_decline', ExternalMethod('post_decline', 'post_decline', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_decline')) wf.scripts._setObject('post_declineFromFacultyReview', ExternalMethod('post_declineFromFacultyReview', 'post_declineFromFacultyReview', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_declineFromFacultyReview')) wf.scripts._setObject('post_facultyApproves', ExternalMethod('post_facultyApproves', 'post_facultyApproves', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_facultyApproves')) wf.scripts._setObject('post_holdForFAIncomplete', ExternalMethod('post_holdForFAIncomplete', 'post_holdForFAIncomplete', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_holdForFAIncomplete')) 
wf.scripts._setObject('post_manageDeadlines', ExternalMethod('post_manageDeadlines', 'post_manageDeadlines', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_manageDeadlines')) wf.scripts._setObject('post_recheckForFAHold', ExternalMethod('post_recheckForFAHold', 'post_recheckForFAHold', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_recheckForFAHold')) wf.scripts._setObject('post_sendForDirectorReview', ExternalMethod('post_sendForDirectorReview', 'post_sendForDirectorReview', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_sendForDirectorReview')) wf.scripts._setObject('post_sendForFacultyReview', ExternalMethod('post_sendForFacultyReview', 'post_sendForFacultyReview', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_sendForFacultyReview')) wf.scripts._setObject('post_sendForProgramManagerReview', ExternalMethod('post_sendForProgramManagerReview', 'post_sendForProgramManagerReview', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_sendForProgramManagerReview')) wf.scripts._setObject('post_submit', ExternalMethod('post_submit', 'post_submit', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_submit')) wf.scripts._setObject('post_waitForPrintedMaterials', ExternalMethod('post_waitForPrintedMaterials', 'post_waitForPrintedMaterials', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_waitForPrintedMaterials')) wf.scripts._setObject('post_withdraw', ExternalMethod('post_withdraw', 'post_withdraw', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'post_withdraw')) wf.scripts._setObject('pre_addComment', ExternalMethod('pre_addComment', 'pre_addComment', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_addComment')) wf.scripts._setObject('pre_addToWaitlist', ExternalMethod('pre_addToWaitlist', 'pre_addToWaitlist', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_addToWaitlist')) wf.scripts._setObject('pre_admitConditionally', ExternalMethod('pre_admitConditionally', 'pre_admitConditionally', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_admitConditionally')) wf.scripts._setObject('pre_approveForFA', ExternalMethod('pre_approveForFA', 'pre_approveForFA', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_approveForFA')) wf.scripts._setObject('pre_archive', ExternalMethod('pre_archive', 'pre_archive', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_archive')) wf.scripts._setObject('pre_assertReadyForConditionalAdmit', ExternalMethod('pre_assertReadyForConditionalAdmit', 'pre_assertReadyForConditionalAdmit', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_assertReadyForConditionalAdmit')) wf.scripts._setObject('pre_assignSeat', ExternalMethod('pre_assignSeat', 'pre_assignSeat', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_assignSeat')) wf.scripts._setObject('pre_decline', ExternalMethod('pre_decline', 'pre_decline', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_decline')) wf.scripts._setObject('pre_declineFromFacultyReview', ExternalMethod('pre_declineFromFacultyReview', 'pre_declineFromFacultyReview', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_declineFromFacultyReview')) wf.scripts._setObject('pre_facultyApproves', ExternalMethod('pre_facultyApproves', 'pre_facultyApproves', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_facultyApproves')) wf.scripts._setObject('pre_holdForFAIncomplete', ExternalMethod('pre_holdForFAIncomplete', 'pre_holdForFAIncomplete', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_holdForFAIncomplete')) 
wf.scripts._setObject('pre_manageDeadlines', ExternalMethod('pre_manageDeadlines', 'pre_manageDeadlines', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_manageDeadlines')) wf.scripts._setObject('pre_recheckForFAHold', ExternalMethod('pre_recheckForFAHold', 'pre_recheckForFAHold', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_recheckForFAHold')) wf.scripts._setObject('pre_sendForDirectorReview', ExternalMethod('pre_sendForDirectorReview', 'pre_sendForDirectorReview', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_sendForDirectorReview')) wf.scripts._setObject('pre_sendForFacultyReview', ExternalMethod('pre_sendForFacultyReview', 'pre_sendForFacultyReview', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_sendForFacultyReview')) wf.scripts._setObject('pre_sendForProgramManagerReview', ExternalMethod('pre_sendForProgramManagerReview', 'pre_sendForProgramManagerReview', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_sendForProgramManagerReview')) wf.scripts._setObject('pre_submit', ExternalMethod('pre_submit', 'pre_submit', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_submit')) wf.scripts._setObject('pre_waitForPrintedMaterials', ExternalMethod('pre_waitForPrintedMaterials', 'pre_waitForPrintedMaterials', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_waitForPrintedMaterials')) wf.scripts._setObject('pre_withdraw', ExternalMethod('pre_withdraw', 'pre_withdraw', 'UWOshOIE.OIEStudentApplicationWorkflow_scripts', 'pre_withdraw')) def create_OIEStudentApplicationWorkflow(id): """Create, setup and return the workflow. """ ob = DCWorkflowDefinition(id) setup_OIEStudentApplicationWorkflow(ob) return ob addWorkflowFactory(create_OIEStudentApplicationWorkflow, id='OIEStudentApplicationWorkflow', title='OIEStudentApplicationWorkflow')
uwosh/UWOshOIE
Extensions/OIEStudentApplicationWorkflow.py
Python
gpl-2.0
79,386
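The module above only registers a workflow factory; a hedged sketch of how a workflow registered this way is typically instantiated and bound to a content type from an install step follows. The function name and the 'OIEStudentApplication' portal type are assumptions for illustration, and the factory-type string passed to manage_addWorkflow is assumed to follow the usual '<id> (<title>)' convention of addWorkflowFactory registrations.

from Products.CMFCore.utils import getToolByName

def install_oie_workflow(portal):
    # Instantiate the registered factory inside portal_workflow, then bind it.
    wf_tool = getToolByName(portal, 'portal_workflow')
    if 'OIEStudentApplicationWorkflow' not in wf_tool.objectIds():
        wf_tool.manage_addWorkflow(
            'OIEStudentApplicationWorkflow (OIEStudentApplicationWorkflow)',
            'OIEStudentApplicationWorkflow')
    # Assumed portal type; replace with the real application type name.
    wf_tool.setChainForPortalTypes(('OIEStudentApplication',),
                                   'OIEStudentApplicationWorkflow')
    # Re-apply the role/permission settings defined on the workflow states.
    wf_tool.updateRoleMappings()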
# -*- coding: utf-8 -*-
import os  # used below for the per-novel scratch directories
import scrapy
from scrapy import Selector

from libs.misc import get_spider_name_from_domain
from libs.polish import *
from novelsCrawler.items import NovelsCrawlerItem


class Novel101Spider(scrapy.Spider):
    """
    classdocs

    example: http://www.101novel.com/ck101/14744/
    """
    dom = 'www.101novel.com'
    name = get_spider_name_from_domain(dom)
    allowed_domains = [dom]

    def __init__(self, *args, **kwargs):
        super(Novel101Spider, self).__init__(*args, **kwargs)
        self.tmp_novels_dir = kwargs['tmp_novels_dir']
        urls = kwargs['start_urls']
        self.start_urls = [self.url_check(url) for url in urls]
        print(self.start_urls)

    def url_check(self, url):
        # pattern = 'http://m.lwxs.com/wapbook/([\d]+).html'
        # m = re.search(pattern, url)
        # if m is not None:
        #     return 'http://m.lwxs.com/wapbook/{0}_1/'.format(m.group(1))
        return url

    # def start_requests(self):
    #     for url in self.start_urls:
    #         yield self.make_requests_from_url(url)

    def parse(self, response):
        sel = Selector(response)
        title = sel.xpath('//h1/text()').extract()[0]
        title = polish_title(title, self.name)
        print(title)
        tmp_spider_root_dir = os.path.join(self.tmp_novels_dir, title)
        if not os.path.isdir(tmp_spider_root_dir):
            os.makedirs(tmp_spider_root_dir)
        subtitle_selectors = sel.xpath('//tbody/tr/td/div[@class="dccss"]/a')
        all_pages = [i + 1 for i in range(0, len(subtitle_selectors))]
        save_index(title, response.url, tmp_spider_root_dir, all_pages)
        download_pages = polish_pages(tmp_spider_root_dir, all_pages)
        # Traverse the subtitle selectors, but only crawl the pages that
        # haven't been downloaded yet.
        for i, subtitle_selector in enumerate(subtitle_selectors):
            page_id = i + 1
            if page_id not in set(download_pages):
                continue
            else:
                subtitle_url = subtitle_selector.xpath('@href').extract()[0]
                subtitle_url = response.urljoin(subtitle_url.strip())
                subtitle_name = subtitle_selector.xpath('text()').extract()[0]
                subtitle_name = polish_subtitle(subtitle_name)
                item = NovelsCrawlerItem()
                item['title'] = title
                item['id'] = page_id
                item['subtitle'] = subtitle_name
                item['root_dir'] = tmp_spider_root_dir
                request = scrapy.Request(subtitle_url, callback=self.parse_page)
                request.meta['item'] = item
                yield request

    def parse_page(self, response):
        item = response.meta['item']
        sel = Selector(response)
        content = sel.xpath('//div[@id="content"]/p/text()').extract()
        content = polish_content(content)
        item['content'] = content
        return item
yytang2012/novels-crawler
novelsCrawler/spiders/c101nove.py
Python
mit
2,957
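Since the spider above reads its two required arguments out of **kwargs, it is usually launched with them supplied explicitly. A minimal launch sketch, assuming the novelsCrawler project (and its libs package) is importable; the scratch directory is a placeholder and the start URL is the one from the spider's docstring.

from scrapy.crawler import CrawlerProcess
from novelsCrawler.spiders.c101nove import Novel101Spider

process = CrawlerProcess(settings={'LOG_LEVEL': 'INFO'})
process.crawl(
    Novel101Spider,
    tmp_novels_dir='/tmp/novels',                           # assumed scratch directory
    start_urls=['http://www.101novel.com/ck101/14744/'],    # example URL from the docstring
)
process.start()  # blocks until the crawl finishes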
""" Google OpenId, OAuth2, OAuth1, Google+ Sign-in backends, docs at: http://psa.matiasaguirre.net/docs/backends/google.html """ from requests import HTTPError from social.backends.open_id import OpenIdAuth from social.backends.oauth import BaseOAuth2, BaseOAuth1 from social.exceptions import AuthMissingParameter, AuthCanceled class BaseGoogleAuth(object): def get_user_id(self, details, response): """Use google email as unique id""" if self.setting('USE_UNIQUE_USER_ID', False): return response['id'] else: return details['email'] def get_user_details(self, response): """Return user details from Orkut account""" email = response.get('email', '') return {'username': email.split('@', 1)[0], 'email': email, 'fullname': response.get('name', ''), 'first_name': response.get('given_name', ''), 'last_name': response.get('family_name', '')} class BaseGoogleOAuth2API(BaseGoogleAuth): def user_data(self, access_token, *args, **kwargs): """Return user data from Google API""" return self.get_json( 'https://www.googleapis.com/oauth2/v1/userinfo', params={'access_token': access_token, 'alt': 'json'} ) class GoogleOAuth2(BaseGoogleOAuth2API, BaseOAuth2): """Google OAuth2 authentication backend""" name = 'google-oauth2' REDIRECT_STATE = False AUTHORIZATION_URL = 'https://accounts.google.com/o/oauth2/auth' ACCESS_TOKEN_URL = 'https://accounts.google.com/o/oauth2/token' ACCESS_TOKEN_METHOD = 'POST' REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke' REVOKE_TOKEN_METHOD = 'GET' DEFAULT_SCOPE = ['https://www.googleapis.com/auth/userinfo.email', 'https://www.googleapis.com/auth/userinfo.profile'] EXTRA_DATA = [ ('refresh_token', 'refresh_token', True), ('expires_in', 'expires'), ('token_type', 'token_type', True) ] def revoke_token_params(self, token, uid): return {'token': token} def revoke_token_headers(self, token, uid): return {'Content-type': 'application/json'} class GooglePlusAuth(BaseGoogleOAuth2API, BaseOAuth2): name = 'google-plus' REDIRECT_STATE = False STATE_PARAMETER = False ACCESS_TOKEN_URL = 'https://accounts.google.com/o/oauth2/token' ACCESS_TOKEN_METHOD = 'POST' REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke' REVOKE_TOKEN_METHOD = 'GET' DEFAULT_SCOPE = ['https://www.googleapis.com/auth/plus.login', 'https://www.googleapis.com/auth/userinfo.email', 'https://www.googleapis.com/auth/userinfo.profile'] EXTRA_DATA = [ ('id', 'user_id'), ('refresh_token', 'refresh_token', True), ('expires_in', 'expires'), ('access_type', 'access_type', True), ('code', 'code') ] def auth_complete_params(self, state=None): params = super(GooglePlusAuth, self).auth_complete_params(state) params['redirect_uri'] = 'postmessage' return params def auth_complete(self, *args, **kwargs): token = self.data.get('access_token') if not token: raise AuthMissingParameter(self, 'access_token') self.process_error(self.get_json( 'https://www.googleapis.com/oauth2/v1/tokeninfo', params={'access_token': token} )) try: response = self.request_access_token( self.ACCESS_TOKEN_URL, data=self.auth_complete_params(), headers=self.auth_headers(), method=self.ACCESS_TOKEN_METHOD ) except HTTPError as err: if err.response.status_code == 400: raise AuthCanceled(self) else: raise self.process_error(response) return self.do_auth(response['access_token'], response=response, *args, **kwargs) class GoogleOAuth(BaseGoogleAuth, BaseOAuth1): """Google OAuth authorization mechanism""" name = 'google-oauth' AUTHORIZATION_URL = 'https://www.google.com/accounts/OAuthAuthorizeToken' REQUEST_TOKEN_URL = 
'https://www.google.com/accounts/OAuthGetRequestToken' ACCESS_TOKEN_URL = 'https://www.google.com/accounts/OAuthGetAccessToken' DEFAULT_SCOPE = ['https://www.googleapis.com/auth/userinfo#email'] def user_data(self, access_token, *args, **kwargs): """Return user data from Google API""" return self.get_querystring( 'https://www.googleapis.com/userinfo/email', auth=self.oauth_auth(access_token) ) def get_key_and_secret(self): """Return Google OAuth Consumer Key and Consumer Secret pair, uses anonymous by default, beware that this marks the application as not registered and a security badge is displayed on authorization page. http://code.google.com/apis/accounts/docs/OAuth_ref.html#SigningOAuth """ key_secret = super(GoogleOAuth, self).get_key_and_secret() if key_secret == (None, None): key_secret = ('anonymous', 'anonymous') return key_secret class GoogleOpenId(OpenIdAuth): name = 'google' URL = 'https://www.google.com/accounts/o8/id' def get_user_id(self, details, response): """ Return user unique id provided by service. For google user email is unique enought to flag a single user. Email comes from schema: http://axschema.org/contact/email """ print details return details['email']
HackerEcology/SuggestU
suggestu/social/backends/google.py
Python
gpl-3.0
5,709
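For the GoogleOAuth2 backend above, configuration happens entirely through settings; self.setting('USE_UNIQUE_USER_ID') resolves to a name following the library's SOCIAL_AUTH_<BACKEND>_<SETTING> convention. A minimal Django settings sketch with placeholder credentials:

# Enable the backend defined above alongside Django's default backend.
AUTHENTICATION_BACKENDS = (
    'social.backends.google.GoogleOAuth2',
    'django.contrib.auth.backends.ModelBackend',
)
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '<client-id>.apps.googleusercontent.com'   # placeholder
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = '<client-secret>'                       # placeholder
# Switches get_user_id() from the email address to the numeric Google id.
SOCIAL_AUTH_GOOGLE_OAUTH2_USE_UNIQUE_USER_ID = True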
# -*- coding: utf-8 -*- # Copyright (c) 2006-2011 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. """ Some unit tests for the S3Connection """ import unittest import time import os import urllib import urlparse import httplib from boto.s3.connection import S3Connection from boto.s3.bucket import Bucket from boto.exception import S3PermissionsError, S3ResponseError class S3ConnectionTest (unittest.TestCase): s3 = True def test_1_basic(self): print '--- running S3Connection tests ---' c = S3Connection() # create a new, empty bucket bucket_name = 'test-%d' % int(time.time()) bucket = c.create_bucket(bucket_name) # now try a get_bucket call and see if it's really there bucket = c.get_bucket(bucket_name) # test logging logging_bucket = c.create_bucket(bucket_name + '-log') logging_bucket.set_as_logging_target() bucket.enable_logging(target_bucket=logging_bucket, target_prefix=bucket.name) bucket.disable_logging() c.delete_bucket(logging_bucket) k = bucket.new_key('foobar') s1 = 'This is a test of file upload and download' s2 = 'This is a second string to test file upload and download' k.set_contents_from_string(s1) fp = open('foobar', 'wb') # now get the contents from s3 to a local file k.get_contents_to_file(fp) fp.close() fp = open('foobar') # check to make sure content read from s3 is identical to original assert s1 == fp.read(), 'corrupted file' fp.close() # test generated URLs url = k.generate_url(3600) file = urllib.urlopen(url) assert s1 == file.read(), 'invalid URL %s' % url url = k.generate_url(3600, force_http=True) file = urllib.urlopen(url) assert s1 == file.read(), 'invalid URL %s' % url url = k.generate_url(3600, force_http=True, headers={'x-amz-x-token' : 'XYZ'}) file = urllib.urlopen(url) assert s1 == file.read(), 'invalid URL %s' % url rh = {'response-content-disposition': 'attachment; filename="foo.txt"'} url = k.generate_url(60, response_headers=rh) file = urllib.urlopen(url) assert s1 == file.read(), 'invalid URL %s' % url #test whether amperands and to-be-escaped characters work in header filename rh = {'response-content-disposition': 'attachment; filename="foo&z%20ar&ar&zar&bar.txt"'} url = k.generate_url(60, response_headers=rh, force_http=True) file = urllib.urlopen(url) assert s1 == file.read(), 'invalid URL %s' % url # overwrite foobar contents with a PUT url = k.generate_url(3600, 'PUT', force_http=True, policy='private', reduced_redundancy=True) up = urlparse.urlsplit(url) con = httplib.HTTPConnection(up.hostname, up.port) con.request("PUT", up.path + '?' 
+ up.query, body="hello there") resp = con.getresponse() assert 200 == resp.status assert "hello there" == k.get_contents_as_string() bucket.delete_key(k) # test a few variations on get_all_keys - first load some data # for the first one, let's override the content type phony_mimetype = 'application/x-boto-test' headers = {'Content-Type': phony_mimetype} k.name = 'foo/bar' k.set_contents_from_string(s1, headers) k.name = 'foo/bas' size = k.set_contents_from_filename('foobar') assert size == 42 k.name = 'foo/bat' k.set_contents_from_string(s1) k.name = 'fie/bar' k.set_contents_from_string(s1) k.name = 'fie/bas' k.set_contents_from_string(s1) k.name = 'fie/bat' k.set_contents_from_string(s1) # try resetting the contents to another value md5 = k.md5 k.set_contents_from_string(s2) assert k.md5 != md5 os.unlink('foobar') all = bucket.get_all_keys() assert len(all) == 6 rs = bucket.get_all_keys(prefix='foo') assert len(rs) == 3 rs = bucket.get_all_keys(prefix='', delimiter='/') assert len(rs) == 2 rs = bucket.get_all_keys(maxkeys=5) assert len(rs) == 5 # test the lookup method k = bucket.lookup('foo/bar') assert isinstance(k, bucket.key_class) assert k.content_type == phony_mimetype k = bucket.lookup('notthere') assert k == None # try some metadata stuff k = bucket.new_key('has_metadata') mdkey1 = 'meta1' mdval1 = 'This is the first metadata value' k.set_metadata(mdkey1, mdval1) mdkey2 = 'meta2' mdval2 = 'This is the second metadata value' k.set_metadata(mdkey2, mdval2) # try a unicode metadata value mdval3 = u'föö' mdkey3 = 'meta3' k.set_metadata(mdkey3, mdval3) k.set_contents_from_string(s1) k = bucket.lookup('has_metadata') assert k.get_metadata(mdkey1) == mdval1 assert k.get_metadata(mdkey2) == mdval2 assert k.get_metadata(mdkey3) == mdval3 k = bucket.new_key('has_metadata') k.get_contents_as_string() assert k.get_metadata(mdkey1) == mdval1 assert k.get_metadata(mdkey2) == mdval2 assert k.get_metadata(mdkey3) == mdval3 bucket.delete_key(k) # test list and iterator rs1 = bucket.list() num_iter = 0 for r in rs1: num_iter = num_iter + 1 rs = bucket.get_all_keys() num_keys = len(rs) assert num_iter == num_keys # try a key with a funny character k = bucket.new_key('testnewline\n') k.set_contents_from_string('This is a test') rs = bucket.get_all_keys() assert len(rs) == num_keys + 1 bucket.delete_key(k) rs = bucket.get_all_keys() assert len(rs) == num_keys # try some acl stuff bucket.set_acl('public-read') policy = bucket.get_acl() assert len(policy.acl.grants) == 2 bucket.set_acl('private') policy = bucket.get_acl() assert len(policy.acl.grants) == 1 k = bucket.lookup('foo/bar') k.set_acl('public-read') policy = k.get_acl() assert len(policy.acl.grants) == 2 k.set_acl('private') policy = k.get_acl() assert len(policy.acl.grants) == 1 # try the convenience methods for grants bucket.add_user_grant('FULL_CONTROL', 'c1e724fbfa0979a4448393c59a8c055011f739b6d102fb37a65f26414653cd67') try: bucket.add_email_grant('foobar', 'foo@bar.com') except S3PermissionsError: pass # now try to create an RRS key k = bucket.new_key('reduced_redundancy') k.set_contents_from_string('This key has reduced redundancy', reduced_redundancy=True) # now try to inject a response header data = k.get_contents_as_string(response_headers={'response-content-type' : 'foo/bar'}) assert k.content_type == 'foo/bar' # now delete all keys in bucket for k in bucket: if k.name == 'reduced_redundancy': assert k.storage_class == 'REDUCED_REDUNDANCY' bucket.delete_key(k) # now delete bucket time.sleep(5) c.delete_bucket(bucket) print '--- 
tests completed ---' def test_basic_anon(self): auth_con = S3Connection() # create a new, empty bucket bucket_name = 'test-%d' % int(time.time()) auth_bucket = auth_con.create_bucket(bucket_name) # try read the bucket anonymously anon_con = S3Connection(anon=True) anon_bucket = Bucket(anon_con, bucket_name) try: iter(anon_bucket.list()).next() self.fail("anon bucket list should fail") except S3ResponseError: pass # give bucket anon user access and anon read again auth_bucket.set_acl('public-read') time.sleep(5) try: iter(anon_bucket.list()).next() self.fail("not expecting contents") except S3ResponseError, e: self.fail("We should have public-read access, but received " "an error: %s" % e) except StopIteration: pass # cleanup auth_con.delete_bucket(auth_bucket) def test_error_code_populated(self): c = S3Connection() try: c.create_bucket('bad$bucket$name') except S3ResponseError, e: self.assertEqual(e.error_code, 'InvalidBucketName') else: self.fail("S3ResponseError not raised.")
harshilasu/GraphicMelon
y/google-cloud-sdk/platform/gsutil/third_party/boto/tests/integration/s3/test_connection.py
Python
gpl-3.0
9,832
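The integration test above exercises many features at once; the basic round trip it builds on is short. A minimal sketch, assuming AWS credentials are available to boto through the environment or a boto config file, and using a throwaway bucket name:

import time
from boto.s3.connection import S3Connection

conn = S3Connection()                       # credentials are picked up implicitly
bucket = conn.create_bucket('demo-%d' % int(time.time()))
key = bucket.new_key('hello.txt')
key.set_contents_from_string('hello world')
print(key.get_contents_as_string())         # 'hello world'
print(key.generate_url(3600))               # pre-signed download URL, valid for one hour
bucket.delete_key(key)
conn.delete_bucket(bucket)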
"""SocksiPy - Python SOCKS module. Version 1.00 Copyright 2006 Dan-Haim. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Dan Haim nor the names of his contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. This module provides a standard socket-like interface for Python for tunneling connections through SOCKS proxies. """ import socket import struct PROXY_TYPE_SOCKS4 = 1 PROXY_TYPE_SOCKS5 = 2 PROXY_TYPE_HTTP = 3 _defaultproxy = None _orgsocket = socket.socket class ProxyError(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) class GeneralProxyError(ProxyError): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) class Socks5AuthError(ProxyError): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) class Socks5Error(ProxyError): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) class Socks4Error(ProxyError): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) class HTTPError(ProxyError): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) _generalerrors = ("success", "invalid data", "not connected", "not available", "bad proxy type", "bad input") _socks5errors = ("succeeded", "general SOCKS server failure", "connection not allowed by ruleset", "Network unreachable", "Host unreachable", "Connection refused", "TTL expired", "Command not supported", "Address type not supported", "Unknown error") _socks5autherrors = ("succeeded", "authentication is required", "all offered authentication methods were rejected", "unknown username or invalid password", "unknown error") _socks4errors = ("request granted", "request rejected or failed", "request rejected because SOCKS server cannot connect to identd on the client", "request rejected because the client program and identd report different user-ids", "unknown error") def setdefaultproxy(proxytype=None,addr=None,port=None,rdns=True,username=None,password=None): """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) Sets a default proxy which all further socksocket objects will use, unless explicitly changed. 
""" global _defaultproxy _defaultproxy = (proxytype,addr,port,rdns,username,password) class socksocket(socket.socket): """socksocket([family[, type[, proto]]]) -> socket object Open a SOCKS enabled socket. The parameters are the same as those of the standard socket init. In order for SOCKS to work, you must specify family=AF_INET, type=SOCK_STREAM and proto=0. """ def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None): _orgsocket.__init__(self,family,type,proto,_sock) if _defaultproxy != None: self.__proxy = _defaultproxy else: self.__proxy = (None, None, None, None, None, None) self.__proxysockname = None self.__proxypeername = None def __recvall(self, bytes): """__recvall(bytes) -> data Receive EXACTLY the number of bytes requested from the socket. Blocks until the required number of bytes have been received. """ data = "" while len(data) < bytes: data = data + self.recv(bytes-len(data)) return data def setproxy(self,proxytype=None,addr=None,port=None,rdns=True,username=None,password=None): """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) Sets the proxy to be used. proxytype - The type of the proxy to be used. Three types are supported: PROXY_TYPE_SOCKS4 (including socks4a), PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP addr - The address of the server (IP or DNS). port - The port of the server. Defaults to 1080 for SOCKS servers and 8080 for HTTP proxy servers. rdns - Should DNS queries be preformed on the remote side (rather than the local side). The default is True. Note: This has no effect with SOCKS4 servers. username - Username to authenticate with to the server. The default is no authentication. password - Password to authenticate with to the server. Only relevant when username is also provided. """ self.__proxy = (proxytype,addr,port,rdns,username,password) def __negotiatesocks5(self,destaddr,destport): """__negotiatesocks5(self,destaddr,destport) Negotiates a connection through a SOCKS5 server. """ # First we'll send the authentication packages we support. if (self.__proxy[4]!=None) and (self.__proxy[5]!=None): # The username/password details were supplied to the # setproxy method so we support the USERNAME/PASSWORD # authentication (in addition to the standard none). self.sendall("\x05\x02\x00\x02") else: # No username/password were entered, therefore we # only support connections with no authentication. self.sendall("\x05\x01\x00") # We'll receive the server's response to determine which # method was selected chosenauth = self.__recvall(2) if chosenauth[0] != "\x05": self.close() raise GeneralProxyError((1,_generalerrors[1])) # Check the chosen authentication method if chosenauth[1] == "\x00": # No authentication is required pass elif chosenauth[1] == "\x02": # Okay, we need to perform a basic username/password # authentication. 
self.sendall("\x01" + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5]) authstat = self.__recvall(2) if authstat[0] != "\x01": # Bad response self.close() raise GeneralProxyError((1,_generalerrors[1])) if authstat[1] != "\x00": # Authentication failed self.close() raise Socks5AuthError((3,_socks5autherrors[3])) # Authentication succeeded else: # Reaching here is always bad self.close() if chosenauth[1] == "\xFF": raise Socks5AuthError((2,_socks5autherrors[2])) else: raise GeneralProxyError((1,_generalerrors[1])) # Now we can request the actual connection req = "\x05\x01\x00" # If the given destination address is an IP address, we'll # use the IPv4 address request even if remote resolving was specified. try: ipaddr = socket.inet_aton(destaddr) req = req + "\x01" + ipaddr except socket.error: # Well it's not an IP number, so it's probably a DNS name. if self.__proxy[3]==True: # Resolve remotely ipaddr = None req = req + "\x03" + chr(len(destaddr)) + destaddr else: # Resolve locally ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) req = req + "\x01" + ipaddr req = req + struct.pack(">H",destport) self.sendall(req) # Get the response resp = self.__recvall(4) if resp[0] != "\x05": self.close() raise GeneralProxyError((1,_generalerrors[1])) elif resp[1] != "\x00": # Connection failed self.close() if ord(resp[1])<=8: raise Socks5Error(ord(resp[1]),_socks5errors[ord(resp[1])]) else: raise Socks5Error(9,_socks5errors[9]) # Get the bound address/port elif resp[3] == "\x01": boundaddr = self.__recvall(4) elif resp[3] == "\x03": resp = resp + self.recv(1) boundaddr = self.__recvall(resp[4]) else: self.close() raise GeneralProxyError((1,_generalerrors[1])) boundport = struct.unpack(">H",self.__recvall(2))[0] self.__proxysockname = (boundaddr,boundport) if ipaddr != None: self.__proxypeername = (socket.inet_ntoa(ipaddr),destport) else: self.__proxypeername = (destaddr,destport) def getproxysockname(self): """getsockname() -> address info Returns the bound IP address and port number at the proxy. """ return self.__proxysockname def getproxypeername(self): """getproxypeername() -> address info Returns the IP and port number of the proxy. """ return _orgsocket.getpeername(self) def getpeername(self): """getpeername() -> address info Returns the IP address and port number of the destination machine (note: getproxypeername returns the proxy) """ return self.__proxypeername def __negotiatesocks4(self,destaddr,destport): """__negotiatesocks4(self,destaddr,destport) Negotiates a connection through a SOCKS4 server. """ # Check if the destination address provided is an IP address rmtrslv = False try: ipaddr = socket.inet_aton(destaddr) except socket.error: # It's a DNS name. Check where it should be resolved. if self.__proxy[3]==True: ipaddr = "\x00\x00\x00\x01" rmtrslv = True else: ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) # Construct the request packet req = "\x04\x01" + struct.pack(">H",destport) + ipaddr # The username parameter is considered userid for SOCKS4 if self.__proxy[4] != None: req = req + self.__proxy[4] req = req + "\x00" # DNS name if remote resolving is required # NOTE: This is actually an extension to the SOCKS4 protocol # called SOCKS4A and may not be supported in all cases. 
if rmtrslv==True: req = req + destaddr + "\x00" self.sendall(req) # Get the response from the server resp = self.__recvall(8) if resp[0] != "\x00": # Bad data self.close() raise GeneralProxyError((1,_generalerrors[1])) if resp[1] != "\x5A": # Server returned an error self.close() if ord(resp[1]) in (91,92,93): self.close() raise Socks4Error((ord(resp[1]),_socks4errors[ord(resp[1])-90])) else: raise Socks4Error((94,_socks4errors[4])) # Get the bound address/port self.__proxysockname = (socket.inet_ntoa(resp[4:]),struct.unpack(">H",resp[2:4])[0]) if rmtrslv != None: self.__proxypeername = (socket.inet_ntoa(ipaddr),destport) else: self.__proxypeername = (destaddr,destport) def __negotiatehttp(self,destaddr,destport): """__negotiatehttp(self,destaddr,destport) Negotiates a connection through an HTTP server. """ # If we need to resolve locally, we do this now if self.__proxy[3] == False: addr = socket.gethostbyname(destaddr) else: addr = destaddr self.sendall("CONNECT " + addr + ":" + str(destport) + " HTTP/1.1\r\n" + "Host: " + destaddr + "\r\n\r\n") # We read the response until we get the string "\r\n\r\n" resp = self.recv(1) while resp.find("\r\n\r\n")==-1: resp = resp + self.recv(1) # We just need the first line to check if the connection # was successful statusline = resp.splitlines()[0].split(" ",2) if statusline[0] not in ("HTTP/1.0","HTTP/1.1"): self.close() raise GeneralProxyError((1,_generalerrors[1])) try: statuscode = int(statusline[1]) except ValueError: self.close() raise GeneralProxyError((1,_generalerrors[1])) if statuscode != 200: self.close() raise HTTPError((statuscode,statusline[2])) self.__proxysockname = ("0.0.0.0",0) self.__proxypeername = (addr,destport) def connect(self,destpair): """connect(self,despair) Connects to the specified destination through a proxy. destpar - A tuple of the IP/DNS address and the port number. (identical to socket's connect). To select the proxy server use setproxy(). """ # Do a minimal input check first if (type(destpair) in (list,tuple)==False) or (len(destpair)<2) or (type(destpair[0])!=str) or (type(destpair[1])!=int): raise GeneralProxyError((5,_generalerrors[5])) if self.__proxy[0] == PROXY_TYPE_SOCKS5: if self.__proxy[2] != None: portnum = self.__proxy[2] else: portnum = 1080 _orgsocket.connect(self,(self.__proxy[1],portnum)) self.__negotiatesocks5(destpair[0],destpair[1]) elif self.__proxy[0] == PROXY_TYPE_SOCKS4: if self.__proxy[2] != None: portnum = self.__proxy[2] else: portnum = 1080 _orgsocket.connect(self,(self.__proxy[1],portnum)) self.__negotiatesocks4(destpair[0],destpair[1]) elif self.__proxy[0] == PROXY_TYPE_HTTP: if self.__proxy[2] != None: portnum = self.__proxy[2] else: portnum = 8080 _orgsocket.connect(self,(self.__proxy[1],portnum)) self.__negotiatehttp(destpair[0],destpair[1]) elif self.__proxy[0] == None: _orgsocket.connect(self,(destpair[0],destpair[1])) else: raise GeneralProxyError((4,_generalerrors[4]))
theRealTacoTime/poclbm
socks.py
Python
gpl-3.0
13,397
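A short usage sketch for the socks module above, assuming Python 2 (which the module targets) and a SOCKS5 proxy listening on localhost; the proxy address and target host are placeholders. Another common pattern among callers of this module is to call setdefaultproxy() once and then replace socket.socket with socks.socksocket so existing code is tunneled transparently.

import socks

s = socks.socksocket()
s.setproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', 1080)   # assumed local SOCKS5 proxy
s.connect(('example.com', 80))
s.sendall('GET / HTTP/1.0\r\nHost: example.com\r\n\r\n')
print(s.recv(4096))
s.close()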
# Neural Networks Demystified
# Part 2: Forward Propagation
#
# Supporting code for short YouTube series on artificial neural networks.
#
# Stephen Welch
# @stephencwelch


## ----------------------- Part 1 ---------------------------- ##
import numpy as np

# X = (hours sleeping, hours studying), y = Score on test
X = np.array(([3,5], [5,1], [10,2]), dtype=float)
y = np.array(([75], [82], [93]), dtype=float)

# Normalize
X = X/np.amax(X, axis=0)
y = y/100  # Max test score is 100

## ----------------------- Part 2 ---------------------------- ##
class Neural_Network(object):
    def __init__(self):
        # Define hyperparameters
        self.inputLayerSize = 2
        self.outputLayerSize = 1
        self.hiddenLayerSize = 3

        # Weights (parameters)
        self.W1 = np.random.randn(self.inputLayerSize, self.hiddenLayerSize)
        self.W2 = np.random.randn(self.hiddenLayerSize, self.outputLayerSize)

    def forward(self, X):
        # Propagate inputs through the network
        self.z2 = np.dot(X, self.W1)
        self.a2 = self.sigmoid(self.z2)
        self.z3 = np.dot(self.a2, self.W2)
        yHat = self.sigmoid(self.z3)
        return yHat

    def sigmoid(self, z):
        # Apply sigmoid activation function to scalar, vector, or matrix
        return 1/(1+np.exp(-z))
vbsteja/code
Python/ML_DL/DL/Neural-Networks-Demystified-master/partTwo.py
Python
apache-2.0
1,330
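A quick check of the forward pass defined above; with randomly initialized weights the predictions will not match y yet, but the output shape and the (0, 1) range of the sigmoid can be verified:

NN = Neural_Network()
yHat = NN.forward(X)     # shape (3, 1): one estimate per training example
print(yHat)
print(y)                 # targets, for comparison with the untrained output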
def minutes_string_to_seconds_int(minutes):
    try:
        return int(minutes) * 60
    except ValueError:
        return None


def replace_newlines_and_strip(text):
    return text.replace("\n", "").strip()


def replace_double_slashes_with_https(url):
    return url.replace("//", "https://")
stevenvolckaert/plugin.video.vrt.nu
resources/lib/vrtplayer/statichelper.py
Python
gpl-3.0
298
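A tiny usage sketch of the three helpers above, with the expected results shown as comments; the protocol-relative image URL is a made-up example:

print(minutes_string_to_seconds_int('3'))                  # 180
print(minutes_string_to_seconds_int('abc'))                # None
print(replace_newlines_and_strip('  two\nlines  '))        # 'twolines'
print(replace_double_slashes_with_https('//static.vrt.be/x.png'))
# 'https://static.vrt.be/x.png'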
from __future__ import absolute_import from __future__ import division from __future__ import print_function import random import itertools import numpy as np import tensorflow as tf from third_party.bi_att_flow.basic.read_data import DataSet from third_party.bi_att_flow.my.tensorflow.general import get_initializer from third_party.bi_att_flow.my.tensorflow.nn import softsel, get_logits, highway_network, multi_conv1d from third_party.bi_att_flow.my.tensorflow.rnn import bidirectional_dynamic_rnn from third_party.bi_att_flow.my.tensorflow.rnn_cell import SwitchableDropoutWrapper, AttentionCell def get_model(config): """ Returns a single model running on one CPU / GPU""" with tf.device('/{}:0'.format(config.device_type)): with tf.name_scope('model_0') as scope: model = Model(config, scope, rep=True) return [model] class Model(object): def __init__(self, config, scope, rep=True): self.scope = scope self.config = config self.global_step = tf.get_variable( 'global_step', shape=[], dtype='int32', initializer=tf.constant_initializer(0), trainable=False) # Define forward inputs here N, M, JX, JQ, VW, VC, W = \ config.batch_size, config.max_num_sents, config.max_sent_size, \ config.max_ques_size, config.word_vocab_size, config.char_vocab_size, config.max_word_size self.x = tf.placeholder('int32', [N, None, None], name='x') self.cx = tf.placeholder('int32', [N, None, None, W], name='cx') self.x_mask = tf.placeholder('bool', [N, None, None], name='x_mask') self.q = tf.placeholder('int32', [N, None], name='q') self.cq = tf.placeholder('int32', [N, None, W], name='cq') self.q_mask = tf.placeholder('bool', [N, None], name='q_mask') self.y = tf.placeholder('bool', [N, None, None], name='y') self.y2 = tf.placeholder('bool', [N, None, None], name='y2') self.is_train = tf.placeholder('bool', [], name='is_train') self.new_emb_mat = tf.placeholder( 'float', [None, config.word_emb_size], name='new_emb_mat') # Define misc self.tensor_dict = {} # Forward outputs / loss inputs self.logits = None self.yp = None self.var_list = None # Loss outputs self.loss = None self._build_forward() self._build_loss() self.var_ema = None if rep: self._build_var_ema() if config.mode == 'train': self._build_ema() self.summary = tf.summary.merge_all() self.summary = tf.summary.merge( tf.get_collection('summaries', scope=self.scope)) def _build_forward(self): config = self.config N, M, JX, JQ, VW, VC, d, W = \ config.batch_size, config.max_num_sents, config.max_sent_size, \ config.max_ques_size, config.word_vocab_size, config.char_vocab_size, config.hidden_size, \ config.max_word_size JX = tf.shape(self.x)[2] JQ = tf.shape(self.q)[1] M = tf.shape(self.x)[1] dc, dw, dco = config.char_emb_size, config.word_emb_size, config.char_out_size # Setting the scope in order to limit variable reusability. 
with tf.variable_scope('emb'): if config.use_char_emb: with tf.variable_scope('emb_var'), tf.device('/cpu:0'): char_emb_mat = tf.get_variable( 'char_emb_mat', shape=[VC, dc], dtype='float') with tf.variable_scope('char'): Acx = tf.nn.embedding_lookup(char_emb_mat, self.cx) # [N, M, JX, W, dc] Acq = tf.nn.embedding_lookup(char_emb_mat, self.cq) # [N, JQ, W, dc] Acx = tf.reshape(Acx, [-1, JX, W, dc]) Acq = tf.reshape(Acq, [-1, JQ, W, dc]) filter_sizes = list(map(int, config.out_channel_dims.split(','))) heights = list(map(int, config.filter_heights.split(','))) assert sum(filter_sizes) == dco, (filter_sizes, dco) with tf.variable_scope('conv'): xx = multi_conv1d( Acx, filter_sizes, heights, 'VALID', self.is_train, config.keep_prob, scope='xx') if config.share_cnn_weights: tf.get_variable_scope().reuse_variables() qq = multi_conv1d( Acq, filter_sizes, heights, 'VALID', self.is_train, config.keep_prob, scope='xx') else: qq = multi_conv1d( Acq, filter_sizes, heights, 'VALID', self.is_train, config.keep_prob, scope='qq') xx = tf.reshape(xx, [-1, M, JX, dco]) qq = tf.reshape(qq, [-1, JQ, dco]) if config.use_word_emb: with tf.variable_scope('emb_var'), tf.device('/cpu:0'): if config.mode == 'train': word_emb_mat = tf.get_variable( 'word_emb_mat', dtype='float', shape=[VW, dw], initializer=get_initializer(config.emb_mat)) else: word_emb_mat = tf.get_variable( 'word_emb_mat', shape=[VW, dw], dtype='float') if config.use_glove_for_unk: word_emb_mat = tf.concat([word_emb_mat, self.new_emb_mat], 0) with tf.name_scope('word'): Ax = tf.nn.embedding_lookup(word_emb_mat, self.x) # [N, M, JX, d] Aq = tf.nn.embedding_lookup(word_emb_mat, self.q) # [N, JQ, d] self.tensor_dict['x'] = Ax self.tensor_dict['q'] = Aq if config.use_char_emb: xx = tf.concat([xx, Ax], 3) # [N, M, JX, di] qq = tf.concat([qq, Aq], 2) # [N, JQ, di] else: xx = Ax qq = Aq # highway network if config.highway: with tf.variable_scope('highway'): xx = highway_network( xx, config.highway_num_layers, True, wd=config.wd, is_train=self.is_train) tf.get_variable_scope().reuse_variables() qq = highway_network( qq, config.highway_num_layers, True, wd=config.wd, is_train=self.is_train) self.tensor_dict['xx'] = xx self.tensor_dict['qq'] = qq x_len = tf.reduce_sum(tf.cast(self.x_mask, 'int32'), 2) # [N, M] q_len = tf.reduce_sum(tf.cast(self.q_mask, 'int32'), 1) # [N] with tf.variable_scope('prepro'): d_fw_cell = SwitchableDropoutWrapper( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), self.is_train, input_keep_prob=config.input_keep_prob) d_bw_cell = SwitchableDropoutWrapper( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), self.is_train, input_keep_prob=config.input_keep_prob) (fw_u, bw_u), ((_, fw_u_f), (_, bw_u_f)) = bidirectional_dynamic_rnn( d_fw_cell, d_bw_cell, qq, q_len, dtype='float', scope='u1') # [N, J, d], [N, d] u = tf.concat([fw_u, bw_u], 2) if config.share_lstm_weights: tf.get_variable_scope().reuse_variables() fw_cell = tf.contrib.rnn.BasicLSTMCell( d, state_is_tuple=True, reuse=True) bw_cell = tf.contrib.rnn.BasicLSTMCell( d, state_is_tuple=True, reuse=True) (fw_h, bw_h), _ = bidirectional_dynamic_rnn( fw_cell, bw_cell, xx, x_len, dtype='float', scope='u1') # [N, M, JX, 2d] h = tf.concat([fw_h, bw_h], 3) # [N, M, JX, 2d] else: fw_cell = tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True) bw_cell = tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True) (fw_h, bw_h), _ = bidirectional_dynamic_rnn( fw_cell, bw_cell, xx, x_len, dtype='float', scope='h1') # [N, M, JX, 2d] h = tf.concat([fw_h, bw_h], 3) # [N, M, JX, 2d] 
self.tensor_dict['u'] = u self.tensor_dict['h'] = h with tf.variable_scope('main'): if config.dynamic_att: p0 = h u = tf.reshape( tf.tile(tf.expand_dims(u, 1), [1, M, 1, 1]), [N * M, JQ, 2 * d]) q_mask = tf.reshape( tf.tile(tf.expand_dims(self.q_mask, 1), [1, M, 1]), [N * M, JQ]) first_fw_cell_0 = AttentionCell( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), u, mask=q_mask, mapper='sim', input_keep_prob=self.config.input_keep_prob, is_train=self.is_train) first_bw_cell_0 = AttentionCell( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), u, mask=q_mask, mapper='sim', input_keep_prob=self.config.input_keep_prob, is_train=self.is_train) first_fw_cell_1 = AttentionCell( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), u, mask=q_mask, mapper='sim', input_keep_prob=self.config.input_keep_prob, is_train=self.is_train) first_bw_cell_1 = AttentionCell( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), u, mask=q_mask, mapper='sim', input_keep_prob=self.config.input_keep_prob, is_train=self.is_train) else: p0 = attention_layer( config, self.is_train, h, u, h_mask=self.x_mask, u_mask=self.q_mask, scope='p0', tensor_dict=self.tensor_dict) first_fw_cell_0 = SwitchableDropoutWrapper( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), self.is_train, input_keep_prob=config.input_keep_prob) first_bw_cell_0 = SwitchableDropoutWrapper( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), self.is_train, input_keep_prob=config.input_keep_prob) first_fw_cell_1 = SwitchableDropoutWrapper( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), self.is_train, input_keep_prob=config.input_keep_prob) first_bw_cell_1 = SwitchableDropoutWrapper( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), self.is_train, input_keep_prob=config.input_keep_prob) (fw_g0, bw_g0), _ = bidirectional_dynamic_rnn( first_fw_cell_0, first_bw_cell_0, p0, x_len, dtype='float', scope='g0') # [N, M, JX, 2d] g0 = tf.concat([fw_g0, bw_g0], 3) (fw_g1, bw_g1), _ = bidirectional_dynamic_rnn( first_fw_cell_1, first_bw_cell_1, g0, x_len, dtype='float', scope='g1') # [N, M, JX, 2d] g1 = tf.concat([fw_g1, bw_g1], 3) logits = get_logits( [g1, p0], d, True, wd=config.wd, input_keep_prob=config.input_keep_prob, mask=self.x_mask, is_train=self.is_train, func=config.answer_func, scope='logits1') a1i = softsel( tf.reshape(g1, [N, M * JX, 2 * d]), tf.reshape(logits, [N, M * JX])) a1i = tf.tile(tf.expand_dims(tf.expand_dims(a1i, 1), 1), [1, M, JX, 1]) d_fw_cell = SwitchableDropoutWrapper( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), self.is_train, input_keep_prob=config.input_keep_prob) d_bw_cell = SwitchableDropoutWrapper( tf.contrib.rnn.BasicLSTMCell(d, state_is_tuple=True), self.is_train, input_keep_prob=config.input_keep_prob) (fw_g2, bw_g2), _ = bidirectional_dynamic_rnn( d_fw_cell, d_bw_cell, tf.concat([p0, g1, a1i, g1 * a1i], 3), x_len, dtype='float', scope='g2') # [N, M, JX, 2d] g2 = tf.concat([fw_g2, bw_g2], 3) logits2 = get_logits( [g2, p0], d, True, wd=config.wd, input_keep_prob=config.input_keep_prob, mask=self.x_mask, is_train=self.is_train, func=config.answer_func, scope='logits2') flat_logits = tf.reshape(logits, [-1, M * JX]) flat_yp = tf.nn.softmax(flat_logits) # [-1, M*JX] yp = tf.reshape(flat_yp, [-1, M, JX]) flat_logits2 = tf.reshape(logits2, [-1, M * JX]) flat_yp2 = tf.nn.softmax(flat_logits2) yp2 = tf.reshape(flat_yp2, [-1, M, JX]) self.tensor_dict['g1'] = g1 self.tensor_dict['g2'] = g2 self.logits = flat_logits self.logits2 = flat_logits2 self.yp = yp self.yp2 = yp2 def _build_loss(self): config = 
self.config JX = tf.shape(self.x)[2] M = tf.shape(self.x)[1] JQ = tf.shape(self.q)[1] loss_mask = tf.reduce_max(tf.cast(self.q_mask, 'float'), 1) losses = tf.nn.softmax_cross_entropy_with_logits( logits=self.logits, labels=tf.cast(tf.reshape(self.y, [-1, M * JX]), 'float')) ce_loss = tf.reduce_mean(loss_mask * losses) tf.add_to_collection('losses', ce_loss) ce_loss2 = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits( logits=self.logits2, labels=tf.cast(tf.reshape(self.y2, [-1, M * JX]), 'float'))) tf.add_to_collection('losses', ce_loss2) self.loss = tf.add_n( tf.get_collection('losses', scope=self.scope), name='loss') tf.summary.scalar(self.loss.op.name, self.loss) tf.add_to_collection('ema/scalar', self.loss) def _build_ema(self): self.ema = tf.train.ExponentialMovingAverage(self.config.decay) ema = self.ema tensors = tf.get_collection( 'ema/scalar', scope=self.scope) + tf.get_collection( 'ema/vector', scope=self.scope) ema_op = ema.apply(tensors) for var in tf.get_collection('ema/scalar', scope=self.scope): ema_var = ema.average(var) tf.summary.scalar(ema_var.op.name, ema_var) for var in tf.get_collection('ema/vector', scope=self.scope): ema_var = ema.average(var) tf.histogram_summary(ema_var.op.name, ema_var) with tf.control_dependencies([ema_op]): self.loss = tf.identity(self.loss) def _build_var_ema(self): self.var_ema = tf.train.ExponentialMovingAverage(self.config.var_decay) ema = self.var_ema ema_op = ema.apply(tf.trainable_variables()) with tf.control_dependencies([ema_op]): self.loss = tf.identity(self.loss) def get_loss(self): return self.loss def get_global_step(self): return self.global_step def get_var_list(self): return self.var_list def get_feed_dict(self, batch, is_train, supervised=True): assert isinstance(batch, DataSet) config = self.config N, M, JX, JQ, VW, VC, d, W = \ config.batch_size, config.max_num_sents, config.max_sent_size, \ config.max_ques_size, config.word_vocab_size, config.char_vocab_size, config.hidden_size, config.max_word_size feed_dict = {} if config.len_opt: """ Note that this optimization results in variable GPU RAM usage (i.e. can cause OOM in the middle of training.) 
First test without len_opt and make sure no OOM, and use len_opt """ if sum(len(sent) for para in batch.data['x'] for sent in para) == 0: new_JX = 1 else: new_JX = max(len(sent) for para in batch.data['x'] for sent in para) JX = min(JX, new_JX) if sum(len(ques) for ques in batch.data['q']) == 0: new_JQ = 1 else: new_JQ = max(len(ques) for ques in batch.data['q']) JQ = min(JQ, new_JQ) if config.cpu_opt: if sum(len(para) for para in batch.data['x']) == 0: new_M = 1 else: new_M = max(len(para) for para in batch.data['x']) M = min(M, new_M) x = np.zeros([N, M, JX], dtype='int32') cx = np.zeros([N, M, JX, W], dtype='int32') x_mask = np.zeros([N, M, JX], dtype='bool') q = np.zeros([N, JQ], dtype='int32') cq = np.zeros([N, JQ, W], dtype='int32') q_mask = np.zeros([N, JQ], dtype='bool') feed_dict[self.x] = x feed_dict[self.x_mask] = x_mask feed_dict[self.cx] = cx feed_dict[self.q] = q feed_dict[self.cq] = cq feed_dict[self.q_mask] = q_mask feed_dict[self.is_train] = is_train if config.use_glove_for_unk: feed_dict[self.new_emb_mat] = batch.shared['new_emb_mat'] if supervised: y = np.zeros([N, M, JX], dtype='bool') y2 = np.zeros([N, M, JX], dtype='bool') feed_dict[self.y] = y feed_dict[self.y2] = y2 assert not config.single X = batch.data['x'] for i, (xi, yi) in enumerate(zip(X, batch.data['y'])): if len(yi) == 0: continue start_idx, stop_idx = random.choice(yi) j, k = start_idx j2, k2 = stop_idx if config.squash: offset = sum(map(len, xi[:j])) j, k = 0, k + offset offset = sum(map(len, xi[:j2])) j2, k2 = 0, k2 + offset y[i, j, k] = True y2[i, j2, k2 - 1] = True def _get_word(word): d = batch.shared['word2idx'] for each in (word, word.lower(), word.capitalize(), word.upper()): if each in d: return d[each] if config.use_glove_for_unk: d2 = batch.shared['new_word2idx'] for each in (word, word.lower(), word.capitalize(), word.upper()): if each in d2: return d2[each] + len(d) return 1 def _get_char(char): d = batch.shared['char2idx'] if char in d: return d[char] return 1 for i, xi in enumerate(X): if self.config.squash: xi = [list(itertools.chain(*xi))] for j, xij in enumerate(xi): if j == config.max_num_sents: break for k, xijk in enumerate(xij): if k == config.max_sent_size: break each = _get_word(xijk) assert isinstance(each, int), each x[i, j, k] = each x_mask[i, j, k] = True for l, xijkl in enumerate(xijk): if l == config.max_word_size: break cx[i, j, k, l] = _get_char(xijkl) # batch, sentence, word, char for i, qi in enumerate(batch.data['q']): for j, qij in enumerate(qi): q[i, j] = _get_word(qij) q_mask[i, j] = True for i, cqi in enumerate(batch.data['cq']): for j, cqij in enumerate(cqi): for k, cqijk in enumerate(cqij): cq[i, j, k] = _get_char(cqijk) if k + 1 == config.max_word_size: break return feed_dict def bi_attention(config, is_train, h, u, h_mask=None, u_mask=None, scope=None, tensor_dict=None): with tf.variable_scope(scope or 'bi_attention'): JX = tf.shape(h)[2] M = tf.shape(h)[1] JQ = tf.shape(u)[1] h_aug = tf.tile(tf.expand_dims(h, 3), [1, 1, 1, JQ, 1]) u_aug = tf.tile(tf.expand_dims(tf.expand_dims(u, 1), 1), [1, M, JX, 1, 1]) if h_mask is None: hu_mask = None else: h_mask_aug = tf.tile(tf.expand_dims(h_mask, 3), [1, 1, 1, JQ]) u_mask_aug = tf.tile( tf.expand_dims(tf.expand_dims(u_mask, 1), 1), [1, M, JX, 1]) hu_mask = h_mask_aug & u_mask_aug u_logits = get_logits( [h_aug, u_aug], None, True, wd=config.wd, mask=hu_mask, is_train=is_train, func=config.logit_func, scope='u_logits') # [N, M, JX, JQ] u_a = softsel(u_aug, u_logits) # [N, M, JX, d] h_a = softsel(h, tf.reduce_max(u_logits, 
3)) # [N, M, d] h_a = tf.tile(tf.expand_dims(h_a, 2), [1, 1, JX, 1]) if tensor_dict is not None: a_u = tf.nn.softmax(u_logits) # [N, M, JX, JQ] a_h = tf.nn.softmax(tf.reduce_max(u_logits, 3)) tensor_dict['a_u'] = a_u tensor_dict['a_h'] = a_h variables = tf.get_collection( tf.GraphKeys.GLOBAL_VARIABLES, scope=tf.get_variable_scope().name) for var in variables: tensor_dict[var.name] = var return u_a, h_a def attention_layer(config, is_train, h, u, h_mask=None, u_mask=None, scope=None, tensor_dict=None): with tf.variable_scope(scope or 'attention_layer'): JX = tf.shape(h)[2] M = tf.shape(h)[1] JQ = tf.shape(u)[1] if config.q2c_att or config.c2q_att: u_a, h_a = bi_attention( config, is_train, h, u, h_mask=h_mask, u_mask=u_mask, tensor_dict=tensor_dict) if not config.c2q_att: u_a = tf.tile( tf.expand_dims(tf.expand_dims(tf.reduce_mean(u, 1), 1), 1), [1, M, JX, 1]) if config.q2c_att: p0 = tf.concat([h, u_a, h * u_a, h * h_a], 3) else: p0 = tf.concat([h, u_a, h * u_a], 3) return p0
google/active-qa
third_party/bi_att_flow/basic/model.py
Python
apache-2.0
21,148
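The attention machinery above (bi_attention / attention_layer) is built on softsel, which as used here amounts to a softmax-weighted sum of candidate vectors. A small standalone numpy sketch of that operation with toy shapes, written as an assumption-labelled illustration rather than the library's actual implementation:

import numpy as np

def softsel_np(target, logits):
    # target: [J, d] candidate vectors, logits: [J] unnormalized scores
    weights = np.exp(logits - logits.max())
    weights = weights / weights.sum()            # softmax over the J candidates
    return (weights[:, None] * target).sum(0)    # [d] attended summary vector

u = np.random.randn(4, 3)       # e.g. 4 question-word vectors of size 3
scores = np.random.randn(4)     # similarity logits against one context word
print(softsel_np(u, scores))    # one attended vector of size 3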
import json import logging import mimetypes import static_replace import xblock.reference.plugins from functools import partial from requests.auth import HTTPBasicAuth import dogstats_wrapper as dog_stats_api from opaque_keys import InvalidKeyError from django.conf import settings from django.contrib.auth.models import User from django.core.cache import cache from django.core.urlresolvers import reverse from django.http import Http404, HttpResponse from django.views.decorators.csrf import csrf_exempt from capa.xqueue_interface import XQueueInterface from courseware.access import has_access, get_user_role from courseware.masquerade import setup_masquerade from courseware.model_data import FieldDataCache, DjangoKeyValueStore from lms.lib.xblock.field_data import LmsFieldData from lms.lib.xblock.runtime import LmsModuleSystem, unquote_slashes, quote_slashes from edxmako.shortcuts import render_to_string from eventtracking import tracker from psychometrics.psychoanalyze import make_psychometrics_data_update_handler from student.models import anonymous_id_for_user, user_by_anonymous_id from xblock.core import XBlock from xblock.fields import Scope from xblock.runtime import KvsFieldData, KeyValueStore from xblock.exceptions import NoSuchHandlerError from xblock.django.request import django_to_webob_request, webob_to_django_response from xmodule.error_module import ErrorDescriptor, NonStaffErrorDescriptor from xmodule.exceptions import NotFoundError, ProcessingError from opaque_keys.edx.locations import SlashSeparatedCourseKey from xmodule.contentstore.django import contentstore from xmodule.modulestore.django import modulestore, ModuleI18nService from xmodule.modulestore.exceptions import ItemNotFoundError from xmodule.util.duedate import get_extended_due_date from xmodule_modifiers import ( replace_course_urls, replace_jump_to_id_urls, replace_static_urls, add_staff_markup, wrap_xblock, request_token ) from xmodule.lti_module import LTIModule from xmodule.x_module import XModuleDescriptor from util.json_request import JsonResponse from util.sandboxing import can_execute_unsafe_code, get_python_lib_zip log = logging.getLogger(__name__) if settings.XQUEUE_INTERFACE.get('basic_auth') is not None: REQUESTS_AUTH = HTTPBasicAuth(*settings.XQUEUE_INTERFACE['basic_auth']) else: REQUESTS_AUTH = None XQUEUE_INTERFACE = XQueueInterface( settings.XQUEUE_INTERFACE['url'], settings.XQUEUE_INTERFACE['django_auth'], REQUESTS_AUTH, ) # TODO: course_id and course_key are used interchangeably in this file, which is wrong. # Some brave person should make the variable names consistently someday, but the code's # coupled enough that it's kind of tricky--you've been warned! class LmsModuleRenderError(Exception): """ An exception class for exceptions thrown by module_render that don't fit well elsewhere """ pass def make_track_function(request): ''' Make a tracking function that logs what happened. For use in ModuleSystem. ''' import track.views def function(event_type, event): return track.views.server_track(request, event_type, event, page='x_module') return function def toc_for_course(request, course, active_chapter, active_section, field_data_cache): ''' Create a table of contents from the module store Return format: [ {'display_name': name, 'url_name': url_name, 'sections': SECTIONS, 'active': bool}, ... ] where SECTIONS is a list [ {'display_name': name, 'url_name': url_name, 'format': format, 'due': due, 'active' : bool, 'graded': bool}, ...] 
active is set for the section and chapter corresponding to the passed parameters, which are expected to be url_names of the chapter+section. Everything else comes from the xml, or defaults to "". chapters with name 'hidden' are skipped. NOTE: assumes that if we got this far, user has access to course. Returns None if this is not the case. field_data_cache must include data from the course module and 2 levels of its descendents ''' with modulestore().bulk_operations(course.id): course_module = get_module_for_descriptor(request.user, request, course, field_data_cache, course.id) if course_module is None: return None chapters = list() for chapter in course_module.get_display_items(): if chapter.hide_from_toc: continue sections = list() for section in chapter.get_display_items(): active = (chapter.url_name == active_chapter and section.url_name == active_section) if not section.hide_from_toc: sections.append({'display_name': section.display_name_with_default, 'url_name': section.url_name, 'format': section.format if section.format is not None else '', 'due': get_extended_due_date(section), 'active': active, 'graded': section.graded, }) chapters.append({'display_name': chapter.display_name_with_default, 'url_name': chapter.url_name, 'sections': sections, 'active': chapter.url_name == active_chapter}) return chapters def get_module(user, request, usage_key, field_data_cache, position=None, log_if_not_found=True, wrap_xmodule_display=True, grade_bucket_type=None, depth=0, static_asset_path=''): """ Get an instance of the xmodule class identified by location, setting the state based on an existing StudentModule, or creating one if none exists. Arguments: - user : User for whom we're getting the module - request : current django HTTPrequest. Note: request.user isn't used for anything--all auth and such works based on user. - usage_key : A UsageKey object identifying the module to load - field_data_cache : a FieldDataCache - position : extra information from URL for user-specified position within module - log_if_not_found : If this is True, we log a debug message if we cannot find the requested xmodule. - wrap_xmodule_display : If this is True, wrap the output display in a single div to allow for the XModule javascript to be bound correctly - depth : number of levels of descendents to cache when loading this module. None means cache all descendents - static_asset_path : static asset path to use (overrides descriptor's value); needed by get_course_info_section, because info section modules do not have a course as the parent module, and thus do not inherit this lms key value. Returns: xmodule instance, or None if the user does not have access to the module. If there's an error, will try to return an instance of ErrorModule if possible. If not possible, return None. """ try: descriptor = modulestore().get_item(usage_key, depth=depth) return get_module_for_descriptor(user, request, descriptor, field_data_cache, usage_key.course_key, position=position, wrap_xmodule_display=wrap_xmodule_display, grade_bucket_type=grade_bucket_type, static_asset_path=static_asset_path) except ItemNotFoundError: if log_if_not_found: log.debug("Error in get_module: ItemNotFoundError") return None except: # Something has gone terribly wrong, but still not letting it turn into a 500. 
log.exception("Error in get_module") return None def get_xqueue_callback_url_prefix(request): """ Calculates default prefix based on request, but allows override via settings This is separated from get_module_for_descriptor so that it can be called by the LMS before submitting background tasks to run. The xqueue callbacks should go back to the LMS, not to the worker. """ prefix = '{proto}://{host}'.format( proto=request.META.get('HTTP_X_FORWARDED_PROTO', 'https' if request.is_secure() else 'http'), host=request.get_host() ) return settings.XQUEUE_INTERFACE.get('callback_url', prefix) def get_module_for_descriptor(user, request, descriptor, field_data_cache, course_id, position=None, wrap_xmodule_display=True, grade_bucket_type=None, static_asset_path=''): """ Implements get_module, extracting out the request-specific functionality. See get_module() docstring for further details. """ # allow course staff to masquerade as student if has_access(user, 'staff', descriptor, course_id): setup_masquerade(request, True) track_function = make_track_function(request) xqueue_callback_url_prefix = get_xqueue_callback_url_prefix(request) user_location = getattr(request, 'session', {}).get('country_code') return get_module_for_descriptor_internal( user=user, descriptor=descriptor, field_data_cache=field_data_cache, course_id=course_id, track_function=track_function, xqueue_callback_url_prefix=xqueue_callback_url_prefix, position=position, wrap_xmodule_display=wrap_xmodule_display, grade_bucket_type=grade_bucket_type, static_asset_path=static_asset_path, user_location=user_location, request_token=request_token(request), ) def get_module_system_for_user(user, field_data_cache, # Arguments preceding this comment have user binding, those following don't descriptor, course_id, track_function, xqueue_callback_url_prefix, request_token, position=None, wrap_xmodule_display=True, grade_bucket_type=None, static_asset_path='', user_location=None): """ Helper function that returns a module system and student_data bound to a user and a descriptor. The purpose of this function is to factor out everywhere a user is implicitly bound when creating a module, to allow an existing module to be re-bound to a user. Most of the user bindings happen when creating the closures that feed the instantiation of ModuleSystem. The arguments fall into two categories: those that have explicit or implicit user binding, which are user and field_data_cache, and those don't and are just present so that ModuleSystem can be instantiated, which are all the other arguments. Ultimately, this isn't too different than how get_module_for_descriptor_internal was before refactoring. Arguments: see arguments for get_module() request_token (str): A token unique to the request use by xblock initialization Returns: (LmsModuleSystem, KvsFieldData): (module system, student_data) bound to, primarily, the user and descriptor """ student_data = KvsFieldData(DjangoKeyValueStore(field_data_cache)) def make_xqueue_callback(dispatch='score_update'): # Fully qualified callback URL for external queueing system relative_xqueue_callback_url = reverse( 'xqueue_callback', kwargs=dict( course_id=course_id.to_deprecated_string(), userid=str(user.id), mod_id=descriptor.location.to_deprecated_string(), dispatch=dispatch ), ) return xqueue_callback_url_prefix + relative_xqueue_callback_url # Default queuename is course-specific and is derived from the course that # contains the current module. 
# TODO: Queuename should be derived from 'course_settings.json' of each course xqueue_default_queuename = descriptor.location.org + '-' + descriptor.location.course xqueue = { 'interface': XQUEUE_INTERFACE, 'construct_callback': make_xqueue_callback, 'default_queuename': xqueue_default_queuename.replace(' ', '_'), 'waittime': settings.XQUEUE_WAITTIME_BETWEEN_REQUESTS } # This is a hacky way to pass settings to the combined open ended xmodule # It needs an S3 interface to upload images to S3 # It needs the open ended grading interface in order to get peer grading to be done # this first checks to see if the descriptor is the correct one, and only sends settings if it is # Get descriptor metadata fields indicating needs for various settings needs_open_ended_interface = getattr(descriptor, "needs_open_ended_interface", False) needs_s3_interface = getattr(descriptor, "needs_s3_interface", False) # Initialize interfaces to None open_ended_grading_interface = None s3_interface = None # Create interfaces if needed if needs_open_ended_interface: open_ended_grading_interface = settings.OPEN_ENDED_GRADING_INTERFACE open_ended_grading_interface['mock_peer_grading'] = settings.MOCK_PEER_GRADING open_ended_grading_interface['mock_staff_grading'] = settings.MOCK_STAFF_GRADING if needs_s3_interface: s3_interface = { 'access_key': getattr(settings, 'AWS_ACCESS_KEY_ID', ''), 'secret_access_key': getattr(settings, 'AWS_SECRET_ACCESS_KEY', ''), 'storage_bucket_name': getattr(settings, 'AWS_STORAGE_BUCKET_NAME', 'openended') } def inner_get_module(descriptor): """ Delegate to get_module_for_descriptor_internal() with all values except `descriptor` set. Because it does an access check, it may return None. """ # TODO: fix this so that make_xqueue_callback uses the descriptor passed into # inner_get_module, not the parent's callback. Add it as an argument.... return get_module_for_descriptor_internal( user=user, descriptor=descriptor, field_data_cache=field_data_cache, course_id=course_id, track_function=track_function, xqueue_callback_url_prefix=xqueue_callback_url_prefix, position=position, wrap_xmodule_display=wrap_xmodule_display, grade_bucket_type=grade_bucket_type, static_asset_path=static_asset_path, user_location=user_location, request_token=request_token, ) def handle_grade_event(block, event_type, event): user_id = event.get('user_id', user.id) # Construct the key for the module key = KeyValueStore.Key( scope=Scope.user_state, user_id=user_id, block_scope_id=descriptor.location, field_name='grade' ) student_module = field_data_cache.find_or_create(key) # Update the grades student_module.grade = event.get('value') student_module.max_grade = event.get('max_value') # Save all changes to the underlying KeyValueStore student_module.save() # Bin score into range and increment stats score_bucket = get_score_bucket(student_module.grade, student_module.max_grade) tags = [ u"org:{}".format(course_id.org), u"course:{}".format(course_id), u"score_bucket:{0}".format(score_bucket) ] if grade_bucket_type is not None: tags.append('type:%s' % grade_bucket_type) dog_stats_api.increment("lms.courseware.question_answered", tags=tags) def publish(block, event_type, event): """A function that allows XModules to publish events.""" if event_type == 'grade': handle_grade_event(block, event_type, event) else: track_function(event_type, event) def rebind_noauth_module_to_user(module, real_user): """ A function that allows a module to get re-bound to a real user if it was previously bound to an AnonymousUser. 
Will only work within a module bound to an AnonymousUser, e.g. one that's instantiated by the noauth_handler. Arguments: module (any xblock type): the module to rebind real_user (django.contrib.auth.models.User): the user to bind to Returns: nothing (but the side effect is that module is re-bound to real_user) """ if user.is_authenticated(): err_msg = ("rebind_noauth_module_to_user can only be called from a module bound to " "an anonymous user") log.error(err_msg) raise LmsModuleRenderError(err_msg) field_data_cache_real_user = FieldDataCache.cache_for_descriptor_descendents( course_id, real_user, module.descriptor ) (inner_system, inner_student_data) = get_module_system_for_user( user=real_user, field_data_cache=field_data_cache_real_user, # These have implicit user bindings, rest of args considered not to descriptor=module.descriptor, course_id=course_id, track_function=track_function, xqueue_callback_url_prefix=xqueue_callback_url_prefix, position=position, wrap_xmodule_display=wrap_xmodule_display, grade_bucket_type=grade_bucket_type, static_asset_path=static_asset_path, user_location=user_location, request_token=request_token ) # rebinds module to a different student. We'll change system, student_data, and scope_ids module.descriptor.bind_for_student( inner_system, LmsFieldData(module.descriptor._field_data, inner_student_data) # pylint: disable=protected-access ) module.descriptor.scope_ids = ( module.descriptor.scope_ids._replace(user_id=real_user.id) # pylint: disable=protected-access ) module.scope_ids = module.descriptor.scope_ids # this is needed b/c NamedTuples are immutable # now bind the module to the new ModuleSystem instance and vice-versa module.runtime = inner_system inner_system.xmodule_instance = module # Build a list of wrapping functions that will be applied in order # to the Fragment content coming out of the xblocks that are about to be rendered. block_wrappers = [] # Wrap the output display in a single div to allow for the XModule # javascript to be bound correctly if wrap_xmodule_display is True: block_wrappers.append(partial( wrap_xblock, 'LmsRuntime', extra_data={'course-id': course_id.to_deprecated_string()}, usage_id_serializer=lambda usage_id: quote_slashes(usage_id.to_deprecated_string()), request_token=request_token, )) # TODO (cpennington): When modules are shared between courses, the static # prefix is going to have to be specific to the module, not the directory # that the xml was loaded from # Rewrite urls beginning in /static to point to course-specific content block_wrappers.append(partial( replace_static_urls, getattr(descriptor, 'data_dir', None), course_id=course_id, static_asset_path=static_asset_path or descriptor.static_asset_path )) # Allow URLs of the form '/course/' refer to the root of multicourse directory # hierarchy of this course block_wrappers.append(partial(replace_course_urls, course_id)) # this will rewrite intra-courseware links (/jump_to_id/<id>). This format # is an improvement over the /course/... format for studio authored courses, # because it is agnostic to course-hierarchy. # NOTE: module_id is empty string here. The 'module_id' will get assigned in the replacement # function, we just need to specify something to get the reverse() to work. 
block_wrappers.append(partial( replace_jump_to_id_urls, course_id, reverse('jump_to_id', kwargs={'course_id': course_id.to_deprecated_string(), 'module_id': ''}), )) if settings.FEATURES.get('DISPLAY_DEBUG_INFO_TO_STAFF'): if has_access(user, 'staff', descriptor, course_id): has_instructor_access = has_access(user, 'instructor', descriptor, course_id) block_wrappers.append(partial(add_staff_markup, user, has_instructor_access)) # These modules store data using the anonymous_student_id as a key. # To prevent loss of data, we will continue to provide old modules with # the per-student anonymized id (as we have in the past), # while giving selected modules a per-course anonymized id. # As we have the time to manually test more modules, we can add to the list # of modules that get the per-course anonymized id. is_pure_xblock = isinstance(descriptor, XBlock) and not isinstance(descriptor, XModuleDescriptor) module_class = getattr(descriptor, 'module_class', None) is_lti_module = not is_pure_xblock and issubclass(module_class, LTIModule) if is_pure_xblock or is_lti_module: anonymous_student_id = anonymous_id_for_user(user, course_id) else: anonymous_student_id = anonymous_id_for_user(user, None) system = LmsModuleSystem( track_function=track_function, render_template=render_to_string, static_url=settings.STATIC_URL, xqueue=xqueue, # TODO (cpennington): Figure out how to share info between systems filestore=descriptor.runtime.resources_fs, get_module=inner_get_module, user=user, debug=settings.DEBUG, hostname=settings.SITE_NAME, # TODO (cpennington): This should be removed when all html from # a module is coming through get_html and is therefore covered # by the replace_static_urls code below replace_urls=partial( static_replace.replace_static_urls, data_directory=getattr(descriptor, 'data_dir', None), course_id=course_id, static_asset_path=static_asset_path or descriptor.static_asset_path, ), replace_course_urls=partial( static_replace.replace_course_urls, course_key=course_id ), replace_jump_to_id_urls=partial( static_replace.replace_jump_to_id_urls, course_id=course_id, jump_to_id_base_url=reverse('jump_to_id', kwargs={'course_id': course_id.to_deprecated_string(), 'module_id': ''}) ), node_path=settings.NODE_PATH, publish=publish, anonymous_student_id=anonymous_student_id, course_id=course_id, open_ended_grading_interface=open_ended_grading_interface, s3_interface=s3_interface, cache=cache, can_execute_unsafe_code=(lambda: can_execute_unsafe_code(course_id)), get_python_lib_zip=(lambda: get_python_lib_zip(contentstore, course_id)), # TODO: When we merge the descriptor and module systems, we can stop reaching into the mixologist (cpennington) mixins=descriptor.runtime.mixologist._mixins, # pylint: disable=protected-access wrappers=block_wrappers, get_real_user=user_by_anonymous_id, services={ 'i18n': ModuleI18nService(), 'fs': xblock.reference.plugins.FSService(), }, get_user_role=lambda: get_user_role(user, course_id), descriptor_runtime=descriptor.runtime, rebind_noauth_module_to_user=rebind_noauth_module_to_user, user_location=user_location, ) # pass position specified in URL to module through ModuleSystem if position is not None: try: position = int(position) except (ValueError, TypeError): log.exception('Non-integer %r passed as position.', position) position = None system.set('position', position) if settings.FEATURES.get('ENABLE_PSYCHOMETRICS') and user.is_authenticated(): system.set( 'psychometrics_handler', # set callback for updating PsychometricsData 
make_psychometrics_data_update_handler(course_id, user, descriptor.location) ) system.set(u'user_is_staff', has_access(user, u'staff', descriptor.location, course_id)) system.set(u'user_is_admin', has_access(user, u'staff', 'global')) # make an ErrorDescriptor -- assuming that the descriptor's system is ok if has_access(user, u'staff', descriptor.location, course_id): system.error_descriptor_class = ErrorDescriptor else: system.error_descriptor_class = NonStaffErrorDescriptor return system, student_data def get_module_for_descriptor_internal(user, descriptor, field_data_cache, course_id, # pylint: disable=invalid-name track_function, xqueue_callback_url_prefix, request_token, position=None, wrap_xmodule_display=True, grade_bucket_type=None, static_asset_path='', user_location=None): """ Actually implement get_module, without requiring a request. See get_module() docstring for further details. Arguments: request_token (str): A unique token for this request, used to isolate xblock rendering """ # Do not check access when it's a noauth request. if getattr(user, 'known', True): # Short circuit--if the user shouldn't have access, bail without doing any work if not has_access(user, 'load', descriptor, course_id): return None (system, student_data) = get_module_system_for_user( user=user, field_data_cache=field_data_cache, # These have implicit user bindings, the rest of args are considered not to descriptor=descriptor, course_id=course_id, track_function=track_function, xqueue_callback_url_prefix=xqueue_callback_url_prefix, position=position, wrap_xmodule_display=wrap_xmodule_display, grade_bucket_type=grade_bucket_type, static_asset_path=static_asset_path, user_location=user_location, request_token=request_token ) descriptor.bind_for_student(system, LmsFieldData(descriptor._field_data, student_data)) # pylint: disable=protected-access descriptor.scope_ids = descriptor.scope_ids._replace(user_id=user.id) # pylint: disable=protected-access return descriptor def find_target_student_module(request, user_id, course_id, mod_id): """ Retrieve target StudentModule """ course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) usage_key = course_id.make_usage_key_from_deprecated_string(mod_id) user = User.objects.get(id=user_id) field_data_cache = FieldDataCache.cache_for_descriptor_descendents( course_id, user, modulestore().get_item(usage_key), depth=0, select_for_update=True ) instance = get_module(user, request, usage_key, field_data_cache, grade_bucket_type='xqueue') if instance is None: msg = "No module {0} for user {1}--access denied?".format(mod_id, user) log.debug(msg) raise Http404 return instance @csrf_exempt def xqueue_callback(request, course_id, userid, mod_id, dispatch): ''' Entry point for graded results from the queueing system. ''' data = request.POST.copy() # Test xqueue package, which we expect to be: # xpackage = {'xqueue_header': json.dumps({'lms_key':'secretkey',...}), # 'xqueue_body' : 'Message from grader'} for key in ['xqueue_header', 'xqueue_body']: if key not in data: raise Http404 header = json.loads(data['xqueue_header']) if not isinstance(header, dict) or 'lms_key' not in header: raise Http404 instance = find_target_student_module(request, userid, course_id, mod_id) # Transfer 'queuekey' from xqueue response header to the data. 
# This is required to use the interface defined by 'handle_ajax' data.update({'queuekey': header['lms_key']}) # We go through the "AJAX" path # So far, the only dispatch from xqueue will be 'score_update' try: # Can ignore the return value--not used for xqueue_callback instance.handle_ajax(dispatch, data) # Save any state that has changed to the underlying KeyValueStore instance.save() except: log.exception("error processing ajax call") raise return HttpResponse("") @csrf_exempt def handle_xblock_callback_noauth(request, course_id, usage_id, handler, suffix=None): """ Entry point for unauthenticated XBlock handlers. """ request.user.known = False return _invoke_xblock_handler(request, course_id, usage_id, handler, suffix, request.user) def handle_xblock_callback(request, course_id, usage_id, handler, suffix=None): """ Generic view for extensions. This is where AJAX calls go. Arguments: - request -- the django request. - location -- the module location. Used to look up the XModule instance - course_id -- defines the course context for this request. Return 403 error if the user is not logged in. Raises Http404 if the location and course_id do not identify a valid module, the module is not accessible by the user, or the module raises NotFoundError. If the module raises any other error, it will escape this function. """ if not request.user.is_authenticated(): return HttpResponse('Unauthenticated', status=403) return _invoke_xblock_handler(request, course_id, usage_id, handler, suffix, request.user) def xblock_resource(request, block_type, uri): # pylint: disable=unused-argument """ Return a package resource for the specified XBlock. """ try: xblock_class = XBlock.load_class(block_type, select=settings.XBLOCK_SELECT_FUNCTION) content = xblock_class.open_local_resource(uri) except IOError: log.info('Failed to load xblock resource', exc_info=True) raise Http404 except Exception: # pylint: disable-msg=broad-except log.error('Failed to load xblock resource', exc_info=True) raise Http404 mimetype, _ = mimetypes.guess_type(uri) return HttpResponse(content, mimetype=mimetype) def _invoke_xblock_handler(request, course_id, usage_id, handler, suffix, user): """ Invoke an XBlock handler, either authenticated or not. 
Arguments: request (HttpRequest): the current request course_id (str): A string of the form org/course/run usage_id (str): A string of the form i4x://org/course/category/name@revision handler (str): The name of the handler to invoke suffix (str): The suffix to pass to the handler when invoked user (User): The currently logged in user """ try: course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) usage_key = course_id.make_usage_key_from_deprecated_string(unquote_slashes(usage_id)) except InvalidKeyError: raise Http404("Invalid location") # Check submitted files files = request.FILES or {} error_msg = _check_files_limits(files) if error_msg: return HttpResponse(json.dumps({'success': error_msg})) try: descriptor = modulestore().get_item(usage_key) except ItemNotFoundError: log.warn( "Invalid location for course id {course_id}: {usage_key}".format( course_id=usage_key.course_key, usage_key=usage_key ) ) raise Http404 tracking_context_name = 'module_callback_handler' tracking_context = { 'module': { 'display_name': descriptor.display_name_with_default, } } field_data_cache = FieldDataCache.cache_for_descriptor_descendents( course_id, user, descriptor ) instance = get_module(user, request, usage_key, field_data_cache, grade_bucket_type='ajax') if instance is None: # Either permissions just changed, or someone is trying to be clever # and load something they shouldn't have access to. log.debug("No module %s for user %s -- access denied?", usage_key, user) raise Http404 req = django_to_webob_request(request) try: with tracker.get_tracker().context(tracking_context_name, tracking_context): resp = instance.handle(handler, req, suffix) except NoSuchHandlerError: log.exception("XBlock %s attempted to access missing handler %r", instance, handler) raise Http404 # If we can't find the module, respond with a 404 except NotFoundError: log.exception("Module indicating to user that request doesn't exist") raise Http404 # For XModule-specific errors, we log the error and respond with an error message except ProcessingError as err: log.warning("Module encountered an error while processing AJAX call", exc_info=True) return JsonResponse(object={'success': err.args[0]}, status=200) # If any other error occurred, re-raise it to trigger a 500 response except Exception: log.exception("error executing xblock handler") raise return webob_to_django_response(resp) def get_score_bucket(grade, max_grade): """ Function to split arbitrary score ranges into 3 buckets. Used with statsd tracking. """ score_bucket = "incorrect" if(grade > 0 and grade < max_grade): score_bucket = "partial" elif(grade == max_grade): score_bucket = "correct" return score_bucket def _check_files_limits(files): """ Check if the files in a request are under the limits defined by `settings.MAX_FILEUPLOADS_PER_INPUT` and `settings.STUDENT_FILEUPLOAD_MAX_SIZE`. Returns None if files are correct or an error messages otherwise. """ for fileinput_id in files.keys(): inputfiles = files.getlist(fileinput_id) # Check number of files submitted if len(inputfiles) > settings.MAX_FILEUPLOADS_PER_INPUT: msg = 'Submission aborted! Maximum %d files may be submitted at once' % \ settings.MAX_FILEUPLOADS_PER_INPUT return msg # Check file sizes for inputfile in inputfiles: if inputfile.size > settings.STUDENT_FILEUPLOAD_MAX_SIZE: # Bytes msg = 'Submission aborted! Your file "%s" is too large (max size: %d MB)' % \ (inputfile.name, settings.STUDENT_FILEUPLOAD_MAX_SIZE / (1000 ** 2)) return msg return None
UXE/local-edx
lms/djangoapps/courseware/module_render.py
Python
agpl-3.0
35,275
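
The statsd instrumentation in the module_render.py record above buckets every graded answer with get_score_bucket. Below is a minimal, self-contained sketch of that bucketing logic; it copies the thresholds from the function in the file so it can be run without Django or the edX platform, and is illustrative rather than the platform's actual import path.

def get_score_bucket(grade, max_grade):
    # Mirrors the bucketing in module_render.get_score_bucket: zero scores are
    # "incorrect", full marks are "correct", anything strictly in between is "partial".
    score_bucket = "incorrect"
    if grade > 0 and grade < max_grade:
        score_bucket = "partial"
    elif grade == max_grade:
        score_bucket = "correct"
    return score_bucket


if __name__ == "__main__":
    # Expected buckets for a 10-point problem.
    print(get_score_bucket(0, 10))    # incorrect
    print(get_score_bucket(4, 10))    # partial
    print(get_score_bucket(10, 10))   # correct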
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals

import six
import unittest

import pure_interface


class SomeOtherMetaClass(pure_interface.PureInterfaceType):
    def __new__(mcs, name, bases, clsdict):
        cls = pure_interface.PureInterfaceType.__new__(mcs, name, bases, clsdict)
        return cls


@six.add_metaclass(SomeOtherMetaClass)
class InnocentBystander(object):
    def method(self):
        pass


@six.add_metaclass(SomeOtherMetaClass)
class InnocentBystanderWithABC(object):
    @pure_interface.abstractmethod
    def method(self):
        pass

    @pure_interface.abstractproperty
    def prop(self):
        pass


class ABCImpl(InnocentBystanderWithABC):
    def __init__(self):
        self.prop = 3

    def method(self):
        pass


class MyInterface(pure_interface.PureInterface):
    def method2(self):
        pass


class SubclassWithInterface(InnocentBystander, MyInterface):
    def method(self):
        pass


class SubSubclassWithInterface(SubclassWithInterface):
    def foo(self):
        pass


class SubSubSubclassWithInterface(SubSubclassWithInterface):
    def bar(self):
        pass


class TestMetaClassMixingChecks(unittest.TestCase):
    def test_submeta_class(self):
        try:
            innocent_bystander = InnocentBystander()
            innocent_bystander.method()
        except Exception as exc:
            self.fail('No exception expected. Got\n' + str(exc))

    def test_submeta_class_with_interface(self):
        with self.assertRaises(TypeError):
            SubclassWithInterface()

    def test_bystander(self):
        # check that property patching is not done to classes that do not inherit an interface
        with self.assertRaises(TypeError):
            ABCImpl()

    def test_dir_subclass(self):
        listing = dir(SubclassWithInterface)
        self.assertIn('method2', listing)
        self.assertIn('method', listing)

    def test_dir_subsubclass(self):
        listing = dir(SubSubclassWithInterface)
        self.assertIn('method2', listing)
        self.assertIn('method', listing)
        self.assertIn('foo', listing)

    def test_dir_subsubsubclass(self):
        listing = dir(SubSubSubclassWithInterface)
        self.assertIn('method2', listing)
        self.assertIn('method', listing)
        self.assertIn('foo', listing)
        self.assertIn('bar', listing)
tim-mitchell/pure_interface
tests/test_meta_classes.py
Python
mit
2,516
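
The test file above hinges on one behaviour of pure_interface: a concrete class that inherits an interface but leaves one of its methods unimplemented cannot be instantiated. Here is a small sketch of that behaviour, assuming the same old PureInterface spelling the tests use; the Named/GoodImpl/BadImpl names are made up for illustration.

import pure_interface


class Named(pure_interface.PureInterface):
    # On a PureInterface subclass an empty method body marks a required method.
    def name(self):
        pass


class GoodImpl(Named, object):
    # Implements everything the interface asks for.
    def name(self):
        return 'good'


class BadImpl(Named, object):
    # Concrete class (it adds a real method) but forgets name(),
    # mirroring SubclassWithInterface in the tests above.
    def other(self):
        return 'oops'


print(GoodImpl().name())   # -> good
try:
    BadImpl()              # incomplete implementation
except TypeError as exc:   # the same TypeError the test suite asserts
    print('refused:', exc)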
import dedupe
import dedupe.api
import unittest
import itertools
import warnings
from collections import OrderedDict


def icfi(x):
    return list(itertools.chain.from_iterable(x))


DATA_SAMPLE = [({'age': '27', 'name': 'Kyle'}, {'age': '50', 'name': 'Bob'}),
               ({'age': '27', 'name': 'Kyle'}, {'age': '35', 'name': 'William'}),
               ({'age': '10', 'name': 'Sue'}, {'age': '35', 'name': 'William'}),
               ({'age': '27', 'name': 'Kyle'}, {'age': '20', 'name': 'Jimmy'}),
               ({'age': '75', 'name': 'Charlie'}, {'age': '21', 'name': 'Jimbo'})]

data_dict = OrderedDict(((0, {'name': 'Bob', 'age': '51'}),
                         (1, {'name': 'Linda', 'age': '50'}),
                         (2, {'name': 'Gene', 'age': '12'}),
                         (3, {'name': 'Tina', 'age': '15'}),
                         (4, {'name': 'Bob B.', 'age': '51'}),
                         (5, {'name': 'bob belcher', 'age': '51'}),
                         (6, {'name': 'linda ', 'age': '50'})))

data_dict_2 = OrderedDict(((7, {'name': 'BOB', 'age': '51'}),
                           (8, {'name': 'LINDA', 'age': '50'}),
                           (9, {'name': 'GENE', 'age': '12'}),
                           (10, {'name': 'TINA', 'age': '15'}),
                           (11, {'name': 'BOB B.', 'age': '51'}),
                           (12, {'name': 'BOB BELCHER', 'age': '51'}),
                           (13, {'name': 'LINDA ', 'age': '50'})))


class ActiveMatch(unittest.TestCase):
    def setUp(self):
        self.field_definition = [{'field': 'name', 'type': 'String'},
                                 {'field': 'age', 'type': 'String'}]

    def test_initialize_fields(self):
        self.assertRaises(TypeError, dedupe.api.ActiveMatching)

        with self.assertRaises(ValueError):
            dedupe.api.ActiveMatching([],)

        with self.assertRaises(ValueError):
            dedupe.api.ActiveMatching([{'field': 'name',
                                        'type': 'Custom',
                                        'comparator': lambda x: 1}],)

        with self.assertRaises(ValueError):
            dedupe.api.ActiveMatching([{'field': 'name',
                                        'type': 'Custom',
                                        'comparator': lambda x: 1},
                                       {'field': 'age',
                                        'type': 'Custom',
                                        'comparator': lambda x: 1}],)

        dedupe.api.ActiveMatching([{'field': 'name',
                                    'type': 'Custom',
                                    'comparator': lambda x: 1},
                                   {'field': 'age', 'type': 'String'}],)

    def test_check_record(self):
        matcher = dedupe.api.ActiveMatching(self.field_definition)

        self.assertRaises(ValueError, matcher._checkRecordPair, ())
        self.assertRaises(ValueError, matcher._checkRecordPair, (1, 2))
        self.assertRaises(ValueError, matcher._checkRecordPair, (1, 2, 3))
        self.assertRaises(ValueError, matcher._checkRecordPair, ({}, {}))

        matcher._checkRecordPair(({'name': 'Frank', 'age': '72'},
                                  {'name': 'Bob', 'age': '27'}))

    def test_markPair(self):
        from collections import OrderedDict
        good_training_pairs = OrderedDict((('match', DATA_SAMPLE[3:5]),
                                           ('distinct', DATA_SAMPLE[0:3])))
        bad_training_pairs = {'non_dupes': DATA_SAMPLE[0:3],
                              'match': DATA_SAMPLE[3:5]}

        matcher = dedupe.api.ActiveMatching(self.field_definition)

        self.assertRaises(ValueError, matcher.mark_pairs, bad_training_pairs)
        matcher.mark_pairs(good_training_pairs)

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            matcher.mark_pairs({'match': [], 'distinct': []})
            assert len(w) == 1
            assert str(
                w[-1].message) == "Didn't return any labeled record pairs"


if __name__ == "__main__":
    unittest.main()
datamade/dedupe
tests/test_api.py
Python
mit
3,933
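
The dedupe tests above exercise the two structures ActiveMatching expects: a field definition (a list of dicts with 'field' and 'type') and labeled pairs keyed only by 'match' and 'distinct'. A short sketch of that flow follows, assuming the same dedupe version the tests target; the record values are invented.

import dedupe.api

# Field definition in the same shape the tests use: one dict per field.
fields = [{'field': 'name', 'type': 'String'},
          {'field': 'age', 'type': 'String'}]

matcher = dedupe.api.ActiveMatching(fields)

# mark_pairs() only accepts the keys 'match' and 'distinct'; each value is a
# list of two-record tuples. Any other key raises ValueError, as tested above.
labeled = {
    'match': [({'name': 'Bob', 'age': '51'}, {'name': 'Bob B.', 'age': '51'})],
    'distinct': [({'name': 'Bob', 'age': '51'}, {'name': 'Gene', 'age': '12'})],
}
matcher.mark_pairs(labeled)
print('labeled %d match and %d distinct pairs' %
      (len(labeled['match']), len(labeled['distinct'])))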
"""Some utility functions""" # Authors: Eric Larson <larsoner@uw.edu> # # License: BSD (3-clause) import warnings import operator from copy import deepcopy import subprocess import importlib import os import os.path as op import inspect import sys import time import tempfile import traceback import ssl from shutil import rmtree import atexit import json from functools import partial from distutils.version import LooseVersion import logging import datetime from timeit import default_timer as clock from threading import Timer import numpy as np import scipy as sp from ._externals import decorator # set this first thing to make sure it "takes" try: import pyglet pyglet.options['debug_gl'] = False del pyglet except Exception: pass # for py3k (eventually) if sys.version.startswith('2'): string_types = basestring # noqa input = raw_input # noqa, input is raw_input in py3k text_type = unicode # noqa from __builtin__ import reload from urllib2 import urlopen # noqa from cStringIO import StringIO # noqa else: string_types = str text_type = str from urllib.request import urlopen input = input from io import StringIO # noqa, analysis:ignore from importlib import reload # noqa, analysis:ignore ############################################################################### # LOGGING EXP = 25 logging.addLevelName(EXP, 'EXP') def exp(self, message, *args, **kwargs): """Experiment-level logging.""" self.log(EXP, message, *args, **kwargs) logging.Logger.exp = exp logger = logging.getLogger('expyfun') def flush_logger(): """Flush expyfun logger""" for handler in logger.handlers: handler.flush() def set_log_level(verbose=None, return_old_level=False): """Convenience function for setting the logging level Parameters ---------- verbose : bool, str, int, or None The verbosity of messages to print. If a str, it can be either DEBUG, INFO, WARNING, ERROR, or CRITICAL. Note that these are for convenience and are equivalent to passing in logging.DEBUG, etc. For bool, True is the same as 'INFO', False is the same as 'WARNING'. If None, the environment variable EXPYFUN_LOGGING_LEVEL is read, and if it doesn't exist, defaults to INFO. return_old_level : bool If True, return the old verbosity level. """ if verbose is None: verbose = get_config('EXPYFUN_LOGGING_LEVEL', 'INFO') elif isinstance(verbose, bool): verbose = 'INFO' if verbose is True else 'WARNING' if isinstance(verbose, string_types): verbose = verbose.upper() logging_types = dict(DEBUG=logging.DEBUG, INFO=logging.INFO, WARNING=logging.WARNING, ERROR=logging.ERROR, CRITICAL=logging.CRITICAL) if verbose not in logging_types: raise ValueError('verbose must be of a valid type') verbose = logging_types[verbose] old_verbose = logger.level logger.setLevel(verbose) return (old_verbose if return_old_level else None) def set_log_file(fname=None, output_format='%(asctime)s - %(levelname)-7s - %(message)s', overwrite=None): """Convenience function for setting the log to print to a file Parameters ---------- fname : str, or None Filename of the log to print to. If None, stdout is used. To suppress log outputs, use set_log_level('WARN'). output_format : str Format of the output messages. See the following for examples: http://docs.python.org/dev/howto/logging.html e.g., "%(asctime)s - %(levelname)s - %(message)s". overwrite : bool, or None Overwrite the log file (if it exists). Otherwise, statements will be appended to the log (default). None is the same as False, but additionally raises a warning to notify the user that log entries will be appended. 
""" handlers = logger.handlers for h in handlers: if isinstance(h, logging.FileHandler): h.close() logger.removeHandler(h) if fname is not None: if op.isfile(fname) and overwrite is None: warnings.warn('Log entries will be appended to the file. Use ' 'overwrite=False to avoid this message in the ' 'future.') mode = 'w' if overwrite is True else 'a' lh = logging.FileHandler(fname, mode=mode) else: """ we should just be able to do: lh = logging.StreamHandler(sys.stdout) but because doctests uses some magic on stdout, we have to do this: """ lh = logging.StreamHandler(WrapStdOut()) lh.setFormatter(logging.Formatter(output_format)) # actually add the stream handler logger.addHandler(lh) ############################################################################### # RANDOM UTILITIES building_doc = any('sphinx-build' in ((''.join(i[4]).lower() + i[1]) if i[4] is not None else '') for i in inspect.stack()) def run_subprocess(command, **kwargs): """Run command using subprocess.Popen Run command and wait for command to complete. If the return code was zero then return, otherwise raise CalledProcessError. By default, this will also add stdout= and stderr=subproces.PIPE to the call to Popen to suppress printing to the terminal. Parameters ---------- command : list of str Command to run as subprocess (see subprocess.Popen documentation). **kwargs : objects Keywoard arguments to pass to ``subprocess.Popen``. Returns ------- stdout : str Stdout returned by the process. stderr : str Stderr returned by the process. """ # code adapted with permission from mne-python kw = dict(stderr=subprocess.PIPE, stdout=subprocess.PIPE) kw.update(kwargs) p = subprocess.Popen(command, **kw) stdout_, stderr = p.communicate() output = (stdout_.decode(), stderr.decode()) if p.returncode: err_fun = subprocess.CalledProcessError.__init__ if 'output' in _get_args(err_fun): raise subprocess.CalledProcessError(p.returncode, command, output) else: raise subprocess.CalledProcessError(p.returncode, command) return output class ZeroClock(object): """Clock that uses "clock" function but starts at zero on init.""" def __init__(self): self._start_time = clock() def get_time(self): """Get time.""" return clock() - self._start_time def date_str(): """Produce a date string for the current date and time Returns ------- datestr : str The date string. """ return str(datetime.datetime.today()).replace(':', '_') class WrapStdOut(object): """Ridiculous class to work around how doctest captures stdout.""" def __getattr__(self, name): # Even more ridiculous than this class, this must be sys.stdout (not # just stdout) in order for this to work (tested on OSX and Linux) return getattr(sys.stdout, name) class _TempDir(str): """Class for creating and auto-destroying temp dir This is designed to be used with testing modules. We cannot simply use __del__() method for cleanup here because the rmtree function may be cleaned up before this object, so we use the atexit module instead. Passing del_after and print_del kwargs to the constructor are helpful primarily for debugging purposes. 
""" def __new__(self, del_after=True, print_del=False): new = str.__new__(self, tempfile.mkdtemp()) self._del_after = del_after self._print_del = print_del return new def __init__(self): self._path = self.__str__() atexit.register(self.cleanup) def cleanup(self): if self._del_after is True: if self._print_del is True: print('Deleting {} ...'.format(self._path)) rmtree(self._path, ignore_errors=True) def check_units(units): """Ensure user passed valid units type Parameters ---------- units : str Must be ``'norm'``, ``'deg'``, ``'pix'``, or ``'cm'``. """ good_units = ['norm', 'pix', 'deg', 'cm'] if units not in good_units: raise ValueError('"units" must be one of {}, not {}' ''.format(good_units, units)) ############################################################################### # DECORATORS # Following deprecated class copied from scikit-learn class deprecated(object): """Decorator to mark a function or class as deprecated. Issue a warning when the function is called/the class is instantiated and adds a warning to the docstring. The optional extra argument will be appended to the deprecation message and the docstring. Note: to use this with the default value for extra, put in an empty of parentheses: >>> from expyfun._utils import deprecated >>> deprecated() # doctest: +ELLIPSIS <expyfun._utils.deprecated object at ...> >>> @deprecated() ... def some_function(): pass """ # Adapted from http://wiki.python.org/moin/PythonDecoratorLibrary, # but with many changes. # scikit-learn will not import on all platforms b/c it can be # sklearn or scikits.learn, so a self-contained example is used above def __init__(self, extra=''): """ Parameters ---------- extra: string to be added to the deprecation messages """ self.extra = extra def __call__(self, obj): """Call.""" if isinstance(obj, type): return self._decorate_class(obj) else: return self._decorate_fun(obj) def _decorate_class(self, cls): msg = "Class %s is deprecated" % cls.__name__ if self.extra: msg += "; %s" % self.extra # FIXME: we should probably reset __new__ for full generality init = cls.__init__ def wrapped(*args, **kwargs): warnings.warn(msg, category=DeprecationWarning) return init(*args, **kwargs) cls.__init__ = wrapped wrapped.__name__ = '__init__' wrapped.__doc__ = self._update_doc(init.__doc__) wrapped.deprecated_original = init return cls def _decorate_fun(self, fun): """Decorate function fun""" msg = "Function %s is deprecated" % fun.__name__ if self.extra: msg += "; %s" % self.extra def wrapped(*args, **kwargs): warnings.warn(msg, category=DeprecationWarning) return fun(*args, **kwargs) wrapped.__name__ = fun.__name__ wrapped.__dict__ = fun.__dict__ wrapped.__doc__ = self._update_doc(fun.__doc__) return wrapped def _update_doc(self, olddoc): newdoc = "DEPRECATED" if self.extra: newdoc = "%s: %s" % (newdoc, self.extra) if olddoc: newdoc = "%s\n\n%s" % (newdoc, olddoc) return newdoc if hasattr(inspect, 'signature'): # py35 def _get_args(function, varargs=False): params = inspect.signature(function).parameters args = [key for key, param in params.items() if param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD)] if varargs: varargs = [param.name for param in params.values() if param.kind == param.VAR_POSITIONAL] if len(varargs) == 0: varargs = None return args, varargs else: return args else: def _get_args(function, varargs=False): out = inspect.getargspec(function) # args, varargs, keywords, defaults if varargs: return out[:2] else: return out[0] @decorator def verbose_dec(function, *args, **kwargs): """Improved verbose 
decorator to allow functions to override log-level Do not call this directly to set global verbosrity level, instead use set_log_level(). Parameters ---------- function : callable Function to be decorated by setting the verbosity level. Returns ------- dec - function The decorated function """ arg_names = _get_args(function) if len(arg_names) > 0 and arg_names[0] == 'self': default_level = getattr(args[0], 'verbose', None) else: default_level = None if('verbose' in arg_names): verbose_level = args[arg_names.index('verbose')] else: verbose_level = default_level if verbose_level is not None: old_level = set_log_level(verbose_level, True) # set it back if we get an exception try: ret = function(*args, **kwargs) except Exception: set_log_level(old_level) raise set_log_level(old_level) return ret else: ret = function(*args, **kwargs) return ret def _new_pyglet(): import pyglet return LooseVersion(pyglet.version) >= LooseVersion('1.4') def _has_video(raise_error=False): exceptions = list() good = True if _new_pyglet(): try: from pyglet.media.codecs.ffmpeg import FFmpegSource # noqa except ImportError: exceptions.append(traceback.format_exc()) good = False else: if raise_error: print('Found FFmpegSource for new Pyglet') else: try: from pyglet.media.avbin import AVbinSource # noqa except ImportError: exceptions.append(traceback.format_exc()) try: from pyglet.media.sources.avbin import AVbinSource # noqa except ImportError: exceptions.append(traceback.format_exc()) good = False else: if raise_error: print('Found AVbinSource for old Pyglet 1') else: if raise_error: print('Found AVbinSource for old Pyglet 2') if raise_error and not good: raise RuntimeError('Video support not enabled, got exception(s):\n' '\n***********************\n'.join(exceptions)) return good def requires_video(): """Require FFmpeg/AVbin.""" import pytest return pytest.mark.skipif(not _has_video(), reason='Requires FFmpeg/AVbin') def requires_opengl21(func): """Require OpenGL.""" import pytest import pyglet.gl vendor = pyglet.gl.gl_info.get_vendor() version = pyglet.gl.gl_info.get_version() sufficient = pyglet.gl.gl_info.have_version(2, 0) return pytest.mark.skipif(not sufficient, reason='OpenGL too old: %s %s' % (vendor, version,))(func) def requires_lib(lib): """Requires lib decorator.""" import pytest try: importlib.import_module(lib) except Exception as exp: val = True reason = 'Needs %s (%s)' % (lib, exp) else: val = False reason = '' return pytest.mark.skipif(val, reason=reason) def _has_scipy_version(version): return (LooseVersion(sp.__version__) >= LooseVersion(version)) def _get_user_home_path(): """Return standard preferences path""" # this has been checked on OSX64, Linux64, and Win32 val = os.getenv('APPDATA' if 'nt' == os.name.lower() else 'HOME', None) if val is None: raise ValueError('expyfun config file path could ' 'not be determined, please report this ' 'error to expyfun developers') return val def fetch_data_file(fname): """Fetch example remote file Parameters ---------- fname : str The remote filename to get. If the filename already exists on the local system, the file will not be fetched again. Returns ------- fname : str The filename on the local system where the file was downloaded. 
""" path = get_config('EXPYFUN_DATA_PATH', op.join(_get_user_home_path(), '.expyfun', 'data')) fname_out = op.join(path, fname) if not op.isdir(op.dirname(fname_out)): os.makedirs(op.dirname(fname_out)) fname_url = ('https://github.com/LABSN/expyfun-data/raw/master/{0}' ''.format(fname)) try: # until we get proper certificates context = ssl._create_unverified_context() this_urlopen = partial(urlopen, context=context) except AttributeError: context = None this_urlopen = urlopen if not op.isfile(fname_out): try: with open(fname_out, 'wb') as fid: www = this_urlopen(fname_url, timeout=30.0) try: fid.write(www.read()) finally: www.close() except Exception: os.remove(fname_out) raise return fname_out def get_config_path(): r"""Get path to standard expyfun config file. Returns ------- config_path : str The path to the expyfun configuration file. On windows, this will be '%APPDATA%\.expyfun\expyfun.json'. On every other system, this will be $HOME/.expyfun/expyfun.json. """ val = op.join(_get_user_home_path(), '.expyfun', 'expyfun.json') return val # List the known configuration values known_config_types = ('RESPONSE_DEVICE', 'AUDIO_CONTROLLER', 'DB_OF_SINE_AT_1KHZ_1RMS', 'EXPYFUN_EYELINK', 'SOUND_CARD_API', 'SOUND_CARD_API_OPTIONS', 'SOUND_CARD_BACKEND', 'SOUND_CARD_FS', 'SOUND_CARD_NAME', 'SOUND_CARD_FIXED_DELAY', 'SOUND_CARD_TRIGGER_CHANNELS', 'SOUND_CARD_TRIGGER_INSERTION', 'SOUND_CARD_TRIGGER_SCALE', 'SOUND_CARD_TRIGGER_ID_AFTER_ONSET', 'SOUND_CARD_DRIFT_TRIGGER', 'TDT_CIRCUIT_PATH', 'TDT_DELAY', 'TDT_INTERFACE', 'TDT_MODEL', 'TDT_TRIG_DELAY', 'TRIGGER_CONTROLLER', 'TRIGGER_ADDRESS', 'WINDOW_SIZE', 'SCREEN_NUM', 'SCREEN_WIDTH', 'SCREEN_DISTANCE', 'SCREEN_SIZE_PIX', 'EXPYFUN_LOGGING_LEVEL', ) # These allow for partial matches: 'NAME_1' is okay key if 'NAME' is listed known_config_wildcards = () def get_config(key=None, default=None, raise_error=False): """Read expyfun preference from env, then expyfun config Parameters ---------- key : str The preference key to look for. The os environment is searched first, then the expyfun config file is parsed. default : str | None Value to return if the key is not found. raise_error : bool If True, raise an error if the key is not found (instead of returning default). Returns ------- value : str | None The preference key value. """ if key is not None and not isinstance(key, string_types): raise ValueError('key must be a string') # first, check to see if key is in env if key is not None and key in os.environ: return os.environ[key] # second, look for it in expyfun config file config_path = get_config_path() if not op.isfile(config_path): key_found = False val = default else: with open(config_path, 'r') as fid: config = json.load(fid) if key is None: return config key_found = True if key in config else False val = config.get(key, default) if not key_found and raise_error is True: meth_1 = 'os.environ["%s"] = VALUE' % key meth_2 = 'expyfun.utils.set_config("%s", VALUE)' % key raise KeyError('Key "%s" not found in environment or in the ' 'expyfun config file:\n%s\nTry either:\n' ' %s\nfor a temporary solution, or:\n' ' %s\nfor a permanent one. You can also ' 'set the environment variable before ' 'running python.' % (key, config_path, meth_1, meth_2)) return val def set_config(key, value): """Set expyfun preference in config Parameters ---------- key : str | None The preference key to set. If None, a tuple of the valid keys is returned, and ``value`` is ignored. value : str | None The value to assign to the preference key. If None, the key is deleted. 
""" if key is None: return sorted(known_config_types) if not isinstance(key, string_types): raise ValueError('key must be a string') # While JSON allow non-string types, we allow users to override config # settings using env, which are strings, so we enforce that here if not isinstance(value, string_types) and value is not None: raise ValueError('value must be a string or None') if key not in known_config_types and not \ any(k in key for k in known_config_wildcards): warnings.warn('Setting non-standard config type: "%s"' % key) # Read all previous values config_path = get_config_path() if op.isfile(config_path): with open(config_path, 'r') as fid: config = json.load(fid) else: config = dict() logger.info('Attempting to create new expyfun configuration ' 'file:\n%s' % config_path) if value is None: config.pop(key, None) else: config[key] = value # Write all values directory = op.split(config_path)[0] if not op.isdir(directory): os.mkdir(directory) with open(config_path, 'w') as fid: json.dump(config, fid, sort_keys=True, indent=0) ############################################################################### # MISC def fake_button_press(ec, button='1', delay=0.): """Fake a button press after a delay Notes ----- This function only works with the keyboard controller (not TDT)! It uses threads to ensure that control is passed back, so other commands can be called (like wait_for_presses). """ def send(): ec._response_handler._on_pyglet_keypress(button, [], True) Timer(delay, send).start() if delay > 0. else send() def fake_mouse_click(ec, pos, button='left', delay=0.): """Fake a mouse click after a delay""" button = dict(left=1, middle=2, right=4)[button] # trans to pyglet def send(): ec._mouse_handler._on_pyglet_mouse_click(pos[0], pos[1], button, []) Timer(delay, send).start() if delay > 0. else send() def _check_pyglet_version(raise_error=False): """Check pyglet version, return True if usable. """ import pyglet is_usable = LooseVersion(pyglet.version) >= LooseVersion('1.2') if raise_error is True and is_usable is False: raise ImportError('On Linux, you must run at least Pyglet ' 'version 1.2, and you are running ' '{0}'.format(pyglet.version)) return is_usable def _wait_secs(secs, ec=None): """Wait a specified number of seconds. Parameters ---------- secs : float Number of seconds to wait. ec : None | expyfun.ExperimentController instance The ExperimentController. Notes ----- This function uses a while loop. Although this slams the CPU, it will guarantee that events (keypresses, etc.) are processed. """ # hog the cpu, checking time t0 = clock() if ec is not None: while (clock() - t0) < secs: ec._dispatch_events() ec.check_force_quit() time.sleep(0.0001) else: wins = _get_display().get_windows() while (clock() - t0) < secs: for win in wins: win.dispatch_events() time.sleep(0.0001) def running_rms(signal, win_length): """RMS of ``signal`` with rectangular window ``win_length`` samples long. Parameters ---------- signal : array_like The (1-dimesional) signal of interest. 
win_length : int Length (in samples) of the rectangular window """ assert signal.ndim == 1 assert win_length > 0 # The following is equivalent to: # sqrt(convolve(signal ** 2, ones(win_length) / win_length, 'valid')) # But an order of magnitude faster: 60 ms vs 7 ms for: # # x = np.random.RandomState(0).randn(1000001) # %timeit expyfun._utils.running_rms(x, 441) # sig2 = signal * signal c1 = np.cumsum(sig2) out = c1[win_length - 1:].copy() if len(out) == 0: # len(signal) < len(win_length) out = np.array([np.sqrt(c1[-1] / signal.size)]) else: out[1:] -= c1[:-win_length] out /= win_length np.sqrt(out, out=out) return out def _fix_audio_dims(signal, n_channels): """Make it so a valid audio buffer is in the standard dimensions. Parameters ---------- signal : array_like The signal whose dimensions should be checked and fixed. n_channels : int The number of channels that the output should have. If the input is mono and n_channels=2, it will be tiled to be shape (2, n_samples). Otherwise, the number of channels in signal must match n_channels. Returns ------- signal_fixed : array The signal with standard dimensions (n_channels, N). """ # Check requested channel output n_channels = int(operator.index(n_channels)) signal = np.asarray(np.atleast_2d(signal), dtype=np.float32) # Check dimensionality if signal.ndim != 2: raise ValueError('Sound data must have one or two dimensions, got %s.' % (signal.ndim,)) # Return data with correct dimensions if n_channels == 2 and signal.shape[0] == 1: signal = np.tile(signal, (n_channels, 1)) if signal.shape[0] != n_channels: raise ValueError('signal channel count %d did not match required ' 'channel count %d' % (signal.shape[0], n_channels)) return signal def _sanitize(text_like): """Cast as string, encode as UTF-8 and sanitize any escape characters. """ return text_type(text_like).encode('unicode_escape').decode('utf-8') def _sort_keys(x): """Sort and return keys of dict""" keys = list(x.keys()) # note: not thread-safe idx = np.argsort([str(k) for k in keys]) keys = [keys[ii] for ii in idx] return keys def object_diff(a, b, pre=''): """Compute all differences between two python variables Parameters ---------- a : object Currently supported: dict, list, tuple, ndarray, int, str, bytes, float, StringIO, BytesIO. b : object Must be same type as ``a``. pre : str String to prepend to each line. Returns ------- diffs : str A string representation of the differences. Notes ----- Taken from mne-python with permission. 
""" out = '' if type(a) != type(b): out += pre + ' type mismatch (%s, %s)\n' % (type(a), type(b)) elif isinstance(a, dict): k1s = _sort_keys(a) k2s = _sort_keys(b) m1 = set(k2s) - set(k1s) if len(m1): out += pre + ' x1 missing keys %s\n' % (m1) for key in k1s: if key not in k2s: out += pre + ' x2 missing key %s\n' % key else: out += object_diff(a[key], b[key], pre + 'd1[%s]' % repr(key)) elif isinstance(a, (list, tuple)): if len(a) != len(b): out += pre + ' length mismatch (%s, %s)\n' % (len(a), len(b)) else: for xx1, xx2 in zip(a, b): out += object_diff(xx1, xx2, pre='') elif isinstance(a, (string_types, int, float, bytes)): if a != b: out += pre + ' value mismatch (%s, %s)\n' % (a, b) elif a is None: if b is not None: out += pre + ' a is None, b is not (%s)\n' % (b) elif isinstance(a, np.ndarray): if not np.array_equal(a, b): out += pre + ' array mismatch\n' else: raise RuntimeError(pre + ': unsupported type %s (%s)' % (type(a), a)) return out def _check_skip_backend(backend): from expyfun._sound_controllers import _import_backend import pytest if isinstance(backend, dict): # actually an AC backend = backend['SOUND_CARD_BACKEND'] try: _import_backend(backend) except Exception as exc: pytest.skip('Skipping test for backend %s: %s' % (backend, exc)) def _check_params(params, keys, defaults, name): if not isinstance(params, dict): raise TypeError('{0} must be a dict, got type {1}' .format(name, type(params))) params = deepcopy(params) if not isinstance(params, dict): raise TypeError('{0} must be a dict, got {1}' .format(name, type(params))) # Set sensible defaults for values that are not passed for k in keys: params[k] = params.get(k, get_config(k, defaults.get(k, None))) # Check keys for k in params.keys(): if k not in keys: raise KeyError('Unrecognized key in {0}["{1}"], must be ' 'one of {2}'.format(name, k, ', '.join(keys))) return params def _get_display(): import pyglet try: display = pyglet.canvas.get_display() except AttributeError: # < 1.4 display = pyglet.window.get_platform().get_default_display() return display
drammock/expyfun
expyfun/_utils.py
Python
bsd-3-clause
30,243
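
Most of the helpers in the expyfun record above are internal plumbing, but running_rms is easy to exercise on its own. A small sketch, assuming expyfun and its dependencies are installed; the signal values and window length are arbitrary.

import numpy as np
from expyfun._utils import running_rms

# A short 1-D signal and a 4-sample rectangular window.
sig = np.array([0.0, 1.0, 0.0, -1.0, 0.0, 1.0, 0.0, -1.0])
rms = running_rms(sig, 4)

# One RMS value per full window position: len(sig) - win_length + 1 values.
print(rms.shape)  # (5,)
print(rms)        # roughly 0.707 everywhere for this alternating signal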
from django.utils.deprecation import MiddlewareMixin

from subdomains.middleware import SubdomainURLRoutingMiddleware


class SubdomainMiddleware(MiddlewareMixin, SubdomainURLRoutingMiddleware):
    pass
Ajapaik/ajapaik-web
ajapaik/ajapaik/middleware.py
Python
gpl-3.0
203
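
The three-line shim above adapts django-subdomains' old-style SubdomainURLRoutingMiddleware to Django's MiddlewareMixin protocol. A hedged sketch of how such a class is typically enabled; the settings excerpt is illustrative and not taken from the repository's actual settings module.

# settings.py (illustrative excerpt, not from the repository)
MIDDLEWARE = [
    'django.middleware.common.CommonMiddleware',
    # The shim wraps SubdomainURLRoutingMiddleware with MiddlewareMixin
    # so it speaks Django's new-style middleware protocol.
    'ajapaik.ajapaik.middleware.SubdomainMiddleware',
]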
#!/usr/bin/env python3
#
# Copyright (C) 2009 Leandro Lisboa Penz <lpenz@lpenz.org>
# This file is subject to the terms and conditions defined in
# file 'LICENSE.txt', which is part of this source code package.

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import re


def version_get():
    with open('ftpsmartsync/__init__.py') as fd:
        for line in fd:
            m = re.match('^PROGRAM_VERSION = "(?P<version>[0-9.]+)"', line)
            if m:
                return m.group('version')


setup(name="ftpsmartsync",
      version=version_get(),
      description="Sync local path with FTP remote efficiently "
                  "by transmitting only what is necessary",
      author="Leandro Lisboa Penz",
      author_email="lpenz@lpenz.org",
      url="http://github.com/lpenz/ftpsmartsync",
      data_files=[('share/man/man1', ['ftpsmartsync.1'])],
      packages=['ftpsmartsync'],
      scripts=["bin/ftpsmartsync"],
      long_description="""\
ftpsmartsync is a program that synchronizes all files beneath the current
directory with an FTP host efficiently by keeping a remote file with the
hashes of the files sent.
""",
      classifiers=[
          'Development Status :: 5 - Production/Stable',
          'Environment :: Console',
          'License :: OSI Approved :: '
          'GNU General Public License v2 or later (GPLv2+)',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'Programming Language :: Python :: 3',
      ],
      license="GPL2")
lpenz/ftpsync
setup.py
Python
gpl-2.0
1,592
import pygame, sys, math, time

from Score import *


class Timer(Score):
    def __init__(self, pos):
        Score.__init__(self, pos)
        self.startTime = time.clock()
        self.image = self.font.render("Time: " + str(self.value), True, (0,0,255))
        self.rect = self.image.get_rect(center = self.rect.center)

    def update(self):
        newValue = int(time.clock() - self.startTime)
        if newValue != self.value:
            self.value = newValue
            self.image = self.font.render("Time: " + str(self.value), True, (0,0,255))
            self.rect = self.image.get_rect(center = self.rect.center)
KRHS-GameProgramming-2016/AstroDigger
Timer.py
Python
mit
629
#!/usr/bin/env python
##########################################################################
# run/ec2-setup/spot.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Matthias Stumpp <mstumpp@gmail.com>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################

import boto3
import time
import json
import datetime
import sys

with open('config.json') as data_file:
    data = json.load(data_file)

client = boto3.client('ec2')
ec2 = boto3.resource('ec2')

job_id = int(time.time())

blockMappings = [{'DeviceName': '/dev/sda1',
                  'Ebs': {
                      'VolumeSize': 8,
                      'DeleteOnTermination': True,
                      'VolumeType': 'gp2'
                  }}]

if data["VOL_SNAPSHOT_ID"]:
    blockMappings.append(
        {'DeviceName': data["DEVICE"],
         'Ebs': {
             'SnapshotId': data["VOL_SNAPSHOT_ID"],
             'DeleteOnTermination': True,
             'VolumeType': 'gp2'
         }})

response = client.request_spot_instances(
    SpotPrice=data["SPOT_PRICE"],
    InstanceCount=data["COUNT"],
    Type=data["TYPE"],
    #ValidFrom=datetime.datetime(2015, 10, 11, 18, 10, 00),
    ValidUntil=datetime.datetime(2015, 10, 11, 19, 37, 00),
    LaunchSpecification={
        'ImageId': data["AMI_ID"],
        'KeyName': data["EC2_KEY_HANDLE"],
        'InstanceType': data["INSTANCE_TYPE"],
        'SecurityGroups': [data["SECGROUP_HANDLE"]],
        # 'Placement' and 'BlockDeviceMappings' are separate keys of the
        # launch specification.
        'Placement': {'AvailabilityZone': data["ZONE"]},
        'BlockDeviceMappings': blockMappings})

request_ids = []
for request in response['SpotInstanceRequests']:
    request_ids.append(request['SpotInstanceRequestId'])

fulfilled_instances = []

loop = True
print "waiting for instances to get fulfilled..."
while loop:
    requests = client.describe_spot_instance_requests(
        SpotInstanceRequestIds=request_ids)
    for request in requests['SpotInstanceRequests']:
        if request['State'] in ['closed', 'cancelled', 'failed']:
            print request['SpotInstanceRequestId'] + " " + request['State']
            loop = False
            break  # TODO(ms) ensure running instances are terminated
        if 'InstanceId' in request and request['InstanceId'] not in fulfilled_instances:
            fulfilled_instances.append(request['InstanceId'])
            print request['InstanceId'] + " running..."
    if len(fulfilled_instances) == int(data["COUNT"]):
        print 'all requested instances are fulfilled'
        break
    time.sleep(5)

if loop == False:
    print "unable to fulfill all requested instances... aborting..."
    sys.exit()

# add tag to each instance; fulfilled_instances holds instance ids, so wrap
# each id in an ec2.Instance resource before tagging
for instance_id in fulfilled_instances:
    ec2.Instance(instance_id).create_tags(
        Tags=[{'Key': 'JobId', 'Value': str(job_id)}])

# ensure all instances are running
loop = True
while loop:
    loop = False
    response = client.describe_instance_status(
        InstanceIds=fulfilled_instances, IncludeAllInstances=True)
    for status in response['InstanceStatuses']:
        if status['InstanceState']['Name'] != 'running':
            loop = True

print "all instances are running..."

print str(data["COUNT"]) + " instances up and running! JobId: " + str(job_id)

##########################################################################
manpen/thrill
run/ec2-setup/spot.py
Python
bsd-2-clause
3,829
from mock import patch, Mock from django import test from django.core import exceptions from django_google_maps import fields class GeoPtFieldTests(test.TestCase): def test_sets_lat_lon_on_initialization(self): geo_pt = fields.GeoPt("15.001,32.001") self.assertEqual(15.001, geo_pt.lat) self.assertEqual(32.001, geo_pt.lon) def test_uses_lat_comma_lon_as_unicode_representation(self): lat_lon_string = "15.001,32.001" geo_pt = fields.GeoPt(lat_lon_string) self.assertEqual(lat_lon_string, unicode(geo_pt)) def test_two_GeoPts_with_same_lat_lon_should_be_equal(self): geo_pt_1 = fields.GeoPt("15.001,32.001") geo_pt_2 = fields.GeoPt("15.001,32.001") self.assertEqual(geo_pt_1, geo_pt_2) def test_two_GeoPts_with_different_lat_should_not_be_equal(self): geo_pt_1 = fields.GeoPt("15.001,32.001") geo_pt_2 = fields.GeoPt("20.001,32.001") self.assertNotEqual(geo_pt_1, geo_pt_2) def test_two_GeoPts_with_different_lon_should_not_be_equal(self): geo_pt_1 = fields.GeoPt("15.001,32.001") geo_pt_2 = fields.GeoPt("15.001,62.001") self.assertNotEqual(geo_pt_1, geo_pt_2) def test_is_not_equal_when_comparison_is_not_GeoPt_object(self): geo_pt_1 = fields.GeoPt("15.001,32.001") geo_pt_2 = "15.001,32.001" self.assertNotEqual(geo_pt_1, geo_pt_2) def test_allows_GeoPt_instantiated_with_empty_string(self): geo_pt = fields.GeoPt('') self.assertEqual(None, geo_pt.lat) self.assertEqual(None, geo_pt.lon) def test_uses_empty_string_as_unicode_representation_for_empty_GeoPt(self): geo_pt = fields.GeoPt('') self.assertEqual('', unicode(geo_pt)) @patch("django_google_maps.fields.GeoPt.__init__", Mock(return_value=None)) def test_splits_geo_point_on_comma(self): lat, lon = fields.GeoPt(Mock())._split_geo_point("15.001,32.001") self.assertEqual('15.001', lat) self.assertEqual('32.001', lon) @patch("django_google_maps.fields.GeoPt.__init__", Mock(return_value=None)) def test_raises_error_when_attribute_error_on_split(self): geo_point = Mock() geo_point.split.side_effect = AttributeError geo_pt = fields.GeoPt(Mock()) self.assertRaises(exceptions.ValidationError, geo_pt._split_geo_point, geo_point) @patch("django_google_maps.fields.GeoPt.__init__", Mock(return_value=None)) def test_raises_error_when_type_error_on_split(self): geo_point = Mock() geo_point.split.side_effect = ValueError geo_pt = fields.GeoPt(Mock()) self.assertRaises(exceptions.ValidationError, geo_pt._split_geo_point, geo_point) @patch("django_google_maps.fields.GeoPt.__init__", Mock(return_value=None)) def test_returns_float_value_when_valid_value(self): geo_pt = fields.GeoPt(Mock()) val = geo_pt._validate_geo_range('45.005', 90) self.assertEqual(45.005, val) self.assertIsInstance(val, float) @patch("django_google_maps.fields.GeoPt.__init__", Mock(return_value=None)) def test_raises_exception_when_type_error(self): geo_pt = fields.GeoPt(Mock()) self.assertRaises(exceptions.ValidationError, geo_pt._validate_geo_range, object, 90) @patch("django_google_maps.fields.GeoPt.__init__", Mock(return_value=None)) def test_raises_exception_when_value_error(self): geo_pt = fields.GeoPt(Mock()) self.assertRaises(exceptions.ValidationError, geo_pt._validate_geo_range, 'a', 90) @patch("django_google_maps.fields.GeoPt.__init__", Mock(return_value=None)) def test_raises_exception_when_value_is_out_of_upper_range(self): geo_pt = fields.GeoPt(Mock()) self.assertRaises(exceptions.ValidationError, geo_pt._validate_geo_range, '90.01', 90) @patch("django_google_maps.fields.GeoPt.__init__", Mock(return_value=None)) def test_raises_exception_when_value_is_out_of_lower_range(self): geo_pt = 
fields.GeoPt(Mock()) self.assertRaises(exceptions.ValidationError, geo_pt._validate_geo_range, '-90.01', 90)
desarrollosimagos/svidb
administrativo/django_google_maps/tests.py
Python
gpl-3.0
4,146
# -*- coding: utf-8 -*-
# Generated by Django 1.11.21 on 2019-06-13 18:03
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('projects', '0042_increase_env_variable_value_max_length'),
    ]

    operations = [
        migrations.AddField(
            model_name='importedfile',
            name='build',
            field=models.IntegerField(null=True, verbose_name='Build id'),
        ),
    ]
rtfd/readthedocs.org
readthedocs/projects/migrations/0043_add-build-field.py
Python
mit
500
import httplib2 import re from django.conf import settings from django.contrib.sites.models import Site from django.http import HttpRequest from django.utils.importlib import import_module from oembed.constants import DOMAIN_RE, OEMBED_ALLOWED_SIZES, SOCKET_TIMEOUT from oembed.exceptions import OEmbedHTTPException def size_to_nearest(width=None, height=None, allowed_sizes=OEMBED_ALLOWED_SIZES, force_fit=False): """ Generate some dimensions for resizing an object. This function DOES NOT handle scaling, it simply calculates maximums. These values should then be passed to the resize() method which will scale it and return the scaled width & height. """ minwidth, minheight = min(allowed_sizes) maxwidth, maxheight = max(allowed_sizes) if not width and not height: return maxwidth, maxheight if width: width = int(width) > minwidth and int(width) or minwidth elif force_fit: width = maxwidth if height: height = int(height) > minheight and int(height) or minheight elif force_fit: height = maxheight for size in sorted(allowed_sizes): if width and not height: if width >= size[0]: maxwidth = size[0] if force_fit: maxheight = size[1] else: break elif height and not width: if height >= size[1]: maxheight = size[1] if force_fit: maxwidth = size[0] else: break else: if force_fit: if (width >= size[0]) and (height >= size[1]): maxwidth, maxheight = size else: break else: if width >= size[0]: maxwidth = size[0] if height >= size[1]: maxheight = size[1] return maxwidth, maxheight def scale(width, height, new_width, new_height=None): # determine if resizing needs to be done (will not scale up) if width < new_width: if not new_height or height < new_height: return (width, height) # calculate ratios width_percent = (new_width / float(width)) if new_height: height_percent = (new_height / float(height)) if not new_height or width_percent < height_percent: new_height = int((float(height) * float(width_percent))) else: new_width = int((float(width) * float(height_percent))) return (new_width, new_height) def fetch_url(url, method='GET', user_agent='django-oembed', timeout=SOCKET_TIMEOUT): """ Fetch response headers and data from a URL, raising a generic exception for any kind of failure. """ sock = httplib2.Http(timeout=timeout) request_headers = { 'User-Agent': user_agent, 'Accept-Encoding': 'gzip'} try: headers, raw = sock.request(url, headers=request_headers, method=method) except: raise OEmbedHTTPException('Error fetching %s' % url) return headers, raw def get_domain(url): match = re.search(DOMAIN_RE, url) if match: return match.group() return '' def relative_to_full(url, example_url): """ Given a url which may or may not be a relative url, convert it to a full url path given another full url as an example """ if re.match('https?:\/\/', url): return url domain = get_domain(example_url) if domain: return '%s%s' % (domain, url) return url def mock_request(): """ Generate a fake request object to allow oEmbeds to use context processors. """ current_site = Site.objects.get_current() request = HttpRequest() request.META['SERVER_NAME'] = current_site.domain return request def load_class(path): """ dynamically load a class given a string of the format package.Class """ package, klass = path.rsplit('.', 1) module = import_module(package) return getattr(module, klass) def cleaned_sites(): """ Create a list of tuples mapping domains from the sites table to their site name. The domains will be cleaned into regexes that may be more permissive than the site domain is in the db. 
[(domain_regex, domain_name, domain_string), ...] """ mappings = {} for site in Site.objects.all(): # match the site domain, breaking it into several pieces match = re.match(r'(https?://)?(www[^\.]*\.)?([^/]+)', site.domain) if match is not None: http, www, domain = match.groups() # if the protocol is specified, use it, otherwise accept 80/443 http_re = http or r'https?:\/\/' # whether or not there's a www (or www2 :x) allow it in the match www_re = r'(?:www[^\.]*\.)?' # build a regex of the permissive http re, the www re, and the domain domain_re = http_re + www_re + domain # now build a pretty string representation of the domain http = http or r'http://' www = www or '' normalized = http + www + domain mappings[site.pk] = (domain_re, site.name, normalized) return mappings
0101/djangoembed
oembed/utils.py
Python
mit
5,373
from django import template

register = template.Library()


@register.tag
def capture(parser, token):
    """{% capture as [foo] %}"""
    bits = token.split_contents()
    if len(bits) != 3:
        raise template.TemplateSyntaxError("'capture' node requires `as (variable name)`.")
    nodelist = parser.parse(('endcapture',))
    parser.delete_first_token()
    return CaptureNode(nodelist, bits[2])


class CaptureNode(template.Node):
    def __init__(self, nodelist, varname):
        self.nodelist = nodelist
        self.varname = varname

    def render(self, context):
        output = self.nodelist.render(context)
        context[self.varname] = output
        return ''
ericholscher/devmason-server
devmason_server/templatetags/capture.py
Python
mit
682
#!/usr/bin/env python # -*- coding: utf-8 -*- """Make use of synaptic as backend.""" # Copyright (C) 2008-2010 Sebastian Heinlein <devel@glatzor.de> # Copyright (C) 2005-2007 Canonical # # Licensed under the GNU General Public License Version 2 # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. __author__ = "Sebastian Heinlein <devel@glatzor.de>, " \ "Michael Vogt <mvo@canonical.com" import tempfile from gettext import gettext as _ from gi.repository import GObject from defer import Deferred import sessioninstaller.errors class SynapticBackend(object): """Make use of Synaptic to install and remove packages.""" def _run_synaptic(self, xid, opt, tempf, interaction): deferred = Deferred() if tempf: opt.extend(["--set-selections-file", "%s" % tempf.name]) #FIXME: Take interaction into account opt.extend(["-o", "Synaptic::closeZvt=true"]) if xid: opt.extend(["--parent-window-id", "%s" % (xid)]) cmd = ["/usr/bin/gksu", "--desktop", "/usr/share/applications/update-manager.desktop", "--", "/usr/sbin/synaptic", "--hide-main-window", "--non-interactive"] cmd.extend(opt) flags = GObject.SPAWN_DO_NOT_REAP_CHILD (pid, stdin, stdout, stderr) = GObject.spawn_async(cmd, flags=flags) GObject.child_watch_add(pid, self._on_synaptic_exit, (tempf, deferred)) return deferred def _on_synaptic_exit(self, pid, condition, (tempf, deferred)): if tempf: tempf.close() if condition == 0: deferred.callback() else: deferred.errback(sessioninstaller.errors.ModifyFailed()) def remove_packages(self, xid, package_names, interaction): opt = [] # custom progress strings #opt.append("--progress-str") #opt.append("%s" % _("Please wait, this can take some time.")) #opt.append("--finish-str") #opt.append("%s" % _("Update is complete")) tempf = tempfile.NamedTemporaryFile() for pkg_name in package_names: tempf.write("%s\tuninstall\n" % pkg_name) tempf.flush() return self._run_synaptic(xid, opt, tempf, interaction) def install_packages(self, xid, package_names, interaction): opt = [] # custom progress strings #opt.append("--progress-str") #opt.append("%s" % _("Please wait, this can take some time.")) #opt.append("--finish-str") #opt.append("%s" % _("Update is complete")) tempf = tempfile.NamedTemporaryFile() for pkg_name in package_names: tempf.write("%s\tinstall\n" % pkg_name) tempf.flush() return self._run_synaptic(xid, opt, tempf, interaction) def install_package_files(self, xid, package_names, interaction): raise NotImplemented # vim:ts=4:sw=4:et
yasoob/PythonRSSReader
venv/lib/python2.7/dist-packages/sessioninstaller/backends/synaptic.py
Python
mit
3,567
''' This module contains a number of tests that check hmf's results against those of genmf and/or CAMB. Firstly we test transfer functions/power spectra against the output from CAMB to make sure we are producing them correctly (with pycamb within hmf). We also check the normalisation of the power spectrum done with hmf vs. genmf. Then we check results for sigma, lnsigma, and the differential and cumulative mass functions against genmf using two different methods. The first is to produce a power spectrum straight from CAMB to use in genmf (we have to modify the first and last values so that genmf can actually use it), while using a generated power spectrum from hmf (with same parameters) within hmf. The second is to use the same generated power from hmf in both genmf and hmf. These should be equivalent of course, but this is a check. We then also check the effects of redshift (z=2) on all above quantities. The data files in the data/ directory are the following: ST_0 etc :: output from genmf with given fit and redshift, produced with default cosmology here hmf_power :: the output (normalised) power spectrum from hmf, between exp(-21), exp(21) camb_power :: the output (un-normalised) power spec from camb with parameters as set here. [CAMB VERSION MAR13] params.ini :: the params.ini file input to CAMB for all camb results (just for legacy) genmf_power :: the power spectrum (normalised) produced by genmf To be more explicit, the power spectrum in all cases is produced with the following parameters: self._transfer_cosmo = {"w_lam" :-1, "omegab" : 0.05, "omegac" : 0.25, "omegav" : 0.7, "omegan" : 0.0, "H0" : 70, 'cs2_lam' : 1, 'TCMB' : 2.725, 'yhe' : 0.24, 'Num_Nu_massless' : 3.04, 'reion__redshift': 10.3, 'reion__optical_depth': 0.085 } self._extra_cosmo = {"sigma_8":0.8, "n":1, "delta_c":1.686, "crit_dens":27.755 * 10 ** 10 } self._transfer_options = {'Num_Nu_massive' : 0, 'reion__fraction' :-1, 'reion__delta_redshift' : 1.5, 'Scalar_initial_condition' : 1, 'scalar_amp' : 1, 'scalar_running' : 0, 'tensor_index' : 0, 'tensor_ratio' : 1, 'lAccuracyBoost' : 1, 'lSampleBoost' : 1, 'AccuracyBoost' : 1, 'WantScalars' : True, 'WantTensors' : False, 'reion__reionization' : True, 'reion__use_optical_depth' : True, 'w_perturb' : False, 'DoLensing' : False, 'transfer__k_per_logint': 50, 'transfer__kmax':10} ''' #=============================================================================== # Some Imports #=============================================================================== import numpy as np from hmf import MassFunction # from scipy.interpolate import InterpolatedUnivariateSpline as spline import inspect import os LOCATION = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) #======================================================================= # Some general functions used in tests #======================================================================= def rms_diff(vec1, vec2, tol): mask = np.logical_and(np.logical_not(np.isnan(vec1)), np.logical_not(np.isnan(vec2))) vec1 = vec1[mask] vec2 = vec2[mask] err = np.sqrt(np.mean(((vec1 - vec2) / vec2) ** 2)) print "RMS Error: ", err, "(> ", tol, ")" return err < tol def max_diff_rel(vec1, vec2, tol): mask = np.logical_and(np.logical_not(np.isnan(vec1)), np.logical_not(np.isnan(vec2))) vec1 = vec1[mask] vec2 = vec2[mask] err = np.max(np.abs((vec1 - vec2) / vec2)) print "Max Diff: ", err, "(> ", tol, ")" return err < tol def max_diff(vec1, vec2, tol): mask = np.logical_and(np.logical_not(np.isnan(vec1)), 
np.logical_not(np.isnan(vec2))) vec1 = vec1[mask] vec2 = vec2[mask] err = np.max(np.abs((vec1 - vec2))) print "Max Diff: ", err, "(> ", tol, ")" return err < tol #=============================================================================== # The Test Classes #=============================================================================== class TestGenMF(object): def check_col(self, pert, fit, redshift, origin, col): """ Able to check all columns only dependent on base cosmology (not fit) """ data = np.genfromtxt(LOCATION + "/data/" + fit + '_' + str(int(redshift)) + '_' + origin)[::-1][400:1201] # We have to do funky stuff to the data if its been cut by genmf if col is "sigma": assert max_diff_rel(pert.sigma, data[:, 5], 0.01) elif col is "lnsigma": # We just do diff on this one because it passes through 0 assert max_diff(pert.lnsigma, data[:, 3], 0.01) elif col is "n_eff": assert max_diff_rel(pert.n_eff, data[:, 6], 0.01) elif col is "dndlog10m": assert rms_diff(pert.dndlog10m, 10 ** data[:, 1], 0.02) elif col is "fsigma": assert rms_diff(pert.fsigma, data[:, 4], 0.01) elif col is "ngtm": assert rms_diff(pert.ngtm, 10 ** data[:, 2], 0.05) def test_sigmas(self): hmf = MassFunction(M=np.linspace(7, 15, 801), omegab=0.05, omegac=0.25, omegav=0.7, sigma_8=0.8, n=1, H0=70.0, lnk=np.linspace(-21, 21, 500), transfer__kmax=10, transfer__k_per_logint=50, mf_fit='ST', z=0.0) for redshift in [0.0, 2.0]: hmf.update(z=redshift) for origin in ['camb', 'hmf']: for col in ['sigma', 'lnsigma', 'n_eff']: yield self.check_col, hmf, "ST", redshift, origin, col def test_fits(self): hmf = MassFunction(M=np.linspace(7, 15, 801), omegab=0.05, omegac=0.25, omegav=0.7, sigma_8=0.8, n=1, H0=70.0, lnk=np.linspace(-21, 21, 500), transfer__kmax=10, transfer__k_per_logint=50, mf_fit='ST', z=0.0) for redshift in [0.0, 2.0]: hmf.update(z=redshift) for fit in ["ST", "PS", "Reed03", "Warren", "Jenkins", "Reed07"]: hmf.update(mf_fit=fit) for origin in ['camb', 'hmf']: for col in ['dndlog10m', 'ngtm', 'fsigma']: yield self.check_col, hmf, fit, redshift, origin, col
tbs1980/hmf
tests/test_genmf.py
Python
mit
7,376
# Copyright (C) 2012 Aniket Panse <contact@aniketpanse.in # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # Aniket Panse <contact@aniketpanse.in> grants Johnny Vestergaard <jkv@unixcluster.dk> # a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable # copyright license to reproduce, prepare derivative works of, publicly # display, publicly perform, sublicense, relicense, and distribute [the] Contributions # and such derivative works. import smtplib import base64 import hmac import os import tempfile import shutil import gevent.monkey from gevent.server import StreamServer from beeswarm.drones.honeypot.honeypot import Honeypot from beeswarm.drones.honeypot.capabilities import smtp gevent.monkey.patch_all() import unittest class SmtpTests(unittest.TestCase): def setUp(self): self.work_dir = tempfile.mkdtemp() Honeypot.prepare_environment(self.work_dir) def tearDown(self): if os.path.isdir(self.work_dir): shutil.rmtree(self.work_dir) def test_connection(self): """ Tries to connect and run a EHLO command. Very basic test. """ # Use uncommon port so that we can run test even if the Honeypot is running. options = {'enabled': 'True', 'port': 0, 'protocol_specific_data': {'banner': 'test'}, 'users': {'test': 'test'}, } cap = smtp.smtp(options, self.work_dir) srv = StreamServer(('0.0.0.0', 0), cap.handle_session) srv.start() smtp_ = smtplib.SMTP('127.0.0.1', srv.server_port, local_hostname='localhost', timeout=15) smtp_.ehlo() smtp_.quit() srv.stop() def test_AUTH_CRAM_MD5_reject(self): """ Makes sure the server rejects all invalid login attempts that use the CRAM-MD5 Authentication method. """ options = {'enabled': 'True', 'port': 0, 'protocol_specific_data': {'banner': 'Test'}, 'users': {'someguy': 'test'}} cap = smtp.smtp(options, self.work_dir) srv = StreamServer(('0.0.0.0', 0), cap.handle_session) srv.start() def encode_cram_md5(challenge, user, password): challenge = base64.decodestring(challenge) response = user + ' ' + hmac.HMAC(password, challenge).hexdigest() return base64.b64encode(response) smtp_ = smtplib.SMTP('127.0.0.1', srv.server_port, local_hostname='localhost', timeout=15) _, resp = smtp_.docmd('AUTH', 'CRAM-MD5') code, resp = smtp_.docmd(encode_cram_md5(resp, 'test', 'test')) # For now, the server's going to return a 535 code. self.assertEqual(code, 535) srv.stop() def test_AUTH_PLAIN_reject(self): """ Makes sure the server rejects all invalid login attempts that use the PLAIN Authentication method. 
""" options = {'enabled': 'True', 'port': 0, 'protocol_specific_data': {'banner': 'Test'}, 'users': {'someguy': 'test'}} cap = smtp.smtp(options, self.work_dir) srv = StreamServer(('0.0.0.0', 0), cap.handle_session) srv.start() smtp_ = smtplib.SMTP('127.0.0.1', srv.server_port, local_hostname='localhost', timeout=15) arg = '\0%s\0%s' % ('test', 'test') code, resp = smtp_.docmd('AUTH', 'PLAIN ' + base64.b64encode(arg)) self.assertEqual(code, 535) srv.stop() def test_AUTH_LOGIN_reject(self): """ Makes sure the server rejects all invalid login attempts that use the LOGIN Authentication method. """ options = {'enabled': 'True', 'port': 0, 'protocol_specific_data': {'banner': 'Test'}, 'users': {'someguy': 'test'}} cap = smtp.smtp(options, self.work_dir) srv = StreamServer(('0.0.0.0', 0), cap.handle_session) srv.start() smtp_ = smtplib.SMTP('127.0.0.1', srv.server_port, local_hostname='localhost', timeout=15) smtp_.docmd('AUTH', 'LOGIN') smtp_.docmd(base64.b64encode('test')) code, resp = smtp_.docmd(base64.b64encode('test')) self.assertEqual(code, 535) srv.stop() def test_AUTH_CRAM_MD5(self): """ Makes sure the server accepts valid login attempts that use the CRAM-MD5 Authentication method. """ options = {'enabled': 'True', 'port': 0, 'protocol_specific_data': {'banner': 'Test'}, 'users': {'test': 'test'}} cap = smtp.smtp(options, self.work_dir) srv = StreamServer(('0.0.0.0', 0), cap.handle_session) srv.start() def encode_cram_md5(challenge, user, password): challenge = base64.decodestring(challenge) response = user + ' ' + hmac.HMAC(password, challenge).hexdigest() return base64.b64encode(response) smtp_ = smtplib.SMTP('127.0.0.1', srv.server_port, local_hostname='localhost', timeout=15) _, resp = smtp_.docmd('AUTH', 'CRAM-MD5') code, resp = smtp_.docmd(encode_cram_md5(resp, 'test', 'test')) # For now, the server's going to return a 535 code. self.assertEqual(code, 235) srv.stop() def test_AUTH_PLAIN(self): """ Makes sure the server accepts valid login attempts that use the PLAIN Authentication method. """ options = {'enabled': 'True', 'port': 0, 'protocol_specific_data': {'banner': 'Test'}, 'users': {'test': 'test'}} cap = smtp.smtp(options, self.work_dir) srv = StreamServer(('0.0.0.0', 0), cap.handle_session) srv.start() smtp_ = smtplib.SMTP('127.0.0.1', srv.server_port, local_hostname='localhost', timeout=15) arg = '\0%s\0%s' % ('test', 'test') code, resp = smtp_.docmd('AUTH', 'PLAIN ' + base64.b64encode(arg)) self.assertEqual(code, 235) srv.stop() def test_AUTH_LOGIN(self): """ Makes sure the server accepts valid login attempts that use the LOGIN Authentication method. """ options = {'enabled': 'True', 'port': 0, 'protocol_specific_data': {'banner': 'Test'}, 'users': {'test': 'test'}} cap = smtp.smtp(options, self.work_dir) srv = StreamServer(('0.0.0.0', 0), cap.handle_session) srv.start() smtp_client = smtplib.SMTP('127.0.0.1', srv.server_port, local_hostname='localhost', timeout=15) smtp_client.docmd('AUTH', 'LOGIN') smtp_client.docmd(base64.b64encode('test')) code, resp = smtp_client.docmd(base64.b64encode('test')) self.assertEqual(code, 235) srv.stop() if __name__ == '__main__': unittest.main()
honeynet/beeswarm
beeswarm/drones/honeypot/tests/test_smtp.py
Python
gpl-3.0
7,214
""" Unit tests for the module. Thomas Ogden <t@ogden.eu> """ import os import unittest import numpy as np from maxwellbloch import mb_solve, t_funcs, spectral, utility # Absolute path of tests/json directory, so that tests can be called from # different directories. JSON_DIR = os.path.abspath(os.path.join(__file__, '../', 'json')) class TestInit(unittest.TestCase): def test_init_default(self): """ Test Default Initialise """ mb_solve_00 = mb_solve.MBSolve() self.assertEqual(mb_solve_00.atom.num_states, 1) # TODO: And the rest! def test_init_00(self): json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mb_solve_01 = mb_solve.MBSolve().from_json(json_path) @unittest.skip("TODO") class TestSolveOverThermalDetunings(unittest.TestCase): def test_00(self): json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mb_solve_00 = mb_solve.MBSolve().from_json(json_path) result_Delta = mb_solve_00.solve_over_thermal_detunings() self.assertEqual(len(result_Delta), len(mb_solve_00.thermal_delta_list)) class TestMBSolve(unittest.TestCase): def test_mb_solve(self): """ Basic test of mb_solve method. """ json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mb_solve_00 = mb_solve.MBSolve().from_json(json_path) mb_solve_00.mbsolve() def test_no_atoms(self): """ Setting the number density ampl to 0.0, i.e. no atoms. The end pulse should be the same as the start. """ json_path = os.path.join(JSON_DIR, "mb_solve_no_atoms.json") mbs = mb_solve.MBSolve().from_json(json_path) mbs.mbsolve(step='euler') self.assertEqual(mbs.Omegas_zt.shape, (1, 5, 101)) # Check that the field at the end of the medium matches the field # at the start of the medium. self.assertTrue(np.allclose(mbs.Omegas_zt[:, 0, :], mbs.Omegas_zt[:, -1, :], rtol=1.0e-6)) def test_no_atoms_ab(self): """ Setting the number density to 0.0, i.e. no atoms, with AB step. """ json_path = os.path.join(JSON_DIR, "mb_solve_no_atoms.json") mbs = mb_solve.MBSolve().from_json(json_path) mbs.mbsolve(step='ab') # Check that the field at the end of the medium matches the field # at the start of the medium. self.assertTrue(np.allclose(mbs.Omegas_zt[:, 0, :], mbs.Omegas_zt[:, -1, :], rtol=1.0e-6)) def test_no_decays(self): """ Empty decay list. """ json_path = os.path.join(JSON_DIR, "mb_solve_no_decays.json") mb_solve_nd = mb_solve.MBSolve().from_json(json_path) mb_solve_nd.mbsolve() def test_no_rabi_freq_t_func(self): """ Empty decay list. TODO: No mbsolve, should be in init""" json_path = os.path.join(JSON_DIR, "mb_solve_no_rabi_freq_t_func.json") mbs = mb_solve.MBSolve().from_json(json_path) # self.assertEqual(mbs.ob_atom.fields[0].rabi_freq_t_func, # t_funcs.square_1) self.assertDictEqual(mbs.atom.fields[0].rabi_freq_t_args, {"ampl_0": 1.0, "on_0": 0.0, "off_0": 1.0}) def test_two_gaussian_2pi(self): """ Test of a gaussian input 2pi soliton propagating through a two-level system. """ json_path = os.path.join(JSON_DIR, "mbs_two_gaussian_2pi.json") mbs = mb_solve.MBSolve().from_json(json_path) mbs.mbsolve() # Input pulse is 2pi self.assertAlmostEqual(mbs.fields_area()[0][0]/(np.pi), 2.0, places=1) # Output pulse is 2pi self.assertAlmostEqual(mbs.fields_area()[0][-1]/(np.pi), 2.0, places=1) def test_two_gaussian_2pi_n_pi(self): """ Test of a gaussian input 2pi soliton propagating through a two-level system. 
""" json_path = os.path.join(JSON_DIR, "mbs_two_gaussian_2pi_n_pi.json") mbs = mb_solve.MBSolve().from_json(json_path) mbs.mbsolve() # Input pulse is 2pi self.assertAlmostEqual(mbs.fields_area()[0][0]/(np.pi), 2.0, places=1) # Output pulse is 2pi self.assertAlmostEqual(mbs.fields_area()[0][-1]/(np.pi), 2.0, places=1) def test_two_sech_2pi(self): """ Test of a 2pi soliton propagating through a two-level system. """ json_path = os.path.join(JSON_DIR, "mbs_two_sech_2pi.json") mbs = mb_solve.MBSolve().from_json(json_path) mbs.mbsolve() # Input pulse is 2pi self.assertAlmostEqual(mbs.fields_area()[0][0]/(np.pi), 2.0, places=1) # Output pulse is 2pi self.assertAlmostEqual(mbs.fields_area()[0][-1]/(np.pi), 2.0, places=1) def test_two_sech_2pi_n_pi(self): """ Test of a 2pi soliton propagating through a two-level system, passing n_pi. """ json_path = os.path.join(JSON_DIR, "mbs_two_sech_2pi_n_pi.json") mbs = mb_solve.MBSolve().from_json(json_path) mbs.mbsolve() # Input pulse is 2pi self.assertAlmostEqual(mbs.fields_area()[0][0]/(np.pi), 2.0, places=1) # Output pulse is 2pi self.assertAlmostEqual(mbs.fields_area()[0][-1]/(np.pi), 2.0, places=1) def test_no_vel_classes(self): """ Empty velocity class dict. """ json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mbs = mb_solve.MBSolve().from_json(json_path) vc = {} mbs.build_velocity_classes(vc) mbs.mbsolve() def test_no_vel_classes_inner(self): """ No inner delta values in dict. TODO: No mbsolve, should be init""" json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mbs = mb_solve.MBSolve().from_json(json_path) vc = { "thermal_delta_min": -1.0, "thermal_delta_max": 1.0, "thermal_delta_steps": 2, "thermal_width": 1.0 } mbs.build_velocity_classes(vc) mbs.mbsolve() def test_zero_thermal_width(self): """TODO: No mbsolve, should be in init""" json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mbs = mb_solve.MBSolve().from_json(json_path) vc = { "thermal_delta_min": -1.0, "thermal_delta_max": 1.0, "thermal_delta_steps": 2, "thermal_delta_inner_min": 0.0, "thermal_delta_inner_max": 0.0, "thermal_delta_inner_steps": 0, "thermal_width": 0.0 } self.assertRaises(ValueError, mbs.build_velocity_classes, vc) def test_vel_classes(self): """Tests that for a linear two-level system with velocity classes, the absorption matches the known Voigt profile. """ json_path = os.path.join(JSON_DIR, "velocity-classes.json") mbs = mb_solve.MBSolve().from_json(json_path) mbs.mbsolve() freq_list = spectral.freq_list(mbs) abs = spectral.absorption(mbs, 0, -1) voigt = spectral.voigt_two_linear_known(freq_list, 1.0, 0.05).imag # Assert that the max of the abs residuals between the absorption # profile and the known broadened Voigt absorption profile for linear # two-level systems is below a tolerance self.assertTrue(np.max(np.abs(abs - voigt)) < 0.05) class TestSaveLoad(unittest.TestCase): """ Tests for the MBSolve save and load methods. """ def test_save_load_01(self): """ Solve a basic MBSolve problem. Save the results to file. Set the results in the MBSolve object to null. Load the results from file and check that they equal the original values. 
""" json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mb_solve_01 = mb_solve.MBSolve().from_json(json_path) Omegas_zt, states_zt = mb_solve_01.mbsolve() mb_solve_01.save_results() mb_solve_01.Omegas_zt = None mb_solve_01.states_zt = None mb_solve_01.load_results() Omegas_zt_loaded = mb_solve_01.Omegas_zt states_zt_loaded = mb_solve_01.states_zt self.assertTrue((Omegas_zt == Omegas_zt_loaded).all()) self.assertTrue((states_zt == states_zt_loaded).all()) def test_save_load_no_recalc(self): json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mb_solve_01 = mb_solve.MBSolve().from_json(json_path) Omegas_zt, states_zt = mb_solve_01.mbsolve() mb_solve_01.save_results() mb_solve_01.Omegas_zt = None mb_solve_01.states_zt = None Omegas_zt, states_zt = mb_solve_01.mbsolve(recalc=False) Omegas_zt_loaded = mb_solve_01.Omegas_zt states_zt_loaded = mb_solve_01.states_zt self.assertTrue((Omegas_zt == Omegas_zt_loaded).all()) self.assertTrue((states_zt == states_zt_loaded).all()) class TestBuildZlist(unittest.TestCase): def test_00(self): mb_solve_00 = mb_solve.MBSolve() zlist = np.array([0., .1, .2, .3, .4, .5, .6, .7, .8, .9, 1.]) self.assertTrue(np.allclose(mb_solve_00.zlist, zlist, rtol=1.0e-6)) class TestGetOmegasIntpTFuncs(unittest.TestCase): """ Unit tests of the get_Omegas_intp_t_funcs method """ def test_one_field(self): """ For the case of a single field """ json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mb_solve_00 = mb_solve.MBSolve().from_json(json_path) self.assertEqual(mb_solve_00.get_Omegas_intp_t_funcs(), ['intp']) def test_two_fields(self): """ For the case of two fields """ json_path = os.path.join(JSON_DIR, "mb_solve_lamda.json") mb_solve_lamda = mb_solve.MBSolve().from_json(json_path) self.assertEqual(mb_solve_lamda.get_Omegas_intp_t_funcs(), ['intp', 'intp']) class TestGetOmegasIntpTArgs(unittest.TestCase): """ Unit tests of the get_Omegas_intp_t_args method """ def test_one_field(self): """ For the case of a single field """ json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mb_solve_00 = mb_solve.MBSolve().from_json(json_path) Omegas_z = mb_solve_00.Omegas_zt[:, 0, :] t_args = mb_solve_00.get_Omegas_intp_t_args(Omegas_z) self.assertEqual(len(t_args), 1) self.assertTrue(np.all(t_args[0]['tlist'] == mb_solve_00.tlist)) self.assertTrue(np.all(t_args[0]['ylist'] == Omegas_z/(2.0*np.pi))) class TestPopulations(unittest.TestCase): def test_twolevel_shape(self): json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mbs = mb_solve.MBSolve().from_json(json_path) pop_lower = mbs.populations([0]) pop_upper = mbs.populations([1]) np.testing.assert_allclose(pop_lower, np.zeros((mbs.z_steps+1, mbs.t_steps+1))) np.testing.assert_allclose(pop_upper, np.zeros((mbs.z_steps+1, mbs.t_steps+1))) class TestPopulationsField(unittest.TestCase): def test_twolevel_shape(self): json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mbs = mb_solve.MBSolve().from_json(json_path) pop_upper = mbs.populations_field(field_idx=0, upper=True) pop_lower = mbs.populations_field(field_idx=0, upper=False) np.testing.assert_allclose(pop_lower, np.zeros((mbs.z_steps+1, mbs.t_steps+1))) np.testing.assert_allclose(pop_upper, np.zeros((mbs.z_steps+1, mbs.t_steps+1))) class TestCoherences(unittest.TestCase): def test_twolevel_shape(self): json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mbs = mb_solve.MBSolve().from_json(json_path) coh = mbs.coherences([[0, 1]]) np.testing.assert_allclose(coh, np.zeros((mbs.z_steps+1, mbs.t_steps+1))) class TestCoherencesField(unittest.TestCase): def 
test_twolevel_shape(self): json_path = os.path.join(JSON_DIR, "mb_solve_01.json") mbs = mb_solve.MBSolve().from_json(json_path) coh = mbs.coherences_field(field_idx=0) np.testing.assert_allclose(coh, np.zeros((mbs.z_steps+1, mbs.t_steps+1)))
tommyogden/maxwellbloch
maxwellbloch/tests/test_mb_solve.py
Python
mit
12,255
# Test the most dynamic corner cases of Python's runtime semantics.

import builtins
import contextlib
import unittest

from test.support import run_unittest, swap_item, swap_attr


class RebindBuiltinsTests(unittest.TestCase):
    """Test all the ways that we can change/shadow globals/builtins."""

    def configure_func(self, func, *args):
        """Perform TestCase-specific configuration on a function before testing.

        By default, this does nothing. Example usage: spinning a function so
        that a JIT will optimize it.

        Subclasses should override this as needed.

        Args:
            func: function to configure.
            *args: any arguments that should be passed to func, if calling it.

        Returns:
            Nothing. Work will be performed on func in-place.
        """
        pass

    def test_globals_shadow_builtins(self):
        # Modify globals() to shadow an entry in builtins.
        def foo():
            return len([1, 2, 3])
        self.configure_func(foo)

        self.assertEqual(foo(), 3)
        with swap_item(globals(), "len", lambda x: 7):
            self.assertEqual(foo(), 7)

    def test_modify_builtins(self):
        # Modify the builtins module directly.
        def foo():
            return len([1, 2, 3])
        self.configure_func(foo)

        self.assertEqual(foo(), 3)
        with swap_attr(builtins, "len", lambda x: 7):
            self.assertEqual(foo(), 7)

    def test_modify_builtins_while_generator_active(self):
        # Modify the builtins out from under a live generator.
        def foo():
            x = range(3)
            yield len(x)
            yield len(x)
        self.configure_func(foo)

        g = foo()
        self.assertEqual(next(g), 3)
        with swap_attr(builtins, "len", lambda x: 7):
            self.assertEqual(next(g), 7)

    def test_modify_builtins_from_leaf_function(self):
        # Verify that modifications made by leaf functions percolate up the
        # callstack.
        with swap_attr(builtins, "len", len):
            def bar():
                builtins.len = lambda x: 4

            def foo(modifier):
                l = []
                l.append(len(range(7)))
                modifier()
                l.append(len(range(7)))
                return l
            self.configure_func(foo, lambda: None)

            self.assertEqual(foo(bar), [7, 4])

    def test_cannot_change_globals_or_builtins_with_eval(self):
        def foo():
            return len([1, 2, 3])
        self.configure_func(foo)

        # Note that this *doesn't* change the definition of len() seen by foo().
        builtins_dict = {"len": lambda x: 7}
        globals_dict = {"foo": foo, "__builtins__": builtins_dict,
                        "len": lambda x: 8}
        self.assertEqual(eval("foo()", globals_dict), 3)

        self.assertEqual(eval("foo()", {"foo": foo}), 3)

    def test_cannot_change_globals_or_builtins_with_exec(self):
        def foo():
            return len([1, 2, 3])
        self.configure_func(foo)

        globals_dict = {"foo": foo}
        exec("x = foo()", globals_dict)
        self.assertEqual(globals_dict["x"], 3)

        # Note that this *doesn't* change the definition of len() seen by foo().
        builtins_dict = {"len": lambda x: 7}
        globals_dict = {"foo": foo, "__builtins__": builtins_dict,
                        "len": lambda x: 8}
        exec("x = foo()", globals_dict)
        self.assertEqual(globals_dict["x"], 3)

    def test_cannot_replace_builtins_dict_while_active(self):
        def foo():
            x = range(3)
            yield len(x)
            yield len(x)
        self.configure_func(foo)

        g = foo()
        self.assertEqual(next(g), 3)
        with swap_item(globals(), "__builtins__", {"len": lambda x: 7}):
            self.assertEqual(next(g), 3)

    def test_cannot_replace_builtins_dict_between_calls(self):
        def foo():
            return len([1, 2, 3])
        self.configure_func(foo)

        self.assertEqual(foo(), 3)
        with swap_item(globals(), "__builtins__", {"len": lambda x: 7}):
            self.assertEqual(foo(), 3)

    def test_eval_gives_lambda_custom_globals(self):
        globals_dict = {"len": lambda x: 7}
        foo = eval("lambda: len([])", globals_dict)
        self.configure_func(foo)

        self.assertEqual(foo(), 7)


def test_main():
    run_unittest(RebindBuiltinsTests)


if __name__ == "__main__":
    test_main()
ArcherSys/ArcherSys
Lib/test/test_dynamic.py
Python
mit
13,577
#!/usr/bin/python -S
#
# Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).

"""Perform auto-approvals and auto-blocks on translation import queue"""

import _pythonpath

from lp.translations.scripts.import_queue_gardener import ImportQueueGardener

if __name__ == '__main__':
    script = ImportQueueGardener(
        'translations-import-queue-gardener',
        dbuser='translations_import_queue_gardener')
    script.lock_and_run()
abramhindle/UnnaturalCodeFork
python/testdata/launchpad/cronscripts/rosetta-approve-imports.py
Python
agpl-3.0
523
#! /usr/bin/env python3 import json import pathlib import platform import shutil import zipfile import more_itertools # PyPI: more-itertools import requests # PyPI: requests import minecraft_data # https://github.com/fenhl/python-minecraft-data def _download(url, local_filename=None): #FROM http://stackoverflow.com/a/16696317/667338 if local_filename is None: local_filename = url.split('#')[0].split('?')[0].split('/')[-1] if local_filename == '': raise ValueError('no local filename specified') r = requests.get(url, stream=True) with open(local_filename, 'wb') as f: for chunk in r.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks f.write(chunk) f.flush() def build_items(): data = minecraft_data(minecraft_version()) all_items = {} for item in data.items_list: all_items[item['name']] = { 'name': item['displayName'], #TODO solid #TODO image #TODO damagedImages #TODO blockID 'itemID': item['id'], #TODO damageValues #TODO effects #TODO tagPath #TODO tagVariants #TODO obtaining #TODO dropsSelf #TODO whenPlaced #TODO creativeMenu #TODO pickBlock #TODO durability 'stackable': item['stackSize'] #TODO alwaysGlow } return {'minecraft': all_items} def client_versions_path(): if platform.node() == 'gharch': import minecraft # https://github.com/wurstmineberg/systemd-minecraft return minecraft.CONFIG['paths']['clientVersions'] else: return pathlib.Path('/opt/wurstmineberg/.minecraft/versions') def download_client(version, client_jar_path): client_jar_path.parent.mkdir(exist_ok=True, parents=True) # get version info versions_json = requests.get('https://launchermeta.mojang.com/mc/game/version_manifest.json').json() version_dict = more_itertools.one(filter(lambda version_dict: version_dict.get('id') == version, versions_json['versions'])) version_json = requests.get(version_dict['url']).json() # get client jar _download(version_json['downloads']['client']['url'], local_filename=client_jar_path) def minecraft_version(): if platform.node() == 'gharch': import minecraft # https://github.com/wurstmineberg/systemd-minecraft return minecraft.World().version() else: return '1.16.1' #TODO automate? if __name__ == '__main__': # unzip client version = minecraft_version() client_jar_path = client_versions_path() / version / f'{version}.jar' if not client_jar_path.exists(): download_client(version, client_jar_path) with zipfile.ZipFile(str(client_jar_path)) as client_jar: client_jar.extractall('build/clientjar') client = pathlib.Path('build/clientjar') # copy advancements if pathlib.Path('json/advancements').exists(): shutil.rmtree('json/advancements') shutil.copytree(str(client / 'data' / 'minecraft' / 'advancements'), 'json/advancements') # build items if pathlib.Path('json/items.json').exists(): pathlib.Path('json/items.json').unlink() with pathlib.Path('json/items.json').open('w') as items_json_f: json.dump(build_items(), items_json_f, indent=4, sort_keys=True) print(file=items_json_f) # add trailing newline # copy lang if pathlib.Path('json/lang.json').exists(): pathlib.Path('json/lang.json').unlink() lang = {} shutil.copy2(client / 'assets' / 'minecraft' / 'lang' / 'en_us.json', 'json/lang.json') # remove client files shutil.rmtree('build/clientjar')
wurstmineberg/assets.wurstmineberg.de
build/build.py
Python
mit
3,775
from django.conf.urls import include, url

from whattheadmin import views

urlpatterns = [
    url(r'email/send', views.send_email, name='admin-new-email'),
    url(r'', views.dashboard, name='admin-dashboard'),
]
mikeshultz/whatthediff
whattheadmin/urls.py
Python
gpl-2.0
212
# coding: utf-8 """ Course Schedule and Details Settings page. """ from __future__ import unicode_literals from bok_choy.promise import EmptyPromise from .course_page import CoursePage from .utils import press_the_notification_button class SettingsPage(CoursePage): """ Course Schedule and Details Settings page. """ url_path = "settings/details" def is_browser_on_page(self): return self.q(css='body.view-settings').present def refresh_and_wait_for_load(self): """ Refresh the page and wait for all resources to load. """ self.browser.refresh() self.wait_for_page() def get_elements(self, css_selector): self.wait_for_element_presence( css_selector, 'Elements matching "{}" selector are present'.format(css_selector) ) results = self.q(css=css_selector) return results def get_element(self, css_selector): results = self.get_elements(css_selector=css_selector) return results[0] if results else None @property def pre_requisite_course_options(self): """ Returns the pre-requisite course drop down field options. """ return self.get_elements('#pre-requisite-course') @property def entrance_exam_field(self): """ Returns the enable entrance exam checkbox. """ return self.get_element('#entrance-exam-enabled') @property def alert_confirmation_title(self): """ Returns the alert confirmation element, which contains text such as 'Your changes have been saved.' """ return self.get_element('#alert-confirmation-title') def require_entrance_exam(self, required=True): """ Set the entrance exam requirement via the checkbox. """ checkbox = self.entrance_exam_field selected = checkbox.is_selected() if required and not selected: checkbox.click() self.wait_for_element_visibility( '#entrance-exam-minimum-score-pct', 'Entrance exam minimum score percent is visible' ) if not required and selected: checkbox.click() self.wait_for_element_invisibility( '#entrance-exam-minimum-score-pct', 'Entrance exam minimum score percent is invisible' ) @property def course_license(self): """ Property. 
Returns the text of the license type for the course ("All Rights Reserved" or "Creative Commons") """ license_types_css = "section.license ul.license-types li.license-type" self.wait_for_element_presence( license_types_css, "license type buttons are present", ) selected = self.q(css=license_types_css + " button.is-selected") if selected.is_present(): return selected.text[0] # Look for the license text that will be displayed by default, # if no button is yet explicitly selected license_text = self.q(css='section.license span.license-text') if license_text.is_present(): return license_text.text[0] return None @course_license.setter def course_license(self, license_name): """ Sets the course license to the given license_name (str, "All Rights Reserved" or "Creative Commons") """ license_types_css = "section.license ul.license-types li.license-type" self.wait_for_element_presence( license_types_css, "license type buttons are present", ) button_xpath = ( "//section[contains(@class, 'license')]" "//ul[contains(@class, 'license-types')]" "//li[contains(@class, 'license-type')]" "//button[contains(text(),'{license_name}')]" ).format(license_name=license_name) button = self.q(xpath=button_xpath) if not button.present: raise Exception("Invalid license name: {name}".format(name=license_name)) button.click() def save_changes(self, wait_for_confirmation=True): """ Clicks save button, waits for confirmation unless otherwise specified """ press_the_notification_button(self, "save") if wait_for_confirmation: self.wait_for_element_visibility( '#alert-confirmation-title', 'Save confirmation message is visible' ) def refresh_page(self, wait_for_confirmation=True): """ Reload the page. """ self.browser.refresh() if wait_for_confirmation: EmptyPromise( lambda: self.q(css='body.view-settings').present, 'Page is refreshed' ).fulfill() self.wait_for_ajax()
vismartltd/edx-platform
common/test/acceptance/pages/studio/settings.py
Python
agpl-3.0
4,906
#------------------------------------------------------------------------ # # Register Gramplet # #------------------------------------------------------------------------ register(GRAMPLET, id="Data Entry Gramplet", name=_("Data Entry Gramplet"), description = _("Gramplet for quick data entry"), height=375, expand=False, gramplet = 'DataEntryGramplet', gramplet_title=_("Data Entry"), detached_width = 510, detached_height = 480, version = '1.0.46', gramps_target_version = "5.1", status=STABLE, # not yet tested with python 3 fname="DataEntryGramplet.py", help_url="Data Entry Gramplet", navtypes=["Person"], )
gramps-project/addons-source
DataEntryGramplet/DataEntryGramplet.gpr.py
Python
gpl-2.0
761
import factory from datetime import datetime from django.utils.text import slugify from django.utils.timezone import make_aware from .. import models class UserFactory(factory.DjangoModelFactory): """Factory for making demo users.""" class Meta: model = models.User # This next line is actually performed in _create, below, but it shown here for # declarative documentation. django_get_or_create = ['username'] first_name = "Gwen" last_name = "Ifill" username = factory.LazyAttribute(lambda u: slugify(u.first_name)) email = factory.LazyAttribute( lambda u: "%s@%s.om" % (slugify(u.first_name), slugify(u.first_name))) password = "secret" # There's is a circular dependency on users/organizations, so we can't create an # org when we create the first user for the org. To create a user with an organization, # do this: # u = UserFactory() # o = OrganizationFactory() # u.organization = o # # organization = factory.SubFactory('editorial.tests.factories.OrganizationFactory') user_type = "Editor" credit_name = factory.LazyAttribute(lambda c: "Credit %s %s" % (c.first_name, c.last_name)) title = "Managing Editor" phone = "415-555-1212" bio = factory.LazyAttribute(lambda c: "Bio for %s." % c.first_name) location = "Baltimore, Maryland" expertise = ["Writing", "Editing"] # notes = [] # FIXME photo = factory.django.ImageField() # facebook = factory.LazyAttribute( # lambda c: "http://facebook.com/%s" % slugify(c.first_name)) # twitter = factory.LazyAttribute(lambda c: "http://twitter.com/%s" % slugify(c.first_name)) # github = factory.LazyAttribute(lambda c: "http://github.com/%s" % slugify(c.first_name)) # linkedin = factory.LazyAttribute( # lambda c: "http://linkedin.com/%s" % slugify(c.first_name)) # instagram = factory.LazyAttribute( # lambda c: "http://instagram.com/%s" % slugify(c.first_name)) # snapchat = factory.LazyAttribute( # lambda c: "http://snapchat.com/%s" % slugify(c.first_name)) # vine = factory.LazyAttribute(lambda c: "http://vine.com/%s" % slugify(c.first_name)) website = factory.LazyAttribute(lambda c: "http://www.%s.com/" % slugify(c.first_name)) @classmethod def _create(cls, model_class, *args, **kwargs): """Override the default ``_create`` with our custom call.""" try: return models.User.objects.get(username=kwargs['username']) except models.User.DoesNotExist: manager = cls._get_manager(model_class) return manager.create_user(*args, **kwargs) class DiscussionFactory(factory.DjangoModelFactory): """Factory for making demo discussions.""" class Meta: model = models.Discussion discussion_type = "???" class OrganizationFactory(factory.DjangoModelFactory): """Factory for making demo organizations.""" class Meta: model = models.Organization django_get_or_create = ['name'] name = "Baltimore Sun" owner = factory.SubFactory(UserFactory) org_description = factory.LazyAttribute(lambda c: "Description for " + c.name) logo = factory.django.ImageField() location = "Baltimore, Maryland" creation_date = make_aware(datetime(2017, 1, 1)) # facebook = factory.LazyAttribute(lambda c: "http://facebook.com/%s" % slugify(c.name)) # twitter = factory.LazyAttribute(lambda c: "http://twitter.com/%s" % slugify(c.name)) website = factory.LazyAttribute(lambda c: "http://www.%s.com/" % slugify(c.name)) discussion = factory.SubFactory(DiscussionFactory, discussion_type="ORG") @factory.post_generation def add_owner_to_org(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. 
return self.owner.organization = self self.owner.save() class NetworkFactory(factory.DjangoModelFactory): """Factory for making network.""" class Meta: model = models.Network django_get_or_create = ['name'] name = "Baltimore Co-Op" owner_organization = factory.SubFactory(OrganizationFactory) creation_date = make_aware(datetime(2017, 1, 1)) network_description = "Description of network." logo = factory.django.ImageField() # organizations = [] ... discussion = factory.SubFactory(DiscussionFactory, discussion_type='NET') @factory.post_generation def organizations(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for org in extracted: self.organizations.add(org) class SeriesFactory(factory.DjangoModelFactory): """Factory for making series.""" class Meta: model = models.Series django_get_or_create = ['name'] name = "American Pets" series_description = "Description of series." owner = factory.SubFactory(UserFactory) organization = factory.SubFactory(OrganizationFactory) # team ... creation_date = make_aware(datetime(2017, 1, 1)) sensitive = False share = False # share_with = ... share_with_date = None collaborate = False # collaborate_with = ... archived = False discussion = factory.SubFactory(DiscussionFactory, discussion_type='SER') @factory.post_generation def team(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for user in extracted: self.team.add(user) @factory.post_generation def share_with(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for network in extracted: self.share_with.add(network) @factory.post_generation def collaborate_with(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for org in extracted: self.collaborate_with.add(org) class StoryFactory(factory.DjangoModelFactory): """Factory for making stories.""" class Meta: model = models.Story django_get_or_create = ['name'] series = factory.SubFactory(SeriesFactory) owner = factory.SubFactory(UserFactory) organization = factory.SubFactory(OrganizationFactory) original_story = True name = "Cute Kitten Rescued" story_description = "Description of story." embargo = False embargo_datetime = None creation_date = make_aware(datetime(2017, 1, 1)) # team = ... sensitive = False share = False share_with_date = None ready_to_share = False # share_with = ... collaborate = False # collaborate_with = ... archived = False discussion = factory.SubFactory(DiscussionFactory, discussion_type='STO') @factory.post_generation def team(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for user in extracted: self.team.add(user) @factory.post_generation def share_with(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for network in extracted: self.share_with.add(network) @factory.post_generation def collaborate_with(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. 
return if extracted: # A list of groups were passed in, use them for org in extracted: self.collaborate_with.add(org) class ProjectFactory(factory.DjangoModelFactory): """Factory for making projects.""" class Meta: model = models.Project django_get_or_create = ['name'] name = "Cover Congress" project_description = "Description of project." project_logo = factory.django.ImageField() owner = factory.SubFactory(UserFactory) organization = factory.SubFactory(OrganizationFactory) # team = ... creation_date = make_aware(datetime(2017, 1, 1)) sensitive = False share = False # share_with = ... share_with_date = None collaborate = False # collaborate_with = ... archived = False discussion = factory.SubFactory(DiscussionFactory, discussion_type='STO') website = factory.LazyAttribute(lambda c: "http://www.%s.com/" % slugify(c.name)) # github = factory.LazyAttribute(lambda c: "http://github.com/%s" % slugify(c.name)) # facebook = factory.LazyAttribute(lambda c: "http://facebook.com/%s" % slugify(c.name)) # twitter = factory.LazyAttribute(lambda c: "http://twitter.com/%s" % slugify(c.name)) # instagram = factory.LazyAttribute(lambda c: "http://instagram.com/%s" % slugify(c.name)) # snapchat = factory.LazyAttribute(lambda c: "http://snapchat.com/%s" % slugify(c.name)) # youtube = factory.LazyAttribute(lambda c: "http://youtube.com/%s" % slugify(c.name)) # governing_document_assets = [...] # project_document_assets = [...] @factory.post_generation def team(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for user in extracted: self.team.add(user) @factory.post_generation def share_with(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for network in extracted: self.share_with.add(network) @factory.post_generation def collaborate_with(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for org in extracted: self.collaborate_with.add(org) @factory.post_generation def governing_document_assets(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for asset in extracted: self.governing_document_assets.add(asset) @factory.post_generation def project_document_assets(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for asset in extracted: self.project_document_assets.add(asset) class TaskFactory(factory.DjangoModelFactory): """Factory for making tasks.""" class Meta: model = models.Task django_get_or_create = ['title'] title = "Get photos" owner = factory.SubFactory(UserFactory) text = "take pictures" # assigned_to = ... task_status = "In Progress" important = False creation_date = make_aware(datetime(2017, 1, 1)) due_date = make_aware(datetime(2017, 1, 1)) inprogress_date = make_aware(datetime(2017, 1, 10)) completion_date = make_aware(datetime(2017, 1, 25)) # project = ... # series = ... # story = ... # event = ... @factory.post_generation def assigned_to(self, create, extracted, **kwargs): if not create: # Simple build, do nothing. return if extracted: # A list of groups were passed in, use them for user in extracted: self.assigned_to.add(user)
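# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original module). Shows how the
# factories above fit together, including the user/organization workaround
# described in UserFactory and the list-valued ``post_generation`` hooks.
# Assumes a Django test database is available.
def _example_build_demo_data():
    user = UserFactory(username="gwen")
    org = OrganizationFactory(name="Baltimore Sun", owner=user)
    # Close the user <-> organization circular dependency by hand.
    user.organization = org
    user.save()
    network = NetworkFactory(owner_organization=org)
    # Lists passed to post_generation hooks (team, share_with, ...) are added
    # after the instance has been created.
    story = StoryFactory(owner=user, organization=org,
                         team=[user], share_with=[network])
    return story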
ProjectFacet/facet
project/editorial/tests/factories.py
Python
mit
12,283
#! /usr/bin/env python2.6 # # Delete a list of existing users from the running configuration using # edit-config; protect the transaction using a lock. # # $ ./nc06.py broccoli bob alice import sys, os, warnings warnings.simplefilter("ignore", DeprecationWarning) from ncclient import manager template = """<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0"> <aaa xmlns="http://tail-f.com/ns/aaa/1.1"> <authentication> <users> <user xc:operation="delete"> <name>%s</name> </user></users></authentication></aaa></config>""" def demo(host, user, names): with manager.connect(host=host, port=22, username=user) as m: with m.locked(target='running'): for n in names: m.edit_config(target='running', config=template % n) if __name__ == '__main__': demo(sys.argv[1], os.getenv("USER"), sys.argv[2:])
nnakamot/ncclient
examples/nc06.py
Python
apache-2.0
857
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Cranberry documentation build configuration file, created by # cookiecutter pipproject # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('../..')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'Cranberry' copyright = '2016, Daniel Schwabacher' author = 'Daniel Schwabacher' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.0.1' # The full version, including alpha/beta/rc tags. release = '0.0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. 
See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. # "<project> v<release> documentation" by default. #html_title = 'Cranberry v0.0.1' # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (relative to this directory) to use as a favicon of # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. # The empty string is equivalent to '%b %d, %Y'. #html_last_updated_fmt = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # 'ja' uses this config value. # 'zh' user can custom change `jieba` dictionary path. #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. 
htmlhelp_basename = 'Cranberrydoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'Cranberry.tex', 'Cranberry Documentation', 'Daniel Schwabacher', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'Cranberry', 'Cranberry Documentation', [author], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'Cranberry', 'Cranberry Documentation', author, 'Cranberry', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False
danielschwabacher/cranberry
docs/source/conf.py
Python
bsd-3-clause
9,391
from django.contrib.auth.decorators import login_required from django.shortcuts import render_to_response, redirect from django.template import RequestContext from django.http import HttpResponse from django.utils import simplejson from Custom_Remotes.models import * @login_required def customRemotes(request): context = {} context['customRemotes'] = Remote.objects.all() context['devices'] = Devices.objects.all() return render_to_response('custom_remotes.html', context, context_instance=RequestContext(request)) @login_required def autoRemotes(request): context = {} context['devices'] = Devices.objects.all() return render_to_response('auto_remotes.html', context, context_instance=RequestContext(request)) @login_required def editButton(request, buttonID): context = {} if request.method == 'POST': if 'clearButton' in request.POST: b = Button.objects.filter(id=buttonID)[0] remoteID = b.remote.id b.delete() return redirect('/remote/' + str(remoteID) + '/') return render_to_response('button.html', context, context_instance=RequestContext(request)) @login_required def newButton(request, remoteID, y, x): if request.method == 'POST': data = simplejson.loads(request.raw_post_data) actionID = data[0] name = data[1] icon = data[2] color = data[3] remote = Remote.objects.filter(id=remoteID)[0] action = Actions.objects.filter(id=actionID)[0] newButton = Button(name=name, x=x, y=y, action=action, icon=icon, remote=remote, color=color) newButton.save() return redirect('/remote/' + str(remoteID) + '/') context = {} context['newButton'] = True form = ButtonForm() formActions = [] for action in Actions.objects.all(): actualAction = action.getSubclassInstance() if hasattr(actualAction, 'visible'): if actualAction.visible: formActions.append(action.id) else: formActions.append(action.id) form.fields['action'].queryset = Actions.objects.filter(id__in=formActions) context['buttonForm'] = form return render_to_response('button.html', context, context_instance=RequestContext(request)) @login_required def runButton(request, buttonID): b = Button.objects.filter(id=buttonID)[0] b.action.runAction() return HttpResponse(simplejson.dumps(''), mimetype='application/javascript') @login_required def remote(request, remoteID): context = {} context['edit'] = True remote = Remote.objects.filter(id=remoteID)[0] buttons = [] assignedButtons = Button.objects.filter(remote=remote) for row in range(0, remote.rows): buttons.append({}) for button in assignedButtons: buttons[button.y][str(button.x)] = button if not len(assignedButtons): context['no_assigned_buttons'] = True remote.buttons = buttons context['remote'] = remote return render_to_response('remote.html', context, context_instance=RequestContext(request)) @login_required def deviceRemote(request, deviceID): context = {} buttons = [] context['edit'] = False device = Devices.objects.filter(id=deviceID)[0] actions = Actions.objects.filter(device=device) remote = Remote(name=device.name, style=1, user=request.user) numActions = len(actions) remote.rows = numActions / 3 if numActions % 3: remote.rows += 1 for row in range(0, remote.rows): buttons.append({}) for col in range(0, 3): if row * 3 + col < numActions: action = actions[row * 3 + col] buttons[row][col] = Button(name=action.name, icon='star', action=action, id=str(action.id)) remote.buttons = buttons context['remote'] = remote return render_to_response('remote.html', context, context_instance=RequestContext(request)) @login_required def remotes(request): context = {} if request.method == 'POST': if 'saveRemote' in 
request.POST: r = Remote(user=request.user) newRemote = RemoteForm(request.POST, instance=r) if newRemote.is_valid(): newRemote.save() if 'deleteRemote' in request.POST: Remote.objects.filter(id=request.POST['deleteRemote']).delete() # delete related buttons here if nec. context['remotes'] = Remote.objects.filter(user=request.user) context['remoteForm'] = RemoteForm() return render_to_response('remotes.html', context, context_instance=RequestContext(request))
dandroid88/webmote
modules/Custom_Remotes/views.py
Python
gpl-3.0
4,617
# coding: utf-8

import sys
import importlib
import pandas as pd
import numpy as np
from collections import Counter
import tensorflow as tf
import matplotlib.pyplot as plt


class Visual:
    def __init__(self):
        pass

    def main(self):
        # Read file
        df = pd.read_csv('./log/acculoss_dropout_1.0_train_0.05_batch_100_iter_10000.csv')

        # Set values
        x = df['epoch'].values
        y0 = df['train_loss'].values
        y1 = df['train_accuracy'].values
        y2 = df['test_accuracy'].values

        # Set background color to white
        fig = plt.figure()
        fig.patch.set_facecolor('white')

        # Plot lines
        plt.xlabel('epoch')
        plt.plot(x, y0, label='train_loss')
        plt.plot(x, y1, label='train_accuracy')
        plt.plot(x, y2, label='test_accuracy')
        plt.legend()

        # Visualize
        plt.show()


if __name__ == "__main__":
    Visual().main()
iShoto/incoption
src/visual.py
Python
mit
965
"""CSSStyleDeclaration implements DOM Level 2 CSS CSSStyleDeclaration and extends CSS2Properties see http://www.w3.org/TR/1998/REC-CSS2-19980512/syndata.html#parsing-errors Unknown properties ------------------ User agents must ignore a declaration with an unknown property. For example, if the style sheet is:: H1 { color: red; rotation: 70minutes } the user agent will treat this as if the style sheet had been:: H1 { color: red } Cssutils gives a message about any unknown properties but keeps any property (if syntactically correct). Illegal values -------------- User agents must ignore a declaration with an illegal value. For example:: IMG { float: left } /* correct CSS2 */ IMG { float: left here } /* "here" is not a value of 'float' */ IMG { background: "red" } /* keywords cannot be quoted in CSS2 */ IMG { border-width: 3 } /* a unit must be specified for length values */ A CSS2 parser would honor the first rule and ignore the rest, as if the style sheet had been:: IMG { float: left } IMG { } IMG { } IMG { } Cssutils again will issue a message (WARNING in this case) about invalid CSS2 property values. TODO: This interface is also used to provide a read-only access to the computed values of an element. See also the ViewCSS interface. - return computed values and not literal values - simplify unit pairs/triples/quadruples 2px 2px 2px 2px -> 2px for border/padding... - normalize compound properties like: background: no-repeat left url() #fff -> background: #fff url() no-repeat left """ __all__ = ['CSSStyleDeclaration', 'Property'] __docformat__ = 'restructuredtext' __version__ = '$Id$' from cssproperties import CSS2Properties from property import Property import cssutils import xml.dom class CSSStyleDeclaration(CSS2Properties, cssutils.util.Base2): """The CSSStyleDeclaration class represents a single CSS declaration block. This class may be used to determine the style properties currently set in a block or to set style properties explicitly within the block. While an implementation may not recognize all CSS properties within a CSS declaration block, it is expected to provide access to all specified properties in the style sheet through the CSSStyleDeclaration interface. Furthermore, implementations that support a specific level of CSS should correctly handle CSS shorthand properties for that level. For a further discussion of shorthand properties, see the CSS2Properties interface. Additionally the CSS2Properties interface is implemented. $css2propertyname All properties defined in the CSS2Properties class are available as direct properties of CSSStyleDeclaration with their respective DOM name, so e.g. ``fontStyle`` for property 'font-style'. These may be used as:: >>> style = CSSStyleDeclaration(cssText='color: red') >>> style.color = 'green' >>> print style.color green >>> del style.color >>> print style.color <BLANKLINE> Format:: [Property: Value Priority?;]* [Property: Value Priority?]? """ def __init__(self, cssText=u'', parentRule=None, readonly=False): """ :param cssText: Shortcut, sets CSSStyleDeclaration.cssText :param parentRule: The CSS rule that contains this declaration block or None if this CSSStyleDeclaration is not attached to a CSSRule. :param readonly: defaults to False """ super(CSSStyleDeclaration, self).__init__() self._parentRule = parentRule self.cssText = cssText self._readonly = readonly def __contains__(self, nameOrProperty): """Check if a property (or a property with given name) is in style. 
:param name: a string or Property, uses normalized name and not literalname """ if isinstance(nameOrProperty, Property): name = nameOrProperty.name else: name = self._normalize(nameOrProperty) return name in self.__nnames() def __iter__(self): """Iterator of set Property objects with different normalized names.""" def properties(): for name in self.__nnames(): yield self.getProperty(name) return properties() def keys(self): """Analoguous to standard dict returns property names which are set in this declaration.""" return list(self.__nnames()) def __getitem__(self, CSSName): """Retrieve the value of property ``CSSName`` from this declaration. ``CSSName`` will be always normalized. """ return self.getPropertyValue(CSSName) def __setitem__(self, CSSName, value): """Set value of property ``CSSName``. ``value`` may also be a tuple of (value, priority), e.g. style['color'] = ('red', 'important') ``CSSName`` will be always normalized. """ priority = None if type(value) == tuple: value, priority = value return self.setProperty(CSSName, value, priority) def __delitem__(self, CSSName): """Delete property ``CSSName`` from this declaration. If property is not in this declaration return u'' just like removeProperty. ``CSSName`` will be always normalized. """ return self.removeProperty(CSSName) def __setattr__(self, n, v): """Prevent setting of unknown properties on CSSStyleDeclaration which would not work anyway. For these ``CSSStyleDeclaration.setProperty`` MUST be called explicitly! TODO: implementation of known is not really nice, any alternative? """ known = ['_tokenizer', '_log', '_ttypes', '_seq', 'seq', 'parentRule', '_parentRule', 'cssText', 'valid', 'wellformed', '_readonly', '_profiles'] known.extend(CSS2Properties._properties) if n in known: super(CSSStyleDeclaration, self).__setattr__(n, v) else: raise AttributeError(u'Unknown CSS Property, ' u'``CSSStyleDeclaration.setProperty("%s", ' u'...)`` MUST be used.' % n) def __repr__(self): return u"cssutils.css.%s(cssText=%r)" % ( self.__class__.__name__, self.getCssText(separator=u' ')) def __str__(self): return u"<cssutils.css.%s object length=%r (all: %r) at 0x%x>" % ( self.__class__.__name__, self.length, len(self.getProperties(all=True)), id(self)) def __nnames(self): """Return iterator for all different names in order as set if names are set twice the last one is used (double reverse!) """ names = [] for item in reversed(self.seq): val = item.value if isinstance(val, Property) and not val.name in names: names.append(val.name) return reversed(names) # overwritten accessor functions for CSS2Properties' properties def _getP(self, CSSName): """(DOM CSS2Properties) Overwritten here and effectively the same as ``self.getPropertyValue(CSSname)``. Parameter is in CSSname format ('font-style'), see CSS2Properties. Example:: >>> style = CSSStyleDeclaration(cssText='font-style:italic;') >>> print style.fontStyle italic """ return self.getPropertyValue(CSSName) def _setP(self, CSSName, value): """(DOM CSS2Properties) Overwritten here and effectively the same as ``self.setProperty(CSSname, value)``. Only known CSS2Properties may be set this way, otherwise an AttributeError is raised. For these unknown properties ``setPropertyValue(CSSname, value)`` has to be called explicitly. Also setting the priority of properties needs to be done with a call like ``setPropertyValue(CSSname, value, priority)``. 
Example:: >>> style = CSSStyleDeclaration() >>> style.fontStyle = 'italic' >>> # or >>> style.setProperty('font-style', 'italic', '!important') """ self.setProperty(CSSName, value) # TODO: Shorthand ones def _delP(self, CSSName): """(cssutils only) Overwritten here and effectively the same as ``self.removeProperty(CSSname)``. Example:: >>> style = CSSStyleDeclaration(cssText='font-style:italic;') >>> del style.fontStyle >>> print style.fontStyle <BLANKLINE> """ self.removeProperty(CSSName) def children(self): """Generator yielding any known child in this declaration including *all* properties, comments or CSSUnknownrules. """ for item in self._seq: yield item.value def _getCssText(self): """Return serialized property cssText.""" return cssutils.ser.do_css_CSSStyleDeclaration(self) def _setCssText(self, cssText): """Setting this attribute will result in the parsing of the new value and resetting of all the properties in the declaration block including the removal or addition of properties. :exceptions: - :exc:`~xml.dom.NoModificationAllowedErr`: Raised if this declaration is readonly or a property is readonly. - :exc:`~xml.dom.SyntaxErr`: Raised if the specified CSS string value has a syntax error and is unparsable. """ self._checkReadonly() tokenizer = self._tokenize2(cssText) # for closures: must be a mutable new = {'wellformed': True} def ident(expected, seq, token, tokenizer=None): # a property tokens = self._tokensupto2(tokenizer, starttoken=token, semicolon=True) if self._tokenvalue(tokens[-1]) == u';': tokens.pop() property = Property(parent=self) property.cssText = tokens if property.wellformed: seq.append(property, 'Property') else: self._log.error(u'CSSStyleDeclaration: Syntax Error in ' u'Property: %s' % self._valuestr(tokens)) # does not matter in this case return expected def unexpected(expected, seq, token, tokenizer=None): # error, find next ; or } to omit upto next property ignored = self._tokenvalue(token) + self._valuestr( self._tokensupto2(tokenizer, propertyvalueendonly=True)) self._log.error(u'CSSStyleDeclaration: Unexpected token, ignoring ' 'upto %r.' % ignored,token) # does not matter in this case return expected # [Property: Value;]* Property: Value? newseq = self._tempSeq() wellformed, expected = self._parse(expected=None, seq=newseq, tokenizer=tokenizer, productions={'IDENT': ident},#, 'CHAR': char}, default=unexpected) # wellformed set by parse for item in newseq: item.value._parent = self # do not check wellformed as invalid things are removed anyway self._setSeq(newseq) cssText = property(_getCssText, _setCssText, doc=u"(DOM) A parsable textual representation of the " u"declaration block excluding the surrounding curly " u"braces.") def getCssText(self, separator=None): """ :returns: serialized property cssText, each property separated by given `separator` which may e.g. be ``u''`` to be able to use cssText directly in an HTML style attribute. ``;`` is part of each property (except the last one) and **cannot** be set with separator! """ return cssutils.ser.do_css_CSSStyleDeclaration(self, separator) def _setParentRule(self, parentRule): self._parentRule = parentRule # for x in self.children(): # x.parent = self parentRule = property(lambda self: self._parentRule, _setParentRule, doc="(DOM) The CSS rule that contains this declaration block or " "None if this CSSStyleDeclaration is not attached to a CSSRule.") def getProperties(self, name=None, all=False): """ :param name: optional `name` of properties which are requested. 
Only properties with this **always normalized** `name` are returned. If `name` is ``None`` all properties are returned (at least one for each set name depending on parameter `all`). :param all: if ``False`` (DEFAULT) only the effective properties are returned. If name is given a list with only one property is returned. if ``True`` all properties including properties set multiple times with different values or priorities for different UAs are returned. The order of the properties is fully kept as in the original stylesheet. :returns: a list of :class:`~cssutils.css.Property` objects set in this declaration. """ if name and not all: # single prop but list p = self.getProperty(name) if p: return [p] else: return [] elif not all: # effective Properties in name order return [self.getProperty(name)for name in self.__nnames()] else: # all properties or all with this name nname = self._normalize(name) properties = [] for item in self.seq: val = item.value if isinstance(val, Property) and ( (bool(nname) == False) or (val.name == nname)): properties.append(val) return properties def getProperty(self, name, normalize=True): """ :param name: of the CSS property, always lowercase (even if not normalized) :param normalize: if ``True`` (DEFAULT) name will be normalized (lowercase, no simple escapes) so "color", "COLOR" or "C\olor" will all be equivalent If ``False`` may return **NOT** the effective value but the effective for the unnormalized name. :returns: the effective :class:`~cssutils.css.Property` object. """ nname = self._normalize(name) found = None for item in reversed(self.seq): val = item.value if isinstance(val, Property): if (normalize and nname == val.name) or name == val.literalname: if val.priority: return val elif not found: found = val return found def getPropertyCSSValue(self, name, normalize=True): """ :param name: of the CSS property, always lowercase (even if not normalized) :param normalize: if ``True`` (DEFAULT) name will be normalized (lowercase, no simple escapes) so "color", "COLOR" or "C\olor" will all be equivalent If ``False`` may return **NOT** the effective value but the effective for the unnormalized name. :returns: :class:`~cssutils.css.CSSValue`, the value of the effective property if it has been explicitly set for this declaration block. (DOM) Used to retrieve the object representation of the value of a CSS property if it has been explicitly set within this declaration block. Returns None if the property has not been set. (This method returns None if the property is a shorthand property. Shorthand property values can only be accessed and modified as strings, using the getPropertyValue and setProperty methods.) **cssutils currently always returns a CSSValue if the property is set.** for more on shorthand properties see http://www.dustindiaz.com/css-shorthand/ """ nname = self._normalize(name) if nname in self._SHORTHANDPROPERTIES: self._log.info(u'CSSValue for shorthand property "%s" should be ' u'None, this may be implemented later.' % nname, neverraise=True) p = self.getProperty(name, normalize) if p: return p.cssValue else: return None def getPropertyValue(self, name, normalize=True): """ :param name: of the CSS property, always lowercase (even if not normalized) :param normalize: if ``True`` (DEFAULT) name will be normalized (lowercase, no simple escapes) so "color", "COLOR" or "C\olor" will all be equivalent If ``False`` may return **NOT** the effective value but the effective for the unnormalized name. 
:returns: the value of the effective property if it has been explicitly set for this declaration block. Returns the empty string if the property has not been set. """ p = self.getProperty(name, normalize) if p: return p.value else: return u'' def getPropertyPriority(self, name, normalize=True): """ :param name: of the CSS property, always lowercase (even if not normalized) :param normalize: if ``True`` (DEFAULT) name will be normalized (lowercase, no simple escapes) so "color", "COLOR" or "C\olor" will all be equivalent If ``False`` may return **NOT** the effective value but the effective for the unnormalized name. :returns: the priority of the effective CSS property (e.g. the "important" qualifier) if the property has been explicitly set in this declaration block. The empty string if none exists. """ p = self.getProperty(name, normalize) if p: return p.priority else: return u'' def removeProperty(self, name, normalize=True): """ (DOM) Used to remove a CSS property if it has been explicitly set within this declaration block. :param name: of the CSS property :param normalize: if ``True`` (DEFAULT) name will be normalized (lowercase, no simple escapes) so "color", "COLOR" or "C\olor" will all be equivalent. The effective Property value is returned and *all* Properties with ``Property.name == name`` are removed. If ``False`` may return **NOT** the effective value but the effective for the unnormalized `name` only. Also only the Properties with the literal name `name` are removed. :returns: the value of the property if it has been explicitly set for this declaration block. Returns the empty string if the property has not been set or the property name does not correspond to a known CSS property :exceptions: - :exc:`~xml.dom.NoModificationAllowedErr`: Raised if this declaration is readonly or the property is readonly. """ self._checkReadonly() r = self.getPropertyValue(name, normalize=normalize) newseq = self._tempSeq() if normalize: # remove all properties with name == nname nname = self._normalize(name) for item in self.seq: if not (isinstance(item.value, Property) and item.value.name == nname): newseq.appendItem(item) else: # remove all properties with literalname == name for item in self.seq: if not (isinstance(item.value, Property) and item.value.literalname == name): newseq.appendItem(item) self._setSeq(newseq) return r def setProperty(self, name, value=None, priority=u'', normalize=True): """(DOM) Set a property value and priority within this declaration block. :param name: of the CSS property to set (in W3C DOM the parameter is called "propertyName"), always lowercase (even if not normalized) If a property with this `name` is present it will be reset. cssutils also allowed `name` to be a :class:`~cssutils.css.Property` object, all other parameter are ignored in this case :param value: the new value of the property, ignored if `name` is a Property. :param priority: the optional priority of the property (e.g. "important"), ignored if `name` is a Property. :param normalize: if True (DEFAULT) `name` will be normalized (lowercase, no simple escapes) so "color", "COLOR" or "C\olor" will all be equivalent :exceptions: - :exc:`~xml.dom.SyntaxErr`: Raised if the specified value has a syntax error and is unparsable. - :exc:`~xml.dom.NoModificationAllowedErr`: Raised if this declaration is readonly or the property is readonly. 
""" self._checkReadonly() if isinstance(name, Property): newp = name name = newp.literalname elif not value: # empty string or None effectively removed property return self.removeProperty(name) else: newp = Property(name, value, priority) if not newp.wellformed: self._log.warn(u'Invalid Property: %s: %s %s' % (name, value, priority)) else: nname = self._normalize(name) properties = self.getProperties(name, all=(not normalize)) for property in reversed(properties): if normalize and property.name == nname: property.cssValue = newp.cssValue.cssText property.priority = newp.priority break elif property.literalname == name: property.cssValue = newp.cssValue.cssText property.priority = newp.priority break else: newp.parent = self self.seq._readonly = False self.seq.append(newp, 'Property') self.seq._readonly = True def item(self, index): """(DOM) Retrieve the properties that have been explicitly set in this declaration block. The order of the properties retrieved using this method does not have to be the order in which they were set. This method can be used to iterate over all properties in this declaration block. :param index: of the property to retrieve, negative values behave like negative indexes on Python lists, so -1 is the last element :returns: the name of the property at this ordinal position. The empty string if no property exists at this position. **ATTENTION:** Only properties with different names are counted. If two properties with the same name are present in this declaration only the effective one is included. :meth:`item` and :attr:`length` work on the same set here. """ names = list(self.__nnames()) try: return names[index] except IndexError: return u'' length = property(lambda self: len(list(self.__nnames())), doc=u"(DOM) The number of distinct properties that have " u"been explicitly in this declaration block. The " u"range of valid indices is 0 to length-1 inclusive. " u"These are properties with a different ``name`` " u"only. :meth:`item` and :attr:`length` work on the " u"same set here.")
kgn/cssutils
src/cssutils/css/cssstyledeclaration.py
Python
gpl-3.0
25,095
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class DataProtectionOperations: """DataProtectionOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.dataprotection.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config async def check_feature_support( self, location: str, parameters: "_models.FeatureValidationRequestBase", **kwargs: Any ) -> "_models.FeatureValidationResponseBase": """Validates if a feature is supported. Validates if a feature is supported. :param location: :type location: str :param parameters: Feature support request object. 
:type parameters: ~azure.mgmt.dataprotection.models.FeatureValidationRequestBase :keyword callable cls: A custom type or function that will be passed the direct response :return: FeatureValidationResponseBase, or the result of cls(response) :rtype: ~azure.mgmt.dataprotection.models.FeatureValidationResponseBase :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FeatureValidationResponseBase"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-07-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.check_feature_support.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'location': self._serialize.url("location", location, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'FeatureValidationRequestBase') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('FeatureValidationResponseBase', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized check_feature_support.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataProtection/locations/{location}/checkFeatureSupport'} # type: ignore
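# -----------------------------------------------------------------------------
# Usage sketch (illustrative only; the client class name and the operation-group
# attribute used below are assumptions, not taken from this file):
#
#     # client = DataProtectionClient(credential, subscription_id)  # assumed name
#     # request = ...  # an instance of a FeatureValidationRequestBase subclass
#     #                # from azure.mgmt.dataprotection.models
#     # result = await client.data_protection.check_feature_support("westus", request)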
Azure/azure-sdk-for-python
sdk/dataprotection/azure-mgmt-dataprotection/azure/mgmt/dataprotection/aio/operations/_data_protection_operations.py
Python
mit
4,996
#!/usr/bin/env python ### # (C) Copyright (2012-2015) Hewlett Packard Enterprise Development LP # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. ### from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from builtins import range from future import standard_library standard_library.install_aliases() import sys PYTHON_VERSION = sys.version_info[:3] PY2 = (PYTHON_VERSION[0] == 2) if PY2: if PYTHON_VERSION < (2, 7, 9): raise Exception('Must use Python 2.7.9 or later') elif PYTHON_VERSION < (3, 4): raise Exception('Must use Python 3.4 or later') import hpOneView as hpov from pprint import pprint def acceptEULA(con): # See if we need to accept the EULA before we try to log in con.get_eula_status() try: if con.get_eula_status() is True: print('EULA display needed') con.set_eula('no') except Exception as e: print('EXCEPTION:') print(e) def login(con, credential): # Login with givin credentials try: con.login(credential) except: print('Login failed') def getconvol(sto): vols = sto.get_connectable_storage_volume_templates() pprint(vols) def main(): parser = argparse.ArgumentParser(add_help=True, formatter_class=argparse.RawTextHelpFormatter, description=''' Display Connectable Volume Templates Usage: ''') parser.add_argument('-a', dest='host', required=True, help=''' HP OneView Appliance hostname or IP address''') parser.add_argument('-u', dest='user', required=False, default='Administrator', help=''' HP OneView Username''') parser.add_argument('-p', dest='passwd', required=True, help=''' HP OneView Password''') parser.add_argument('-c', dest='cert', required=False, help=''' Trusted SSL Certificate Bundle in PEM (Base64 Encoded DER) Format''') parser.add_argument('-y', dest='proxy', required=False, help=''' Proxy (host:port format''') args = parser.parse_args() credential = {'userName': args.user, 'password': args.passwd} con = hpov.connection(args.host) sto = hpov.storage(con) if args.proxy: con.set_proxy(args.proxy.split(':')[0], args.proxy.split(':')[1]) if args.cert: con.set_trusted_ssl_bundle(args.cert) login(con, credential) acceptEULA(con) getconvol(sto) if __name__ == '__main__': import sys import argparse sys.exit(main()) # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
miqui/python-hpOneView
examples/scripts/get-connectible-volume-templates.py
Python
mit
3,774
# Copyright (C) 2011 Canonical Ltd. # Copyright (C) 2012 Hewlett-Packard Development Company, L.P. # # Author: Scott Moser <scott.moser@canonical.com> # Author: Juerg Haefliger <juerg.haefliger@hp.com> # # This file is part of cloud-init. See LICENSE file for license information. """ Scripts Per Boot ---------------- **Summary:** run per boot scripts Any scripts in the ``scripts/per-boot`` directory on the datasource will be run every time the system boots. Scripts will be run in alphabetical order. This module does not accept any config keys. **Internal name:** ``cc_scripts_per_boot`` **Module frequency:** per always **Supported distros:** all """ import os from cloudinit import util from cloudinit.settings import PER_ALWAYS frequency = PER_ALWAYS SCRIPT_SUBDIR = 'per-boot' def handle(name, _cfg, cloud, log, _args): # Comes from the following: # https://forums.aws.amazon.com/thread.jspa?threadID=96918 runparts_path = os.path.join(cloud.get_cpath(), 'scripts', SCRIPT_SUBDIR) try: util.runparts(runparts_path) except Exception: log.warning("Failed to run module %s (%s in %s)", name, SCRIPT_SUBDIR, runparts_path) raise # vi: ts=4 expandtab
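# Usage note (assumed default layout): with the standard /var/lib/cloud tree,
# the directory scanned above resolves to /var/lib/cloud/scripts/per-boot/, so
# an executable file such as 00-refresh-dns.sh placed there would be re-run by
# this module on every boot, in alphabetical order.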
larsks/cloud-init
cloudinit/config/cc_scripts_per_boot.py
Python
gpl-3.0
1,232
import json import datetime from collections import defaultdict, Counter from django.core.management.base import BaseCommand from rrl import RateLimiter from ...models import UsageReport, Profile class Command(BaseCommand): help = "aggregate usage reports for API keys" def add_arguments(self, parser): parser.add_argument("filenames", nargs="+") def process_log_line(self, line): day = line["timestamp"][:10] if line["event"] == "graphql": endpoint = "graphql" self.count_by_day[day][(line["api_key"], endpoint)] += 1 self.duration_by_day[day][line["api_key"]][endpoint] += line["duration"] self.lines += 1 def process_file(self, filename): with open(filename) as f: for line in f.readlines(): if line.startswith("{"): self.process_log_line(json.loads(line)) def handle(self, *args, **options): # day -> (key, endpoint) -> # self.count_by_day = defaultdict(Counter) self.duration_by_day = defaultdict( lambda: defaultdict(lambda: defaultdict(float)) ) self.lines = 0 for filename in options["filenames"]: self.process_file(filename) print(f"processed {self.lines} lines") # don't use the oldest day, it is probably a partial and will overwrite good data newest_day = "2000-01-01" oldest_day = "2100-01-01" for day in self.count_by_day.keys(): if day > newest_day: newest_day = day if day < oldest_day: oldest_day = day print(f"found logs from {oldest_day} to {newest_day}, dropping {oldest_day}") keys = {key.api_key: key for key in Profile.objects.all()} limiter = RateLimiter( prefix="v3", tiers=[], use_redis_time=False, track_daily_usage=True ) for key in keys: usage = limiter.get_usage_since( key, datetime.date.today() - datetime.timedelta(days=7) ) for daily_usage in usage: UsageReport.objects.update_or_create( profile=keys[key], date=daily_usage.date, endpoint="v3", defaults=dict(calls=daily_usage.calls, total_duration_seconds=0), ) for day, counter in self.count_by_day.items(): # skip oldest day if day == oldest_day: continue # build log-based usage reports for (key, endpoint), calls in counter.items(): duration = self.duration_by_day[day][key][endpoint] # convert key try: profile = keys[key] except KeyError: print(f"unknown key {key} with {calls} calls") continue UsageReport.objects.update_or_create( profile=profile, date=day, endpoint=endpoint, defaults=dict(calls=calls, total_duration_seconds=duration), )
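# -----------------------------------------------------------------------------
# Illustrative invocation and input (hypothetical values; the JSON field names
# are the ones read by process_log_line above):
#
#     $ python manage.py aggregate_api_usage /var/log/api/*.log
#
#     {"timestamp": "2021-06-01T12:34:56", "event": "graphql",
#      "api_key": "abc123", "duration": 0.042}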
openstates/openstates.org
profiles/management/commands/aggregate_api_usage.py
Python
mit
3,177
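The management command above accumulates per-day usage with nested defaultdict/Counter structures before writing UsageReport rows. A simplified, self-contained sketch of that aggregation idea, using made-up log records and no Django models (the record shape mirrors the JSON lines the command parses, but the values are invented):

from collections import defaultdict, Counter

# Hypothetical log records shaped like the JSON lines parsed above.
records = [
    {"timestamp": "2021-03-01T10:00:00", "event": "graphql", "api_key": "k1", "duration": 0.12},
    {"timestamp": "2021-03-01T11:30:00", "event": "graphql", "api_key": "k1", "duration": 0.08},
    {"timestamp": "2021-03-02T09:15:00", "event": "graphql", "api_key": "k2", "duration": 0.30},
]

count_by_day = defaultdict(Counter)                        # day -> (key, endpoint) -> calls
duration_by_day = defaultdict(lambda: defaultdict(float))  # day -> (key, endpoint) -> seconds

for rec in records:
    day = rec["timestamp"][:10]
    ident = (rec["api_key"], "graphql")
    count_by_day[day][ident] += 1
    duration_by_day[day][ident] += rec["duration"]

print(dict(count_by_day["2021-03-01"]))   # {('k1', 'graphql'): 2}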
import math


def digitFactorialSum(n):
    return sum([math.factorial(int(x)) for x in str(n)])


def repeatedLength(n):
    repeatedList = []
    while n not in repeatedList:
        repeatedList.append(n)
        n = digitFactorialSum(n)
    return len(repeatedList)


if __name__ == "__main__":
    cnt = 0
    for i in range(1, 1000000):
        if repeatedLength(i) == 60:
            cnt += 1
    print cnt
python27/AlgorithmSolution
ProjectEuler/51_100/Problem#74.py
Python
agpl-3.0
411
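The repeatedLength function above rescans a growing list on every step, and the script is written for Python 2 (note the print statement). A hedged alternative sketch, in Python 3, of the same digit-factorial chain length, using a set for O(1) membership tests and a precomputed digit-factorial table; the function names here are mine, not the original script's:

from math import factorial

FACT = [factorial(d) for d in range(10)]  # 0! .. 9!


def digit_factorial_sum(n):
    s = 0
    while n:
        n, d = divmod(n, 10)
        s += FACT[d]
    return s


def chain_length(n):
    seen = set()
    while n not in seen:
        seen.add(n)
        n = digit_factorial_sum(n)
    return len(seen)


# Same count the script above computes, just with cheaper bookkeeping:
# sum(1 for i in range(1, 1000000) if chain_length(i) == 60)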
# stdlibb import time # 3p import mock # project from checks import AgentCheck from tests.checks.common import AgentCheckTest RESULTS_TIMEOUT = 5 CONFIG = { 'instances': [{ 'name': 'conn_error', 'url': 'https://thereisnosuchlink.com', 'check_certificate_expiration': False, 'timeout': 1, }, { 'name': 'http_error_status_code', 'url': 'http://httpbin.org/404', 'check_certificate_expiration': False, 'timeout': 1, }, { 'name': 'status_code_match', 'url': 'http://httpbin.org/404', 'http_response_status_code': '4..', 'check_certificate_expiration': False, 'timeout': 1, 'tags': ["foo:bar"] }, { 'name': 'cnt_mismatch', 'url': 'https://github.com', 'timeout': 1, 'check_certificate_expiration': False, 'content_match': 'thereisnosuchword' }, { 'name': 'cnt_match', 'url': 'https://github.com', 'timeout': 1, 'check_certificate_expiration': False, 'content_match': '(thereisnosuchword|github)' } ] } CONFIG_SSL_ONLY = { 'instances': [{ 'name': 'good_cert', 'url': 'https://github.com', 'timeout': 1, 'check_certificate_expiration': True, 'days_warning': 14 }, { 'name': 'cert_exp_soon', 'url': 'https://github.com', 'timeout': 1, 'check_certificate_expiration': True, 'days_warning': 9999 }, { 'name': 'conn_error', 'url': 'https://thereisnosuchlink.com', 'timeout': 1, 'check_certificate_expiration': True, 'days_warning': 14 } ] } CONFIG_EXPIRED_SSL = { 'instances': [{ 'name': 'expired_cert', 'url': 'https://github.com', 'timeout': 1, 'check_certificate_expiration': True, 'days_warning': 14 }, ] } FAKE_CERT = {'notAfter': 'Apr 12 12:00:00 2006 GMT'} class HTTPCheckTest(AgentCheckTest): CHECK_NAME = 'http_check' def tearDown(self): self.check.stop() def wait_for_async_service_checks(self, count): i = 0 while i < RESULTS_TIMEOUT: self.check._process_results() if len(self.check.service_checks) >= count: return self.check.get_service_checks() time.sleep(1) i += 1 raise Exception("Didn't get the right count of service checks in time {0}" .format(self.check.service_checks)) def test_check(self): self.run_check(CONFIG) # Overrides self.service_checks attribute when values are available\ self.service_checks = self.wait_for_async_service_checks(5) # HTTP connection error tags = ['url:https://thereisnosuchlink.com', 'instance:conn_error'] self.assertServiceCheck("http.can_connect", status=AgentCheck.CRITICAL, tags=tags ) # Wrong HTTP response status code tags = ['url:http://httpbin.org/404', 'instance:http_error_status_code'] self.assertServiceCheck("http.can_connect", status=AgentCheck.CRITICAL, tags=tags) self.assertServiceCheck("http.can_connect", status=AgentCheck.OK, tags=tags, count=0) # HTTP response status code match tags = ['url:http://httpbin.org/404', 'instance:status_code_match', 'foo:bar'] self.assertServiceCheck("http.can_connect", status=AgentCheck.OK, tags=tags) # Content match & mismatching tags = ['url:https://github.com', 'instance:cnt_mismatch'] self.assertServiceCheck("http.can_connect", status=AgentCheck.CRITICAL, tags=tags) self.assertServiceCheck("http.can_connect", status=AgentCheck.OK, tags=tags, count=0) tags = ['url:https://github.com', 'instance:cnt_match'] self.assertServiceCheck("http.can_connect", status=AgentCheck.OK, tags=tags) self.coverage_report() def test_check_ssl(self): self.run_check(CONFIG_SSL_ONLY) # Overrides self.service_checks attribute when values are available self.service_checks = self.wait_for_async_service_checks(6) tags = ['url:https://github.com', 'instance:good_cert'] self.assertServiceCheck("http.ssl_cert", status=AgentCheck.OK, tags=tags) tags = 
['url:https://github.com', 'instance:cert_exp_soon'] self.assertServiceCheck("http.ssl_cert", status=AgentCheck.WARNING, tags=tags) tags = ['url:https://thereisnosuchlink.com', 'instance:conn_error'] self.assertServiceCheck("http.ssl_cert", status=AgentCheck.CRITICAL, tags=tags) self.coverage_report() @mock.patch('ssl.SSLSocket.getpeercert', return_value=FAKE_CERT) def test_mock_case(self, getpeercert_func): self.run_check(CONFIG_EXPIRED_SSL) # Overrides self.service_checks attribute when values are av # Needed for the HTTP headers self.service_checks = self.wait_for_async_service_checks(2) tags = ['url:https://github.com', 'instance:expired_cert'] self.assertServiceCheck("http.ssl_cert", status=AgentCheck.CRITICAL, tags=tags) self.coverage_report()
Shopify/dd-agent
tests/checks/integration/test_http_check.py
Python
bsd-3-clause
5,550
# -*- coding: utf-8 -*-

__author__ = """Chris Tabor (dxdstudio@gmail.com)"""

if __name__ == '__main__':
    from os import getcwd
    from os import sys
    sys.path.append(getcwd())

from MOAL.helpers.text import gibberish3
from MOAL.helpers.display import Section
from MOAL.data_structures.hashes.hashtable import NaiveHashTable
from pprint import pprint as ppr

DEBUG = True if __name__ == '__main__' else False


class HashList(NaiveHashTable):
    """Stores a list of computed hashes for each block of data (specified
    by offset) for a given string block representing a file-like object."""

    def __init__(self, filedata, chunk_size=4):
        super(HashList, self).__init__()
        self.hash_list = []
        self.compute_hashes(filedata, chunk_size)

    def __str__(self):
        ppr(self.hash_list)
        return ''

    def hash(self, data):
        return self.hash_fnv1a(data)

    def compute_hashes(self, filedata, chunk_size):
        """Compute a list of hashes for each block of data given by
        `filedata`. Chunk size will affect performance since each chunk
        has to be hashed. This is largely dependent on the hashing
        algorithm used.
        """
        current_offset = 0
        while len(filedata) > 0:
            # Add hashed chunk to the list
            self.hash_list.append(
                self.hash(filedata[current_offset:]))
            current_offset += chunk_size
            # Continue with substring value
            filedata = filedata[current_offset:]


class MockFile:

    def __init__(self):
        self.data = ''.join(map(gibberish3, range(10)))

    def __str__(self):
        return self.data


if DEBUG:
    with Section('Hash list'):
        fakefile = MockFile()
        hashlist = HashList(fakefile.data, chunk_size=12)
        print(hashlist)
christabor/MoAL
MOAL/data_structures/hashes/hash_list.py
Python
apache-2.0
1,820
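HashList above defers the actual hashing to hash_fnv1a, which it inherits from NaiveHashTable and which is not shown in this file. For reference, a standalone sketch of the 32-bit FNV-1a hash using the standard published constants; this is not necessarily byte-for-byte what the repo's NaiveHashTable implements:

def fnv1a_32(data: bytes) -> int:
    """32-bit FNV-1a: XOR each byte into the hash, then multiply by the FNV prime."""
    h = 0x811c9dc5          # FNV offset basis
    prime = 0x01000193      # FNV prime
    for byte in data:
        h ^= byte
        h = (h * prime) & 0xFFFFFFFF  # keep the value in 32 bits
    return h


print(hex(fnv1a_32(b"hello world")))  # prints a 32-bit hex digest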
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # isort:skip_file """Unit tests for Superset""" import json import prison from sqlalchemy.sql import func from superset import db, security_manager from superset.connectors.sqla.models import SqlaTable from superset.models.core import Database from superset.utils.core import get_example_database, get_main_database from tests.base_tests import SupersetTestCase from tests.fixtures.certificates import ssl_certificate from tests.test_app import app class TestDatabaseApi(SupersetTestCase): def insert_database( self, database_name: str, sqlalchemy_uri: str, extra: str = "", encrypted_extra: str = "", server_cert: str = "", expose_in_sqllab: bool = False, ) -> Database: database = Database( database_name=database_name, sqlalchemy_uri=sqlalchemy_uri, extra=extra, encrypted_extra=encrypted_extra, server_cert=server_cert, expose_in_sqllab=expose_in_sqllab, ) db.session.add(database) db.session.commit() return database def test_get_items(self): """ Database API: Test get items """ self.login(username="admin") uri = "api/v1/database/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 200) response = json.loads(rv.data.decode("utf-8")) expected_columns = [ "allow_csv_upload", "allow_ctas", "allow_cvas", "allow_dml", "allow_multi_schema_metadata_fetch", "allow_run_async", "allows_cost_estimate", "allows_subquery", "allows_virtual_table_explore", "backend", "changed_on", "changed_on_delta_humanized", "created_by", "database_name", "explore_database_id", "expose_in_sqllab", "force_ctas_schema", "function_names", "id", ] self.assertEqual(response["count"], 2) self.assertEqual(list(response["result"][0].keys()), expected_columns) def test_get_items_filter(self): """ Database API: Test get items with filter """ example_db = get_example_database() test_database = self.insert_database( "test-database", example_db.sqlalchemy_uri_decrypted, expose_in_sqllab=True ) dbs = db.session.query(Database).filter_by(expose_in_sqllab=True).all() self.login(username="admin") arguments = { "keys": ["none"], "filters": [{"col": "expose_in_sqllab", "opr": "eq", "value": True}], "order_columns": "database_name", "order_direction": "asc", "page": 0, "page_size": -1, } uri = f"api/v1/database/?q={prison.dumps(arguments)}" rv = self.client.get(uri) response = json.loads(rv.data.decode("utf-8")) self.assertEqual(rv.status_code, 200) self.assertEqual(response["count"], len(dbs)) # Cleanup db.session.delete(test_database) db.session.commit() def test_get_items_not_allowed(self): """ Database API: Test get items not allowed """ self.login(username="gamma") uri = f"api/v1/database/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 200) response = json.loads(rv.data.decode("utf-8")) self.assertEqual(response["count"], 0) def test_create_database(self): 
""" Database API: Test create """ extra = { "metadata_params": {}, "engine_params": {}, "metadata_cache_timeout": {}, "schemas_allowed_for_csv_upload": [], } self.login(username="admin") example_db = get_example_database() if example_db.backend == "sqlite": return database_data = { "database_name": "test-database", "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, "server_cert": ssl_certificate, "extra": json.dumps(extra), } uri = "api/v1/database/" rv = self.client.post(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) self.assertEqual(rv.status_code, 201) # Cleanup model = db.session.query(Database).get(response.get("id")) db.session.delete(model) db.session.commit() def test_create_database_server_cert_validate(self): """ Database API: Test create server cert validation """ example_db = get_example_database() if example_db.backend == "sqlite": return self.login(username="admin") database_data = { "database_name": "test-database", "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, "server_cert": "INVALID CERT", } uri = "api/v1/database/" rv = self.client.post(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) expected_response = {"message": {"server_cert": ["Invalid certificate"]}} self.assertEqual(rv.status_code, 400) self.assertEqual(response, expected_response) def test_create_database_json_validate(self): """ Database API: Test create encrypted extra and extra validation """ example_db = get_example_database() if example_db.backend == "sqlite": return self.login(username="admin") database_data = { "database_name": "test-database", "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, "encrypted_extra": '{"A": "a", "B", "C"}', "extra": '["A": "a", "B", "C"]', } uri = "api/v1/database/" rv = self.client.post(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) expected_response = { "message": { "encrypted_extra": [ "Field cannot be decoded by JSON. Expecting ':' " "delimiter: line 1 column 15 (char 14)" ], "extra": [ "Field cannot be decoded by JSON. Expecting ','" " delimiter: line 1 column 5 (char 4)" ], } } self.assertEqual(rv.status_code, 400) self.assertEqual(response, expected_response) def test_create_database_extra_metadata_validate(self): """ Database API: Test create extra metadata_params validation """ example_db = get_example_database() if example_db.backend == "sqlite": return extra = { "metadata_params": {"wrong_param": "some_value"}, "engine_params": {}, "metadata_cache_timeout": {}, "schemas_allowed_for_csv_upload": [], } self.login(username="admin") database_data = { "database_name": "test-database", "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, "extra": json.dumps(extra), } uri = "api/v1/database/" rv = self.client.post(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) expected_response = { "message": { "extra": [ "The metadata_params in Extra field is not configured correctly." " The key wrong_param is invalid." 
] } } self.assertEqual(rv.status_code, 400) self.assertEqual(response, expected_response) def test_create_database_unique_validate(self): """ Database API: Test create database_name already exists """ example_db = get_example_database() if example_db.backend == "sqlite": return self.login(username="admin") database_data = { "database_name": "examples", "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, } uri = "api/v1/database/" rv = self.client.post(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) expected_response = { "message": {"database_name": "A database with the same name already exists"} } self.assertEqual(rv.status_code, 422) self.assertEqual(response, expected_response) def test_create_database_uri_validate(self): """ Database API: Test create fail validate sqlalchemy uri """ self.login(username="admin") database_data = { "database_name": "test-database", "sqlalchemy_uri": "wrong_uri", } uri = "api/v1/database/" rv = self.client.post(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) self.assertEqual(rv.status_code, 400) expected_response = { "message": { "sqlalchemy_uri": [ "Invalid connection string, a valid string usually " "follows:'DRIVER://USER:PASSWORD@DB-HOST/DATABASE-NAME'" "<p>Example:'postgresql://user:password@your-postgres-db/database'" "</p>" ] } } self.assertEqual(response, expected_response) def test_create_database_fail_sqllite(self): """ Database API: Test create fail with sqllite """ database_data = { "database_name": "test-database", "sqlalchemy_uri": "sqlite:////some.db", } uri = "api/v1/database/" self.login(username="admin") response = self.client.post(uri, json=database_data) response_data = json.loads(response.data.decode("utf-8")) expected_response = { "message": { "sqlalchemy_uri": [ "SQLite database cannot be used as a data source " "for security reasons." 
] } } self.assertEqual(response.status_code, 400) self.assertEqual(response_data, expected_response) def test_create_database_conn_fail(self): """ Database API: Test create fails connection """ example_db = get_example_database() if example_db.backend in ("sqlite", "hive", "presto"): return example_db.password = "wrong_password" database_data = { "database_name": "test-database", "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, } uri = "api/v1/database/" self.login(username="admin") response = self.client.post(uri, json=database_data) response_data = json.loads(response.data.decode("utf-8")) expected_response = {"message": "Could not connect to database."} self.assertEqual(response.status_code, 422) self.assertEqual(response_data, expected_response) def test_update_database(self): """ Database API: Test update """ example_db = get_example_database() test_database = self.insert_database( "test-database", example_db.sqlalchemy_uri_decrypted ) self.login(username="admin") database_data = {"database_name": "test-database-updated"} uri = f"api/v1/database/{test_database.id}" rv = self.client.put(uri, json=database_data) self.assertEqual(rv.status_code, 200) # Cleanup model = db.session.query(Database).get(test_database.id) db.session.delete(model) db.session.commit() def test_update_database_conn_fail(self): """ Database API: Test update fails connection """ example_db = get_example_database() if example_db.backend in ("sqlite", "hive", "presto"): return test_database = self.insert_database( "test-database1", example_db.sqlalchemy_uri_decrypted ) example_db.password = "wrong_password" database_data = { "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, } uri = f"api/v1/database/{test_database.id}" self.login(username="admin") rv = self.client.put(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) expected_response = {"message": "Could not connect to database."} self.assertEqual(rv.status_code, 422) self.assertEqual(response, expected_response) # Cleanup model = db.session.query(Database).get(test_database.id) db.session.delete(model) db.session.commit() def test_update_database_uniqueness(self): """ Database API: Test update uniqueness """ example_db = get_example_database() test_database1 = self.insert_database( "test-database1", example_db.sqlalchemy_uri_decrypted ) test_database2 = self.insert_database( "test-database2", example_db.sqlalchemy_uri_decrypted ) self.login(username="admin") database_data = {"database_name": "test-database2"} uri = f"api/v1/database/{test_database1.id}" rv = self.client.put(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) expected_response = { "message": {"database_name": "A database with the same name already exists"} } self.assertEqual(rv.status_code, 422) self.assertEqual(response, expected_response) # Cleanup db.session.delete(test_database1) db.session.delete(test_database2) db.session.commit() def test_update_database_invalid(self): """ Database API: Test update invalid request """ self.login(username="admin") database_data = {"database_name": "test-database-updated"} uri = f"api/v1/database/invalid" rv = self.client.put(uri, json=database_data) self.assertEqual(rv.status_code, 404) def test_update_database_uri_validate(self): """ Database API: Test update sqlalchemy_uri validate """ example_db = get_example_database() test_database = self.insert_database( "test-database", example_db.sqlalchemy_uri_decrypted ) self.login(username="admin") database_data = { "database_name": "test-database-updated", 
"sqlalchemy_uri": "wrong_uri", } uri = f"api/v1/database/{test_database.id}" rv = self.client.put(uri, json=database_data) response = json.loads(rv.data.decode("utf-8")) self.assertEqual(rv.status_code, 400) expected_response = { "message": { "sqlalchemy_uri": [ "Invalid connection string, a valid string usually " "follows:'DRIVER://USER:PASSWORD@DB-HOST/DATABASE-NAME'" "<p>Example:'postgresql://user:password@your-postgres-db/database'" "</p>" ] } } self.assertEqual(response, expected_response) def test_delete_database(self): """ Database API: Test delete """ database_id = self.insert_database("test-database", "test_uri").id self.login(username="admin") uri = f"api/v1/database/{database_id}" rv = self.delete_assert_metric(uri, "delete") self.assertEqual(rv.status_code, 200) model = db.session.query(Database).get(database_id) self.assertEqual(model, None) def test_delete_database_not_found(self): """ Database API: Test delete not found """ max_id = db.session.query(func.max(Database.id)).scalar() self.login(username="admin") uri = f"api/v1/database/{max_id + 1}" rv = self.delete_assert_metric(uri, "delete") self.assertEqual(rv.status_code, 404) def test_delete_database_with_datasets(self): """ Database API: Test delete fails because it has depending datasets """ database_id = ( db.session.query(Database).filter_by(database_name="examples").one() ).id self.login(username="admin") uri = f"api/v1/database/{database_id}" rv = self.delete_assert_metric(uri, "delete") self.assertEqual(rv.status_code, 422) def test_get_table_metadata(self): """ Database API: Test get table metadata info """ example_db = get_example_database() self.login(username="admin") uri = f"api/v1/database/{example_db.id}/table/birth_names/null/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 200) response = json.loads(rv.data.decode("utf-8")) self.assertEqual(response["name"], "birth_names") self.assertIsNone(response["comment"]) self.assertTrue(len(response["columns"]) > 5) self.assertTrue(response.get("selectStar").startswith("SELECT")) def test_get_invalid_database_table_metadata(self): """ Database API: Test get invalid database from table metadata """ database_id = 1000 self.login(username="admin") uri = f"api/v1/database/{database_id}/table/some_table/some_schema/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 404) uri = f"api/v1/database/some_database/table/some_table/some_schema/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 404) def test_get_invalid_table_table_metadata(self): """ Database API: Test get invalid table from table metadata """ example_db = get_example_database() uri = f"api/v1/database/{example_db.id}/wrong_table/null/" self.login(username="admin") rv = self.client.get(uri) self.assertEqual(rv.status_code, 404) def test_get_table_metadata_no_db_permission(self): """ Database API: Test get table metadata from not permitted db """ self.login(username="gamma") example_db = get_example_database() uri = f"api/v1/database/{example_db.id}/birth_names/null/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 404) def test_get_select_star(self): """ Database API: Test get select star """ self.login(username="admin") example_db = get_example_database() uri = f"api/v1/database/{example_db.id}/select_star/birth_names/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 200) response = json.loads(rv.data.decode("utf-8")) self.assertIn("gender", response["result"]) def test_get_select_star_not_allowed(self): """ Database API: Test get select star not allowed """ 
self.login(username="gamma") example_db = get_example_database() uri = f"api/v1/database/{example_db.id}/select_star/birth_names/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 404) def test_get_select_star_datasource_access(self): """ Database API: Test get select star with datasource access """ session = db.session table = SqlaTable( schema="main", table_name="ab_permission", database=get_main_database() ) session.add(table) session.commit() tmp_table_perm = security_manager.find_permission_view_menu( "datasource_access", table.get_perm() ) gamma_role = security_manager.find_role("Gamma") security_manager.add_permission_role(gamma_role, tmp_table_perm) self.login(username="gamma") main_db = get_main_database() uri = f"api/v1/database/{main_db.id}/select_star/ab_permission/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 200) # rollback changes security_manager.del_permission_role(gamma_role, tmp_table_perm) db.session.delete(table) db.session.delete(main_db) db.session.commit() def test_get_select_star_not_found_database(self): """ Database API: Test get select star not found database """ self.login(username="admin") max_id = db.session.query(func.max(Database.id)).scalar() uri = f"api/v1/database/{max_id + 1}/select_star/birth_names/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 404) def test_get_select_star_not_found_table(self): """ Database API: Test get select star not found database """ self.login(username="admin") example_db = get_example_database() # sqllite will not raise a NoSuchTableError if example_db.backend == "sqlite": return uri = f"api/v1/database/{example_db.id}/select_star/table_does_not_exist/" rv = self.client.get(uri) # TODO(bkyryliuk): investigate why presto returns 500 self.assertEqual(rv.status_code, 404 if example_db.backend != "presto" else 500) def test_database_schemas(self): """ Database API: Test database schemas """ self.login("admin") database = db.session.query(Database).first() schemas = database.get_all_schema_names() rv = self.client.get(f"api/v1/database/{database.id}/schemas/") response = json.loads(rv.data.decode("utf-8")) self.assertEqual(schemas, response["result"]) rv = self.client.get( f"api/v1/database/{database.id}/schemas/?q={prison.dumps({'force': True})}" ) response = json.loads(rv.data.decode("utf-8")) self.assertEqual(schemas, response["result"]) def test_database_schemas_not_found(self): """ Database API: Test database schemas not found """ self.logout() self.login(username="gamma") example_db = get_example_database() uri = f"api/v1/database/{example_db.id}/schemas/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 404) def test_database_schemas_invalid_query(self): """ Database API: Test database schemas with invalid query """ self.login("admin") database = db.session.query(Database).first() rv = self.client.get( f"api/v1/database/{database.id}/schemas/?q={prison.dumps({'force': 'nop'})}" ) self.assertEqual(rv.status_code, 400) def test_test_connection(self): """ Database API: Test test connection """ extra = { "metadata_params": {}, "engine_params": {}, "metadata_cache_timeout": {}, "schemas_allowed_for_csv_upload": [], } # need to temporarily allow sqlite dbs, teardown will undo this app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False self.login("admin") example_db = get_example_database() # validate that the endpoint works with the password-masked sqlalchemy uri data = { "database_name": "examples", "encrypted_extra": "{}", "extra": json.dumps(extra), "impersonate_user": False, 
"sqlalchemy_uri": example_db.safe_sqlalchemy_uri(), "server_cert": ssl_certificate, } url = f"api/v1/database/test_connection" rv = self.post_assert_metric(url, data, "test_connection") self.assertEqual(rv.status_code, 200) self.assertEqual(rv.headers["Content-Type"], "application/json; charset=utf-8") # validate that the endpoint works with the decrypted sqlalchemy uri data = { "sqlalchemy_uri": example_db.sqlalchemy_uri_decrypted, "database_name": "examples", "impersonate_user": False, "extra": json.dumps(extra), "server_cert": None, } rv = self.post_assert_metric(url, data, "test_connection") self.assertEqual(rv.status_code, 200) self.assertEqual(rv.headers["Content-Type"], "application/json; charset=utf-8") def test_test_connection_failed(self): """ Database API: Test test connection failed """ self.login("admin") data = { "sqlalchemy_uri": "broken://url", "database_name": "examples", "impersonate_user": False, "server_cert": None, } url = f"api/v1/database/test_connection" rv = self.post_assert_metric(url, data, "test_connection") self.assertEqual(rv.status_code, 400) self.assertEqual(rv.headers["Content-Type"], "application/json; charset=utf-8") response = json.loads(rv.data.decode("utf-8")) expected_response = { "driver_name": "broken", "message": "Could not load database driver: broken", } self.assertEqual(response, expected_response) data = { "sqlalchemy_uri": "mssql+pymssql://url", "database_name": "examples", "impersonate_user": False, "server_cert": None, } rv = self.post_assert_metric(url, data, "test_connection") self.assertEqual(rv.status_code, 400) self.assertEqual(rv.headers["Content-Type"], "application/json; charset=utf-8") response = json.loads(rv.data.decode("utf-8")) expected_response = { "driver_name": "mssql+pymssql", "message": "Could not load database driver: mssql+pymssql", } self.assertEqual(response, expected_response) def test_test_connection_unsafe_uri(self): """ Database API: Test test connection with unsafe uri """ self.login("admin") app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True data = { "sqlalchemy_uri": "sqlite:///home/superset/unsafe.db", "database_name": "unsafe", "impersonate_user": False, "server_cert": None, } url = f"api/v1/database/test_connection" rv = self.post_assert_metric(url, data, "test_connection") self.assertEqual(rv.status_code, 400) response = json.loads(rv.data.decode("utf-8")) expected_response = { "message": { "sqlalchemy_uri": [ "SQLite database cannot be used as a data source for security reasons." 
] } } self.assertEqual(response, expected_response) def test_get_database_related_objects(self): """ Database API: Test get chart and dashboard count related to a database :return: """ self.login(username="admin") database = get_example_database() uri = f"api/v1/database/{database.id}/related_objects/" rv = self.get_assert_metric(uri, "related_objects") self.assertEqual(rv.status_code, 200) response = json.loads(rv.data.decode("utf-8")) self.assertEqual(response["charts"]["count"], 33) self.assertEqual(response["dashboards"]["count"], 6) def test_get_database_related_objects_not_found(self): """ Database API: Test related objects not found """ max_id = db.session.query(func.max(Database.id)).scalar() # id does not exist and we get 404 invalid_id = max_id + 1 uri = f"api/v1/database/{invalid_id}/related_objects/" self.login(username="admin") rv = self.client.get(uri) self.assertEqual(rv.status_code, 404) self.logout() self.login(username="gamma") database = get_example_database() uri = f"api/v1/database/{database.id}/related_objects/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 404)
airbnb/superset
tests/databases/api_tests.py
Python
apache-2.0
28,975
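Several of the Superset tests above build list and filter URLs by rison-encoding a payload with prison.dumps (for example in test_get_items_filter and test_database_schemas). A small standalone illustration of just that encoding step, with made-up filter values and no live API call:

import prison  # rison encoder the tests above rely on

arguments = {
    "filters": [{"col": "expose_in_sqllab", "opr": "eq", "value": True}],
    "order_columns": "database_name",
    "order_direction": "asc",
    "page": 0,
    "page_size": -1,
}
uri = f"api/v1/database/?q={prison.dumps(arguments)}"
print(uri)  # the q parameter carries the rison-encoded filter object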
import StringIO from pygments.formatters import HtmlFormatter from pygments import highlight from kallithea.lib.vcs.exceptions import VCSError from kallithea.lib.vcs.nodes import FileNode def annotate_highlight(filenode, annotate_from_changeset_func=None, order=None, headers=None, **options): """ Returns html portion containing annotated table with 3 columns: line numbers, changeset information and pygmentized line of code. :param filenode: FileNode object :param annotate_from_changeset_func: function taking changeset and returning single annotate cell; needs break line at the end :param order: ordered sequence of ``ls`` (line numbers column), ``annotate`` (annotate column), ``code`` (code column); Default is ``['ls', 'annotate', 'code']`` :param headers: dictionary with headers (keys are whats in ``order`` parameter) """ options['linenos'] = True formatter = AnnotateHtmlFormatter(filenode=filenode, order=order, headers=headers, annotate_from_changeset_func=annotate_from_changeset_func, **options) lexer = filenode.lexer highlighted = highlight(filenode.content, lexer, formatter) return highlighted class AnnotateHtmlFormatter(HtmlFormatter): def __init__(self, filenode, annotate_from_changeset_func=None, order=None, **options): """ If ``annotate_from_changeset_func`` is passed it should be a function which returns string from the given changeset. For example, we may pass following function as ``annotate_from_changeset_func``:: def changeset_to_anchor(changeset): return '<a href="/changesets/%s/">%s</a>\n' %\ (changeset.id, changeset.id) :param annotate_from_changeset_func: see above :param order: (default: ``['ls', 'annotate', 'code']``); order of columns; :param options: standard pygment's HtmlFormatter options, there is extra option tough, ``headers``. For instance we can pass:: formatter = AnnotateHtmlFormatter(filenode, headers={ 'ls': '#', 'annotate': 'Annotate', 'code': 'Code', }) """ super(AnnotateHtmlFormatter, self).__init__(**options) self.annotate_from_changeset_func = annotate_from_changeset_func self.order = order or ('ls', 'annotate', 'code') headers = options.pop('headers', None) if headers and not ('ls' in headers and 'annotate' in headers and 'code' in headers): raise ValueError("If headers option dict is specified it must " "all 'ls', 'annotate' and 'code' keys") self.headers = headers if isinstance(filenode, FileNode): self.filenode = filenode else: raise VCSError("This formatter expect FileNode parameter, not %r" % type(filenode)) def annotate_from_changeset(self, changeset): """ Returns full html line for single changeset per annotated line. 
""" if self.annotate_from_changeset_func: return self.annotate_from_changeset_func(changeset) else: return ''.join((changeset.id, '\n')) def _wrap_tablelinenos(self, inner): dummyoutfile = StringIO.StringIO() lncount = 0 for t, line in inner: if t: lncount += 1 dummyoutfile.write(line) fl = self.linenostart mw = len(str(lncount + fl - 1)) sp = self.linenospecial st = self.linenostep la = self.lineanchors aln = self.anchorlinenos if sp: lines = [] for i in range(fl, fl + lncount): if i % st == 0: if i % sp == 0: if aln: lines.append('<a href="#%s-%d" class="special">' '%*d</a>' % (la, i, mw, i)) else: lines.append('<span class="special">' '%*d</span>' % (mw, i)) else: if aln: lines.append('<a href="#%s-%d">' '%*d</a>' % (la, i, mw, i)) else: lines.append('%*d' % (mw, i)) else: lines.append('') ls = '\n'.join(lines) else: lines = [] for i in range(fl, fl + lncount): if i % st == 0: if aln: lines.append('<a href="#%s-%d">%*d</a>' \ % (la, i, mw, i)) else: lines.append('%*d' % (mw, i)) else: lines.append('') ls = '\n'.join(lines) annotate_changesets = [tup[1] for tup in self.filenode.annotate] # If pygments cropped last lines break we need do that too ln_cs = len(annotate_changesets) ln_ = len(ls.splitlines()) if ln_cs > ln_: annotate_changesets = annotate_changesets[:ln_ - ln_cs] annotate = ''.join((self.annotate_from_changeset(changeset) for changeset in annotate_changesets)) # in case you wonder about the seemingly redundant <div> here: # since the content in the other cell also is wrapped in a div, # some browsers in some configurations seem to mess up the formatting. ''' yield 0, ('<table class="%stable">' % self.cssclass + '<tr><td class="linenos"><div class="linenodiv"><pre>' + ls + '</pre></div></td>' + '<td class="code">') yield 0, dummyoutfile.getvalue() yield 0, '</td></tr></table>' ''' headers_row = [] if self.headers: headers_row = ['<tr class="annotate-header">'] for key in self.order: td = ''.join(('<td>', self.headers[key], '</td>')) headers_row.append(td) headers_row.append('</tr>') body_row_start = ['<tr>'] for key in self.order: if key == 'ls': body_row_start.append( '<td class="linenos"><div class="linenodiv"><pre>' + ls + '</pre></div></td>') elif key == 'annotate': body_row_start.append( '<td class="annotate"><div class="annotatediv"><pre>' + annotate + '</pre></div></td>') elif key == 'code': body_row_start.append('<td class="code">') yield 0, ('<table class="%stable">' % self.cssclass + ''.join(headers_row) + ''.join(body_row_start) ) yield 0, dummyoutfile.getvalue() yield 0, '</td></tr></table>'
zhumengyuan/kallithea
kallithea/lib/vcs/utils/annotate.py
Python
gpl-3.0
7,104
"""Weather information for air and road temperature (by Trafikverket).""" import asyncio from datetime import timedelta import logging import aiohttp from pytrafikverket.trafikverket_weather import TrafikverketWeather import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_API_KEY, CONF_MONITORED_CONDITIONS, CONF_NAME, DEGREE, DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_TEMPERATURE, SPEED_METERS_PER_SECOND, TEMP_CELSIUS, UNIT_PERCENTAGE, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Data provided by Trafikverket" ATTR_MEASURE_TIME = "measure_time" ATTR_ACTIVE = "active" CONF_STATION = "station" MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10) SCAN_INTERVAL = timedelta(seconds=300) SENSOR_TYPES = { "air_temp": [ "Air temperature", TEMP_CELSIUS, "air_temp", "mdi:thermometer", DEVICE_CLASS_TEMPERATURE, ], "road_temp": [ "Road temperature", TEMP_CELSIUS, "road_temp", "mdi:thermometer", DEVICE_CLASS_TEMPERATURE, ], "precipitation": [ "Precipitation type", None, "precipitationtype", "mdi:weather-snowy-rainy", None, ], "wind_direction": [ "Wind direction", DEGREE, "winddirection", "mdi:flag-triangle", None, ], "wind_direction_text": [ "Wind direction text", None, "winddirectiontext", "mdi:flag-triangle", None, ], "wind_speed": [ "Wind speed", SPEED_METERS_PER_SECOND, "windforce", "mdi:weather-windy", None, ], "wind_speed_max": [ "Wind speed max", SPEED_METERS_PER_SECOND, "windforcemax", "mdi:weather-windy-variant", None, ], "humidity": [ "Humidity", UNIT_PERCENTAGE, "humidity", "mdi:water-percent", DEVICE_CLASS_HUMIDITY, ], "precipitation_amount": [ "Precipitation amount", "mm", "precipitation_amount", "mdi:cup-water", None, ], "precipitation_amountname": [ "Precipitation name", None, "precipitation_amountname", "mdi:weather-pouring", None, ], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_NAME): cv.string, vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_STATION): cv.string, vol.Required(CONF_MONITORED_CONDITIONS, default=[]): [vol.In(SENSOR_TYPES)], } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Trafikverket sensor platform.""" sensor_name = config[CONF_NAME] sensor_api = config[CONF_API_KEY] sensor_station = config[CONF_STATION] web_session = async_get_clientsession(hass) weather_api = TrafikverketWeather(web_session, sensor_api) dev = [] for condition in config[CONF_MONITORED_CONDITIONS]: dev.append( TrafikverketWeatherStation( weather_api, sensor_name, condition, sensor_station ) ) if dev: async_add_entities(dev, True) class TrafikverketWeatherStation(Entity): """Representation of a Trafikverket sensor.""" def __init__(self, weather_api, name, sensor_type, sensor_station): """Initialize the sensor.""" self._client = name self._name = SENSOR_TYPES[sensor_type][0] self._type = sensor_type self._state = None self._unit = SENSOR_TYPES[sensor_type][1] self._station = sensor_station self._weather_api = weather_api self._icon = SENSOR_TYPES[sensor_type][3] self._device_class = SENSOR_TYPES[sensor_type][4] self._weather = None @property def name(self): """Return the name of the sensor.""" return f"{self._client} {self._name}" @property def icon(self): """Icon to use in the frontend.""" return self._icon @property def 
device_state_attributes(self): """Return the state attributes of Trafikverket Weatherstation.""" return { ATTR_ATTRIBUTION: ATTRIBUTION, ATTR_ACTIVE: self._weather.active, ATTR_MEASURE_TIME: self._weather.measure_time, } @property def device_class(self): """Return the device class of the sensor.""" return self._device_class @property def state(self): """Return the state of the device.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self): """Get the latest data from Trafikverket and updates the states.""" try: self._weather = await self._weather_api.async_get_weather(self._station) self._state = getattr(self._weather, SENSOR_TYPES[self._type][2]) except (asyncio.TimeoutError, aiohttp.ClientError, ValueError) as error: _LOGGER.error("Could not fetch weather data: %s", error)
titilambert/home-assistant
homeassistant/components/trafikverket_weatherstation/sensor.py
Python
apache-2.0
5,526
#!/usr/bin/env python # encoding: utf-8 # # Copyright (c) 2017 Dean Jackson <deanishe@deanishe.net> # # MIT Licence. See http://opensource.org/licenses/MIT # # Created on 2017-12-10 # """Parse an OpenSearch for search and autosuggest URLs.""" from __future__ import print_function, absolute_import import re from urlparse import urljoin, urlparse from xml.etree import ElementTree as ET from bs4 import BeautifulSoup as BS from workflow import web from searchio import util log = util.logger(__name__) NS = { 'os': 'http://a9.com/-/spec/opensearch/1.1/', 'moz': 'http://www.mozilla.org/2006/browser/search/', } class OpenSearchError(Exception): """Base exception.""" class NotFound(OpenSearchError): """No OpenSearch found.""" class Invalid(OpenSearchError): """Missing required attributes.""" class NoAutoSuggest(Invalid): """Doesn't support autosuggestions.""" class OpenSearch(object): """OpenSearch parameters.""" def __init__(self): self.name = None self.description = None self.suggest_url = None self.search_url = None self.icon_url = None self.uid = None self.jsonpath = '$[1][*]' def validate(self): if not self.name: raise Invalid('missing "name" attribute') if not self.search_url: raise Invalid('missing "search_url" attribute') if not self.suggest_url: raise NoAutoSuggest() def __unicode__(self): return (u'OpenSearch({o.name}) search={o.search_url}, ' 'suggest={o.suggest_url}'.format(o=self)) def __str__(self): return unicode(self).encode('utf-8') def __repr__(self): return str(self) def _parse_html(s, baseurl): """Extract OpenSearch link and icon from HTML.""" # TODO: find an icon, e.g. apple-touch-icon defurl = iconurl = None matchsize = re.compile(r'(\d+)x.*').match soup = BS(s, 'html.parser') link = soup.find('link', type='application/opensearchdescription+xml') if not link: return None, None defurl = urljoin(baseurl, link['href']) log.debug('[opensearch] definition URL: %s', defurl) # Find icon icons = [] for elem in soup.find_all('link', rel='apple-touch-icon'): size = elem.get('sizes') or '0x0' m = matchsize(size) if m: size = int(m.group(1)) else: size = 0 url = elem['href'] icons.append((size, url)) if icons: # choose largest icon icons.sort() iconurl = urljoin(baseurl, icons[-1][1]) return defurl, iconurl def _parse_definition(s): """Parse an OpenSearch definition.""" search = OpenSearch() root = ET.fromstring(s.encode('utf-8')) def tag2attrib(tag, attrib): elem = root.find(tag, NS) if elem is not None: setattr(search, attrib, elem.text.strip()) tag2attrib('os:ShortName', 'name') tag2attrib('os:Description', 'description') for elem in root.findall('os:Url', NS): t = elem.get('type') tpl = elem.get('template') if not tpl: log.warning('[opensearch] Url has no template') continue if t == 'text/html': search.search_url = tpl.replace('{searchTerms}', '{query}') if t == 'application/x-suggestions+json': search.suggest_url = tpl.replace('{searchTerms}', '{query}') log.debug('[opensearch] %s', search) return search def _is_xml(s): """Return ``True`` if string is an XML document.""" return s.lower().strip().startswith('<?xml ') def _url2uid(url): """Generate a UID for a search based on its URL.""" p = urlparse(url) return 'opensearch-' + p.netloc.replace(':', '-') def parse(url): """Parse a URL for OpenSearch specification.""" log.info('[opensearch] fetching "%s" ...', url) defurl = iconurl = None # Fetch and parse URL r = web.get(url) r.raise_for_status() s = r.text if not _is_xml(s): # find URL of OpenSearch definition defurl, iconurl = _parse_html(s, url) if not defurl: 
log.error('[opensearch] no OpenSearch link found') raise NotFound(url) r = web.get(defurl) r.raise_for_status() s = r.text # Parse OpenSearch definition search = _parse_definition(s) search.validate() search.uid = _url2uid(url) search.icon_url = iconurl return search
deanishe/alfred-searchio
src/lib/searchio/opensearch.py
Python
mit
4,499
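The parser above consumes standard OpenSearch description documents. As a rough illustration of the input _parse_definition expects, here is a minimal description (placeholder URLs, not a real engine) parsed with the same ElementTree namespace handling; this sketch is self-contained and does not use the searchio module:

from xml.etree import ElementTree as ET

NS = {'os': 'http://a9.com/-/spec/opensearch/1.1/'}

# Minimal OpenSearch description document (illustrative placeholders only).
doc = """<?xml version="1.0"?>
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/">
  <ShortName>Example</ShortName>
  <Description>Example search</Description>
  <Url type="text/html" template="https://example.com/search?q={searchTerms}"/>
  <Url type="application/x-suggestions+json"
       template="https://example.com/suggest?q={searchTerms}"/>
</OpenSearchDescription>"""

root = ET.fromstring(doc)
name = root.find('os:ShortName', NS).text
urls = {u.get('type'): u.get('template') for u in root.findall('os:Url', NS)}
print(name)                                    # Example
print(urls['text/html'])                       # search URL template
print(urls['application/x-suggestions+json'])  # autosuggest URL template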
#!/usr/bin/env python3
# Copyright (c) 2016-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the dumpwallet RPC."""

import os

from test_framework.test_framework import PivxTestFramework
from test_framework.util import (assert_equal, assert_raises_rpc_error)


def read_dump(file_name, addrs, hd_master_addr_old):
    """
    Read the given dump, count the addrs that match, count change and reserve.
    Also check that the old hd_master is inactive
    """
    with open(file_name, encoding='utf8') as inputfile:
        found_addr = 0
        found_addr_chg = 0
        found_addr_rsv = 0
        hd_master_addr_ret = None
        for line in inputfile:
            # only read non comment lines
            if line[0] != "#" and len(line) > 10:
                # split out some data
                key_label, comment = line.split("#")
                # key = key_label.split(" ")[0]
                keytype = key_label.split(" ")[2]
                addr = comment.split(" addr=")[1].strip()
                # count key types
                if addr in addrs:
                    found_addr += 1
                elif keytype == "change=1":
                    found_addr_chg += 1
                elif keytype == "reserve=1":
                    found_addr_rsv += 1
        return found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret


class WalletDumpTest(PivxTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.extra_args = [["-keypool=90"]]

    def setup_network(self, split=False):
        # Use 1 minute timeout because the initial getnewaddress RPC can take
        # longer than the default 30 seconds due to an expensive
        # CWallet::TopUpKeyPool call, and the encryptwallet RPC made later in
        # the test often takes even longer.
        self.add_nodes(self.num_nodes, self.extra_args, timewait=60)
        self.start_nodes()

    def run_test (self):
        tmpdir = self.options.tmpdir

        # generate 20 addresses to compare against the dump
        test_addr_count = 20
        addrs = []
        for i in range(0,test_addr_count):
            addr = self.nodes[0].getnewaddress()
            #vaddr= self.nodes[0].validateaddress(addr) #required to get hd keypath
            addrs.append(addr)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()

        # dump unencrypted wallet
        result = self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.unencrypted.dump")
        assert_equal(result['filename'], os.path.abspath(tmpdir + "/node0/wallet.unencrypted.dump"))

        found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
            read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, None)
        assert_equal(found_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_addr_chg, 0)  # 0 blocks where mined
        assert_equal(found_addr_rsv, 90 + 1)  # keypool size (TODO: fix off-by-one)

        #encrypt wallet, restart, unlock and dump
        self.nodes[0].node_encrypt_wallet('test')
        self.start_node(0)
        self.nodes[0].walletpassphrase('test', 10)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")

        found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_enc = \
            read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, hd_master_addr_unenc)
        assert_equal(found_addr, test_addr_count)
        assert_equal(found_addr_chg, 90 + 1)  # old reserve keys are marked as change now
        assert_equal(found_addr_rsv, 90 + 1)  # keypool size (TODO: fix off-by-one)


if __name__ == '__main__':
    WalletDumpTest().main ()
Mrs-X/PIVX
test/functional/wallet_dump.py
Python
mit
3,849
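The read_dump helper above splits each non-comment dump line on '#', takes the third space-separated token of the key part as the key type, and pulls the address out of the ' addr=' field in the comment. A made-up line (not a real key or address) showing the shape that parsing assumes:

# Hypothetical wallet-dump line in the format read_dump expects (invented values).
line = "cVExampleKeyMaterial 2019-01-01T00:00:00Z reserve=1 # addr=DExampleAddress123"

key_label, comment = line.split("#")
keytype = key_label.split(" ")[2]            # -> "reserve=1"
addr = comment.split(" addr=")[1].strip()    # -> "DExampleAddress123"
print(keytype, addr)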
import pytest import pytz from datetime import date, datetime, timedelta, time import icalendar from khal.khalendar import backend from khal.khalendar.event import LocalizedEvent from khal.khalendar.exceptions import OutdatedDbVersionError, UpdateFailed from .aux import _get_text BERLIN = pytz.timezone('Europe/Berlin') LONDON = pytz.timezone('Europe/London') SAMOA = pytz.timezone('Pacific/Samoa') SYDNEY = pytz.timezone('Australia/Sydney') LOCALE_BERLIN = {'local_timezone': BERLIN, 'default_timezone': BERLIN} LOCALE_SAMOA = {'local_timezone': SAMOA, 'default_timezone': SAMOA} LOCALE_SYDNEY = {'local_timezone': SYDNEY, 'default_timezone': SYDNEY} calname = 'home' def test_new_db_version(): dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN) backend.DB_VERSION += 1 with pytest.raises(OutdatedDbVersionError): dbi._check_table_version() def test_event_rrule_recurrence_id(): dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) assert dbi.list(calname) == list() events = dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 8, 26, 0, 0))) assert list(events) == list() dbi.update(_get_text('event_rrule_recuid'), href='12345.ics', etag='abcd', calendar=calname) assert dbi.list(calname) == [('12345.ics', 'abcd')] events = dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 8, 26, 0, 0))) events = sorted(events, key=lambda x: x.start) assert len(events) == 6 assert events[0].start == BERLIN.localize(datetime(2014, 6, 30, 7, 0)) assert events[1].start == BERLIN.localize(datetime(2014, 7, 7, 9, 0)) assert events[2].start == BERLIN.localize(datetime(2014, 7, 14, 7, 0)) assert events[3].start == BERLIN.localize(datetime(2014, 7, 21, 7, 0)) assert events[4].start == BERLIN.localize(datetime(2014, 7, 28, 7, 0)) assert events[5].start == BERLIN.localize(datetime(2014, 8, 4, 7, 0)) def test_event_different_timezones(): dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) dbi.update(_get_text('event_dt_london'), href='12345.ics', etag='abcd', calendar=calname) events = dbi.get_localized(BERLIN.localize(datetime(2014, 4, 9, 0, 0)), BERLIN.localize(datetime(2014, 4, 9, 23, 59))) events = list(events) assert len(events) == 1 event = events[0] assert event.start_local == LONDON.localize(datetime(2014, 4, 9, 14)) assert event.end_local == LONDON.localize(datetime(2014, 4, 9, 19)) # no event scheduled on the next day events = dbi.get_localized(BERLIN.localize(datetime(2014, 4, 10, 0, 0)), BERLIN.localize(datetime(2014, 4, 10, 23, 59))) events = list(events) assert len(events) == 0 # now setting the local_timezone to Sydney dbi.locale = LOCALE_SYDNEY events = dbi.get_localized(SYDNEY.localize(datetime(2014, 4, 9, 0, 0)), SYDNEY.localize(datetime(2014, 4, 9, 23, 59))) events = list(events) assert len(events) == 1 event = events[0] assert event.start_local == SYDNEY.localize(datetime(2014, 4, 9, 23)) assert event.end_local == SYDNEY.localize(datetime(2014, 4, 10, 4)) # the event spans midnight Sydney, therefor it should also show up on the # next day events = dbi.get_localized(SYDNEY.localize(datetime(2014, 4, 10, 0, 0)), SYDNEY.localize(datetime(2014, 4, 10, 23, 59))) events = list(events) assert len(events) == 1 assert event.start_local == SYDNEY.localize(datetime(2014, 4, 9, 23)) assert event.end_local == SYDNEY.localize(datetime(2014, 4, 10, 4)) def test_event_rrule_recurrence_id_invalid_tzid(): dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) 
dbi.update(_get_text('event_rrule_recuid_invalid_tzid'), href='12345.ics', etag='abcd', calendar=calname) events = dbi.get_localized(BERLIN.localize(datetime(2014, 4, 30, 0, 0)), BERLIN.localize(datetime(2014, 9, 26, 0, 0))) events = sorted(events) assert len(events) == 6 assert events[0].start == BERLIN.localize(datetime(2014, 6, 30, 7, 0)) assert events[1].start == BERLIN.localize(datetime(2014, 7, 7, 9, 0)) assert events[2].start == BERLIN.localize(datetime(2014, 7, 14, 7, 0)) assert events[3].start == BERLIN.localize(datetime(2014, 7, 21, 7, 0)) assert events[4].start == BERLIN.localize(datetime(2014, 7, 28, 7, 0)) assert events[5].start == BERLIN.localize(datetime(2014, 8, 4, 7, 0)) event_rrule_recurrence_id_reverse = """ BEGIN:VCALENDAR BEGIN:VEVENT UID:event_rrule_recurrence_id SUMMARY:Arbeit RECURRENCE-ID:20140707T050000Z DTSTART;TZID=Europe/Berlin:20140707T090000 DTEND;TZID=Europe/Berlin:20140707T140000 END:VEVENT BEGIN:VEVENT UID:event_rrule_recurrence_id SUMMARY:Arbeit RRULE:FREQ=WEEKLY;COUNT=6 DTSTART;TZID=Europe/Berlin:20140630T070000 DTEND;TZID=Europe/Berlin:20140630T120000 END:VEVENT END:VCALENDAR """ def test_event_rrule_recurrence_id_reverse(): """as icalendar elements can be saved in arbitrary order, we also have to deal with `reverse` ordered icalendar files """ dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) assert dbi.list(calname) == list() events = dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 8, 26, 0, 0))) assert list(events) == list() dbi.update(event_rrule_recurrence_id_reverse, href='12345.ics', etag='abcd', calendar=calname) assert dbi.list(calname) == [('12345.ics', 'abcd')] events = dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 8, 26, 0, 0))) events = sorted(events, key=lambda x: x.start) assert len(events) == 6 assert events[0].start == BERLIN.localize(datetime(2014, 6, 30, 7, 0)) assert events[1].start == BERLIN.localize(datetime(2014, 7, 7, 9, 0)) assert events[2].start == BERLIN.localize(datetime(2014, 7, 14, 7, 0)) assert events[3].start == BERLIN.localize(datetime(2014, 7, 21, 7, 0)) assert events[4].start == BERLIN.localize(datetime(2014, 7, 28, 7, 0)) assert events[5].start == BERLIN.localize(datetime(2014, 8, 4, 7, 0)) def test_event_rrule_recurrence_id_update_with_exclude(): """ test if updates work as they should. 
The updated event has the extra RECURRENCE-ID event removed and one recurrence date excluded via EXDATE """ dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) dbi.update(_get_text('event_rrule_recuid'), href='12345.ics', etag='abcd', calendar=calname) dbi.update(_get_text('event_rrule_recuid_update'), href='12345.ics', etag='abcd', calendar=calname) events = dbi.get_localized(BERLIN.localize(datetime(2014, 4, 30, 0, 0)), BERLIN.localize(datetime(2014, 9, 26, 0, 0))) events = sorted(events, key=lambda x: x.start) assert len(events) == 5 assert events[0].start == BERLIN.localize(datetime(2014, 6, 30, 7, 0)) assert events[1].start == BERLIN.localize(datetime(2014, 7, 7, 7, 0)) assert events[2].start == BERLIN.localize(datetime(2014, 7, 21, 7, 0)) assert events[3].start == BERLIN.localize(datetime(2014, 7, 28, 7, 0)) assert events[4].start == BERLIN.localize(datetime(2014, 8, 4, 7, 0)) def test_no_valid_timezone(): dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) dbi.update(_get_text('event_dt_local_missing_tz'), href='12345.ics', etag='abcd', calendar=calname) events = dbi.get_localized(BERLIN.localize(datetime(2014, 4, 9, 0, 0)), BERLIN.localize(datetime(2014, 4, 10, 0, 0))) events = sorted(list(events)) assert len(events) == 1 event = events[0] assert event.start == BERLIN.localize(datetime(2014, 4, 9, 9, 30)) def test_event_delete(): dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) assert dbi.list(calname) == list() events = dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 8, 26, 0, 0))) assert list(events) == list() dbi.update(event_rrule_recurrence_id_reverse, href='12345.ics', etag='abcd', calendar=calname) assert dbi.list(calname) == [('12345.ics', 'abcd')] events = dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 9, 26, 0, 0))) assert len(list(events)) == 6 dbi.delete('12345.ics', calendar=calname) events = dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 9, 26, 0, 0))) assert len(list(events)) == 0 event_rrule_this_and_prior = """ BEGIN:VCALENDAR BEGIN:VEVENT UID:event_rrule_recurrence_id_this_and_prior SUMMARY:Arbeit RRULE:FREQ=WEEKLY;UNTIL=20140806T060000Z DTSTART;TZID=Europe/Berlin:20140630T070000 DTEND;TZID=Europe/Berlin:20140630T120000 END:VEVENT BEGIN:VEVENT UID:event_rrule_recurrence_id_this_and_prior SUMMARY:Arbeit RECURRENCE-ID;RANGE=THISANDPRIOR:20140707T050000Z DTSTART;TZID=Europe/Berlin:20140707T090000 DTEND;TZID=Europe/Berlin:20140707T140000 END:VEVENT END:VCALENDAR """ def test_this_and_prior(): """we do not support THISANDPRIOR, therefore this should fail""" dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) with pytest.raises(UpdateFailed): dbi.update(event_rrule_this_and_prior, href='12345.ics', etag='abcd', calendar=calname) event_rrule_this_and_future_temp = """ BEGIN:VCALENDAR BEGIN:VEVENT UID:event_rrule_recurrence_id SUMMARY:Arbeit RRULE:FREQ=WEEKLY;UNTIL=20140806T060000Z DTSTART;TZID=Europe/Berlin:20140630T070000 DTEND;TZID=Europe/Berlin:20140630T120000 END:VEVENT BEGIN:VEVENT UID:event_rrule_recurrence_id SUMMARY:Arbeit (lang) RECURRENCE-ID;RANGE=THISANDFUTURE:20140707T050000Z DTSTART;TZID=Europe/Berlin:{0} DTEND;TZID=Europe/Berlin:{1} END:VEVENT END:VCALENDAR """ event_rrule_this_and_future = \ event_rrule_this_and_future_temp.format('20140707T090000', '20140707T180000') def test_event_rrule_this_and_future(): dbi = backend.SQLiteDb([calname], 
':memory:', locale=LOCALE_BERLIN) dbi.update(event_rrule_this_and_future, href='12345.ics', etag='abcd', calendar=calname) assert dbi.list(calname) == [('12345.ics', 'abcd')] events = dbi.get_localized(BERLIN.localize(datetime(2014, 4, 30, 0, 0)), BERLIN.localize(datetime(2014, 9, 26, 0, 0))) events = sorted(events, key=lambda x: x.start) assert len(events) == 6 assert events[0].start == BERLIN.localize(datetime(2014, 6, 30, 7, 0)) assert events[1].start == BERLIN.localize(datetime(2014, 7, 7, 9, 0)) assert events[2].start == BERLIN.localize(datetime(2014, 7, 14, 9, 0)) assert events[3].start == BERLIN.localize(datetime(2014, 7, 21, 9, 0)) assert events[4].start == BERLIN.localize(datetime(2014, 7, 28, 9, 0)) assert events[5].start == BERLIN.localize(datetime(2014, 8, 4, 9, 0)) assert events[0].end == BERLIN.localize(datetime(2014, 6, 30, 12, 0)) assert events[1].end == BERLIN.localize(datetime(2014, 7, 7, 18, 0)) assert events[2].end == BERLIN.localize(datetime(2014, 7, 14, 18, 0)) assert events[3].end == BERLIN.localize(datetime(2014, 7, 21, 18, 0)) assert events[4].end == BERLIN.localize(datetime(2014, 7, 28, 18, 0)) assert events[5].end == BERLIN.localize(datetime(2014, 8, 4, 18, 0)) assert str(events[0].summary) == 'Arbeit' for num, event in enumerate(events[1:]): event.raw assert str(event.summary) == 'Arbeit (lang)' event_rrule_this_and_future_multi_day_shift = \ event_rrule_this_and_future_temp.format('20140708T090000', '20140709T150000') def test_event_rrule_this_and_future_multi_day_shift(): dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) dbi.update(event_rrule_this_and_future_multi_day_shift, href='12345.ics', etag='abcd', calendar=calname) assert dbi.list(calname) == [('12345.ics', 'abcd')] events = dbi.get_localized(BERLIN.localize(datetime(2014, 4, 30, 0, 0)), BERLIN.localize(datetime(2014, 9, 26, 0, 0))) events = sorted(events, key=lambda x: x.start) assert len(events) == 6 assert events[0].start == BERLIN.localize(datetime(2014, 6, 30, 7, 0)) assert events[1].start == BERLIN.localize(datetime(2014, 7, 8, 9, 0)) assert events[2].start == BERLIN.localize(datetime(2014, 7, 15, 9, 0)) assert events[3].start == BERLIN.localize(datetime(2014, 7, 22, 9, 0)) assert events[4].start == BERLIN.localize(datetime(2014, 7, 29, 9, 0)) assert events[5].start == BERLIN.localize(datetime(2014, 8, 5, 9, 0)) assert events[0].end == BERLIN.localize(datetime(2014, 6, 30, 12, 0)) assert events[1].end == BERLIN.localize(datetime(2014, 7, 9, 15, 0)) assert events[2].end == BERLIN.localize(datetime(2014, 7, 16, 15, 0)) assert events[3].end == BERLIN.localize(datetime(2014, 7, 23, 15, 0)) assert events[4].end == BERLIN.localize(datetime(2014, 7, 30, 15, 0)) assert events[5].end == BERLIN.localize(datetime(2014, 8, 6, 15, 0)) assert str(events[0].summary) == 'Arbeit' for event in events[1:]: assert str(event.summary) == 'Arbeit (lang)' event_rrule_this_and_future_allday_temp = """ BEGIN:VCALENDAR BEGIN:VEVENT UID:event_rrule_recurrence_id_allday SUMMARY:Arbeit RRULE:FREQ=WEEKLY;UNTIL=20140806 DTSTART;VALUE=DATE:20140630 DTEND;VALUE=DATE:20140701 END:VEVENT BEGIN:VEVENT UID:event_rrule_recurrence_id_allday SUMMARY:Arbeit (lang) RECURRENCE-ID;RANGE=THISANDFUTURE;VALUE=DATE:20140707 DTSTART;VALUE=DATE:{} DTEND;VALUE=DATE:{} END:VEVENT END:VCALENDAR """ event_rrule_this_and_future_allday = \ event_rrule_this_and_future_allday_temp.format(20140708, 20140709) def test_event_rrule_this_and_future_allday(): dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) 
dbi.update(event_rrule_this_and_future_allday, href='rrule_this_and_future_allday.ics', etag='abcd', calendar=calname) assert dbi.list(calname) == [('rrule_this_and_future_allday.ics', 'abcd')] events = list(dbi.get_floating(datetime(2014, 4, 30, 0, 0), datetime(2014, 9, 27, 0, 0))) assert len(events) == 6 assert events[0].start == date(2014, 6, 30) assert events[1].start == date(2014, 7, 8) assert events[2].start == date(2014, 7, 15) assert events[3].start == date(2014, 7, 22) assert events[4].start == date(2014, 7, 29) assert events[5].start == date(2014, 8, 5) assert events[0].end == date(2014, 6, 30) assert events[1].end == date(2014, 7, 8) assert events[2].end == date(2014, 7, 15) assert events[3].end == date(2014, 7, 22) assert events[4].end == date(2014, 7, 29) assert events[5].end == date(2014, 8, 5) assert str(events[0].summary) == 'Arbeit' for event in events[1:]: assert str(event.summary) == 'Arbeit (lang)' def test_event_rrule_this_and_future_allday_prior(): event_rrule_this_and_future_allday_prior = \ event_rrule_this_and_future_allday_temp.format(20140705, 20140706) dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) dbi.update(event_rrule_this_and_future_allday_prior, href='rrule_this_and_future_allday.ics', etag='abcd', calendar=calname) assert dbi.list(calname) == [('rrule_this_and_future_allday.ics', 'abcd')] events = list(dbi.get_floating(datetime(2014, 4, 30, 0, 0), datetime(2014, 9, 27, 0, 0))) assert len(events) == 6 assert events[0].start == date(2014, 6, 30) assert events[1].start == date(2014, 7, 5) assert events[2].start == date(2014, 7, 12) assert events[3].start == date(2014, 7, 19) assert events[4].start == date(2014, 7, 26) assert events[5].start == date(2014, 8, 2) assert events[0].end == date(2014, 6, 30) assert events[1].end == date(2014, 7, 5) assert events[2].end == date(2014, 7, 12) assert events[3].end == date(2014, 7, 19) assert events[4].end == date(2014, 7, 26) assert events[5].end == date(2014, 8, 2) assert str(events[0].summary) == 'Arbeit' for event in events[1:]: assert str(event.summary) == 'Arbeit (lang)' event_rrule_multi_this_and_future_allday = """BEGIN:VCALENDAR BEGIN:VEVENT UID:event_multi_rrule_recurrence_id_allday SUMMARY:Arbeit RRULE:FREQ=WEEKLY;UNTIL=20140806 DTSTART;VALUE=DATE:20140630 DTEND;VALUE=DATE:20140701 END:VEVENT BEGIN:VEVENT UID:event_multi_rrule_recurrence_id_allday SUMMARY:Arbeit (neu) RECURRENCE-ID;RANGE=THISANDFUTURE;VALUE=DATE:20140721 DTSTART;VALUE=DATE:20140717 DTEND;VALUE=DATE:20140718 END:VEVENT BEGIN:VEVENT UID:event_multi_rrule_recurrence_id_allday SUMMARY:Arbeit (lang) RECURRENCE-ID;RANGE=THISANDFUTURE;VALUE=DATE:20140707 DTSTART;VALUE=DATE:20140712 DTEND;VALUE=DATE:20140714 END:VEVENT END:VCALENDAR""" def test_event_rrule_multi_this_and_future_allday(): dbi = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) dbi.update(event_rrule_multi_this_and_future_allday, href='event_rrule_multi_this_and_future_allday.ics', etag='abcd', calendar=calname) assert dbi.list(calname) == [('event_rrule_multi_this_and_future_allday.ics', 'abcd')] events = sorted(dbi.get_floating(datetime(2014, 4, 30, 0, 0), datetime(2014, 9, 27, 0, 0))) assert len(events) == 6 assert events[0].start == date(2014, 6, 30) assert events[1].start == date(2014, 7, 12) assert events[2].start == date(2014, 7, 17) assert events[3].start == date(2014, 7, 19) assert events[4].start == date(2014, 7, 24) assert events[5].start == date(2014, 7, 31) assert events[0].end == date(2014, 6, 30) assert events[1].end == date(2014, 7, 13) 
assert events[2].end == date(2014, 7, 17) assert events[3].end == date(2014, 7, 20) assert events[4].end == date(2014, 7, 24) assert events[5].end == date(2014, 7, 31) assert str(events[0].summary) == 'Arbeit' for event in [events[1], events[3]]: assert str(event.summary) == 'Arbeit (lang)' for event in [events[2], events[4], events[5]]: assert str(event.summary) == 'Arbeit (neu)' master = """BEGIN:VEVENT UID:event_rrule_recurrence_id SUMMARY:Arbeit RRULE:FREQ=WEEKLY;UNTIL=20140806T060000Z DTSTART;TZID=Europe/Berlin:20140630T070000 DTEND;TZID=Europe/Berlin:20140630T120000 END:VEVENT""" recuid_this_future = icalendar.Event.from_ical("""BEGIN:VEVENT UID:event_rrule_recurrence_id SUMMARY:Arbeit RECURRENCE-ID;RANGE=THISANDFUTURE:20140707T050000Z DTSTART;TZID=Europe/Berlin:20140707T090000 DTEND;TZID=Europe/Berlin:20140707T140000 END:VEVENT""") recuid_this_future_duration = icalendar.Event.from_ical("""BEGIN:VEVENT UID:event_rrule_recurrence_id SUMMARY:Arbeit RECURRENCE-ID;RANGE=THISANDFUTURE:20140707T050000Z DTSTART;TZID=Europe/Berlin:20140707T090000 DURATION:PT4H30M END:VEVENT""") def test_calc_shift_deltas(): assert (timedelta(hours=2), timedelta(hours=5)) == \ backend.calc_shift_deltas(recuid_this_future) assert (timedelta(hours=2), timedelta(hours=4, minutes=30)) == \ backend.calc_shift_deltas(recuid_this_future_duration) event_a = """BEGIN:VEVENT UID:123 SUMMARY:event a RRULE:FREQ=WEEKLY;UNTIL=20140806T060000Z DTSTART;TZID=Europe/Berlin:20140630T070000 DTEND;TZID=Europe/Berlin:20140630T120000 END:VEVENT""" event_b = """BEGIN:VEVENT UID:123 SUMMARY:event b RRULE:FREQ=WEEKLY;UNTIL=20140806T060000Z DTSTART;TZID=Europe/Berlin:20140630T070000 DTEND;TZID=Europe/Berlin:20140630T120000 END:VEVENT""" def test_two_calendars_same_uid(): home = 'home' work = 'work' dbi = backend.SQLiteDb([home, work], ':memory:', locale=LOCALE_BERLIN) assert dbi.list(home) == [] assert dbi.list(work) == [] dbi.update(event_a, href='12345.ics', etag='abcd', calendar=home) assert dbi.list(home) == [('12345.ics', 'abcd')] assert dbi.list(work) == [] dbi.update(event_b, href='12345.ics', etag='abcd', calendar=work) assert dbi.list(home) == [('12345.ics', 'abcd')] assert dbi.list(work) == [('12345.ics', 'abcd')] dbi.calendars = [home] events_a = list(dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 7, 26, 0, 0)))) dbi.calendars = [work] events_b = list(dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 7, 26, 0, 0)))) assert len(events_a) == 4 assert len(events_b) == 4 dbi.calendars = [work, home] events_c = list(dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 7, 26, 0, 0)))) assert len(events_c) == 8 assert [event.calendar for event in events_c].count(home) == 4 assert [event.calendar for event in events_c].count(work) == 4 dbi.delete('12345.ics', calendar=home) dbi.calendars = [home] events_a = list(dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 7, 26, 0, 0)))) dbi.calendars = [work] events_b = list(dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 7, 26, 0, 0)))) assert len(events_a) == 0 assert len(events_b) == 4 dbi.calendars = [work, home] events_c = list(dbi.get_localized(BERLIN.localize(datetime(2014, 6, 30, 0, 0)), BERLIN.localize(datetime(2014, 7, 26, 0, 0)))) assert [event.calendar for event in events_c].count('home') == 0 assert [event.calendar for event in events_c].count('work') == 4 
assert dbi.list(home) == [] assert dbi.list(work) == [('12345.ics', 'abcd')] def test_update_one_should_not_affect_others(): """test if an THISANDFUTURE param effects other events as well""" db = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) db.update(_get_text('event_d_15'), href='first', calendar=calname) events = db.get_floating(datetime(2015, 4, 9, 0, 0), datetime(2015, 4, 10, 0, 0)) assert len(list(events)) == 1 db.update(event_rrule_multi_this_and_future_allday, href='second', calendar=calname) events = list(db.get_floating(datetime(2015, 4, 9, 0, 0), datetime(2015, 4, 10, 0, 0))) assert len(events) == 1 def test_zuluv_events(): """test if events in Zulu time are correctly recognized as locaized events""" db = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) db.update(_get_text('event_dt_simple_zulu'), href='event_zulu', calendar=calname) events = db.get_localized(BERLIN.localize(datetime(2014, 4, 9, 0, 0)), BERLIN.localize(datetime(2014, 4, 10, 0, 0))) events = list(events) assert len(events) == 1 event = events[0] assert type(event) == LocalizedEvent assert event.start_local == BERLIN.localize(datetime(2014, 4, 9, 11, 30)) def test_no_dtend(): """test support for events with no dtend""" db = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) db.update(_get_text('event_dt_no_end'), href='event_dt_no_end', calendar=calname) events = db.get_floating(datetime(2016, 1, 16, 0, 0), datetime(2016, 1, 17, 0, 0)) event = list(events)[0] assert event.start == date(2016, 1, 16) assert event.end == date(2016, 1, 16) event_rdate_period = """BEGIN:VEVENT SUMMARY:RDATE period DTSTART:19961230T020000Z DTEND:19961230T060000Z UID:rdate_period RDATE;VALUE=PERIOD:19970101T180000Z/19970102T070000Z,19970109T180000Z/PT5H30M END:VEVENT""" supported_events = [ event_a, event_b, event_rrule_this_and_future, event_rrule_this_and_future_allday, event_rrule_this_and_future_multi_day_shift ] def test_check_support(): for cal_str in supported_events: ical = icalendar.Calendar.from_ical(cal_str) [backend.check_support(event, '', '') for event in ical.walk()] ical = icalendar.Calendar.from_ical(event_rrule_this_and_prior) with pytest.raises(UpdateFailed): [backend.check_support(event, '', '') for event in ical.walk()] # icalendar 3.9.2 changed how it deals with unsupported components if tuple([int(i) for i in icalendar.__version__.split('.')[:3]]) <= (3, 9, 1): ical = icalendar.Calendar.from_ical(event_rdate_period) with pytest.raises(UpdateFailed): [backend.check_support(event, '', '') for event in ical.walk()] card = """BEGIN:VCARD VERSION:3.0 FN:Unix BDAY:19710311 END:VCARD """ card_no_year = """BEGIN:VCARD VERSION:3.0 FN:Unix BDAY:--0311 END:VCARD """ card_does_not_parse = """BEGIN:VCARD VERSION:3.0 FN:Unix BDAY:x END:VCARD """ card_no_fn = """BEGIN:VCARD VERSION:3.0 N:Ritchie;Dennis;MacAlistair;; BDAY:19410909 END:VCARD """ card_two_birthdays = """BEGIN:VCARD VERSION:3.0 N:Ritchie;Dennis;MacAlistair;; BDAY:19410909 BDAY:--0311 END:VCARD """ day = date(1971, 3, 11) start = datetime.combine(day, time.min) end = datetime.combine(day, time.max) def test_birthdays(): db = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) assert list(db.get_floating(start, end)) == list() db.update_birthday(card, 'unix.vcf', calendar=calname) events = list(db.get_floating(start, end)) assert len(events) == 1 assert events[0].summary == 'Unix\'s 0th birthday' events = list(db.get_floating(datetime(2016, 3, 11, 0, 0), datetime(2016, 3, 11, 23, 59, 59, 999))) assert events[0].summary 
== 'Unix\'s 45th birthday' def test_birthdays_no_year(): db = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) assert list(db.get_floating(start, end)) == list() db.update_birthday(card_no_year, 'unix.vcf', calendar=calname) events = list(db.get_floating(start, end)) assert len(events) == 1 assert events[0].summary == 'Unix\'s birthday' def test_birthdays_no_fn(): db = backend.SQLiteDb(['home'], ':memory:', locale=LOCALE_BERLIN) assert list(db.get_floating(datetime(1941, 9, 9, 0, 0), datetime(1941, 9, 9, 23, 59, 59, 9999))) == list() db.update_birthday(card_no_fn, 'unix.vcf', calendar=calname) events = list(db.get_floating(datetime(1941, 9, 9, 0, 0), datetime(1941, 9, 9, 23, 59, 59, 9999))) assert len(events) == 1 assert events[0].summary == 'Dennis MacAlistair Ritchie\'s 0th birthday' def test_birthday_does_not_parse(): db = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) assert list(db.get_floating(start, end)) == list() db.update_birthday(card_does_not_parse, 'unix.vcf', calendar=calname) events = list(db.get_floating(start, end)) assert len(events) == 0 def test_vcard_two_birthdays(): db = backend.SQLiteDb([calname], ':memory:', locale=LOCALE_BERLIN) assert list(db.get_floating(start, end)) == list() db.update_birthday(card_two_birthdays, 'unix.vcf', calendar=calname) events = list(db.get_floating(start, end)) assert len(events) == 0
dzoep/khal
tests/backend_test.py
Python
mit
27,566
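The khal backend tests above revolve around RANGE=THISANDFUTURE recurrence overrides and the two deltas that `backend.calc_shift_deltas` derives from such an override: how far the start moves relative to the RECURRENCE-ID and how long each shifted instance lasts. The following is a minimal, self-contained sketch of that derivation using only the icalendar API; it is not khal's actual implementation, and the event text is the same sample used in `test_calc_shift_deltas` (so the expected result is the (2 h, 5 h) pair asserted there).

import icalendar

OVERRIDE = """BEGIN:VEVENT
UID:event_rrule_recurrence_id
SUMMARY:Arbeit
RECURRENCE-ID;RANGE=THISANDFUTURE:20140707T050000Z
DTSTART;TZID=Europe/Berlin:20140707T090000
DTEND;TZID=Europe/Berlin:20140707T140000
END:VEVENT"""

def shift_deltas(vevent):
    """Return (start shift, duration) implied by a THISANDFUTURE override."""
    start = vevent['DTSTART'].dt
    recurrence_id = vevent['RECURRENCE-ID'].dt
    if 'DTEND' in vevent:
        duration = vevent['DTEND'].dt - start
    else:
        # no DTEND: fall back to an explicit DURATION property
        duration = vevent['DURATION'].dt
    return start - recurrence_id, duration

event = icalendar.Event.from_ical(OVERRIDE)
print(shift_deltas(event))   # (timedelta(hours=2), timedelta(hours=5))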
#!/usr/local/bin/python3 import logging import logging.handlers import time import urllib3 import boto.route53 start_time = time.time() # configuration cfg_profile = 'Credential' cfg_region = 'us-east-1' cfg_zone = 'YOUR.DOMAIN' cfg_record = 'A.YOUR.DOMAIN' cfg_timeout = 300 aws_key = 'YOUR AWS KEY' aws_secret = 'YOUR_AWS_SECRET' # set up logging (logging.handlers must be imported explicitly for SysLogHandler) log = logging.getLogger(__name__) log.setLevel(logging.DEBUG) handler = logging.handlers.SysLogHandler(address = '/dev/log') formatter = logging.Formatter('%(module)s: %(message)s') handler.setFormatter(formatter) log.addHandler(handler) # get ip address http = urllib3.PoolManager() request = http.request('GET', 'http://curlmyip.com') ip_addr = request.data.decode("utf-8")[:-1] log.info('Current IP address: ' + ip_addr) # check current ip address conn = boto.route53.connect_to_region( cfg_region, aws_access_key_id = aws_key, aws_secret_access_key = aws_secret ) zone = conn.get_zone(cfg_zone) a_record = zone.get_a(cfg_record) a_record_value = a_record.resource_records[0] log.info('Route53 DNS record address: %s' % a_record_value) # check whether it's the same if ip_addr != a_record_value: # needs update log.warning('IP Changed, update needed.') status = zone.update_a(cfg_record, ip_addr) log.info('Record updated, waiting to sync, timeout in %s seconds' % str(cfg_timeout)) # check for status update count = 0 while (count < cfg_timeout): count = count + 1 time.sleep(1) status.update() if status.status == 'INSYNC': log.info('Record synced, exit') break if count == cfg_timeout: log.error('Sync timeout, exit') else: # should be fine log.info('No need to update, exit.') log.info('Total time spent: %s seconds' % str(round(time.time() - start_time, 4)))
xinsnake/route53ddns-py
route53ddns.py
Python
mit
1,825
# Copyright 2010-2011 OpenStack Foundation # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # Copyright (c) 2015 Cloud Brewery Inc. (cloudbrewery.io) # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Base unit test classes """ import os import fixtures import testtools class TestCase(testtools.TestCase): """Test case base class for all unit tests.""" def setUp(self): """Run before each test method to initialize test environment.""" super(TestCase, self).setUp() test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) try: test_timeout = int(test_timeout) except ValueError: # If timeout value is invalid do not set a timeout. test_timeout = 0 if test_timeout > 0: self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) self.useFixture(fixtures.NestedTempfile()) self.useFixture(fixtures.TempHomeDir()) if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or os.environ.get('OS_STDOUT_CAPTURE') == '1'): stdout = self.useFixture(fixtures.StringStream('stdout')).stream self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or os.environ.get('OS_STDERR_CAPTURE') == '1'): stderr = self.useFixture(fixtures.StringStream('stderr')).stream self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) self.log_fixture = self.useFixture(fixtures.FakeLogger()) self.addCleanup(self._clear_attrs) def _clear_attrs(self): # Delete attributes that don't start with _ so they don't pin # memory around unnecessarily for the duration of the test # suite for key in [k for k in self.__dict__.keys() if k[0] != '_']: del self.__dict__[key] def path_get(self, project_file=None): """Get the absolute path to a file. Used for testing the API. :param project_file: File whose path to return. Default: None. :returns: path to the specified file, or path to project root. """ root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..',)) if project_file: return os.path.join(root, project_file) else: return root
CloudBrewery/swift-container-keys
containerkeys/tests/base.py
Python
apache-2.0
2,905
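The base class above wires up per-test timeouts, temporary directories, and optional stdout/stderr capture from environment variables, so individual tests only need to subclass it. A minimal sketch of a test module built on it might look like this; the import path `containerkeys.tests.base` is inferred from the file's location and the test class itself is hypothetical.

import os

from containerkeys.tests import base


class PathGetTestCase(base.TestCase):
    """Example tests relying on the fixtures set up in base.TestCase."""

    def test_path_get_returns_project_root(self):
        # Without an argument, path_get() hands back the project root.
        root = self.path_get()
        self.assertTrue(os.path.isabs(root))

    def test_path_get_joins_project_file(self):
        # With an argument, the file name is joined onto the project root.
        path = self.path_get('setup.py')
        self.assertTrue(path.endswith('setup.py'))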
''' This file holds globally useful utility classes and functions, i.e., classes and functions that are generic enough not to be specific to one app. ''' import logging import os import re import sys from datetime import tzinfo, timedelta from django.conf import settings # Setup logging support. LOGGER = logging.getLogger(__name__) LOGGER.addHandler(settings.LOG_HANDLER) # try to import the `fcntl` module for locking support through the `Lock` class # below try: import fcntl except ImportError: LOGGER.warn("Locking support is not available for your (non-Unix?) system. " "Using multiple processes might not be safe.") def get_class_by_name(module_name, class_name): ''' Given the name of a module (e.g., 'metashare.resedit.admin') and the name of a class (e.g., 'ContactSMI'), return the class type object (in the example, the class ContactSMI). If no such class exists, throws an AttributeError ''' try: class_type = getattr(sys.modules[module_name], class_name) return class_type except AttributeError: raise AttributeError("Module '{0}' has no class '{1}'".format(module_name, class_name)) def verify_subclass(subclass, superclass): ''' Verify that subclass is indeed a subclass of superclass. If that is not the case, a TypeError is raised. ''' if not issubclass(subclass, superclass): raise TypeError('class {0} is not a subclass of class {1}'.format(subclass, superclass)) def prettify_camel_case_string(cc_str): ''' Prettifies the given camelCase string so that it is better readable. For example, "speechAnnotation-soundToTextAlignment" is converted to "Speech Annotation - Sound To Text Alignment". N.B.: The conversion currently only recognizes boundaries with ASCII letters. ''' result = cc_str if len(result) > 1: # result = result.replace('-', ' - ') AtA result = result.replace('_', ' ') result = result.replace('AtA', 'At a') result = re.sub(r'(..)(?=[A-Z][a-z])', r'\1 ', result) result = ' '.join([(len(token) > 1 and (token[0].upper() + token[1:])) or token[0].upper() for token in result.split()]) return result def create_breadcrumb_template_params(model, action): ''' Create a dictionary for breadcrumb templates. ''' opts = model._meta dictionary = { 'app_label': opts.app_label, 'verbose_name': opts.verbose_name, 'action': action, } return dictionary class Lock(): """ Each instance of this class can be used to acquire an exclusive, system-wide (multi-process) lock on a particular name. This class will only work on Unix systems viz. systems that provide the `fcntl` module. On other systems the class will silently do nothing. """ def __init__(self, lock_name): """ Create a `Lock` object which can create an exclusive lock on the given name. """ if 'fcntl' in sys.modules: self.handle = open(os.path.join(settings.LOCK_DIR, lock_name), 'w') else: self.handle = None def acquire(self): """ Acquire a lock on the name for which this `Lock` was created. """ if self.handle: fcntl.flock(self.handle, fcntl.LOCK_EX) def release(self): """ Release any lock on the name for which this `Lock` was created. """ if self.handle: fcntl.flock(self.handle, fcntl.LOCK_UN) def __del__(self): if self.handle: self.handle.close() class SimpleTimezone(tzinfo): """ A fixed offset timezone with an unknown name and an unknown DST adjustment. """ def __init__(self, offset): self.__offset = timedelta(minutes=offset) def utcoffset(self, dt): return self.__offset def tzname(self, dt): return None def dst(self, dt): return None
MiltosD/CEF-ELRC
metashare/utils.py
Python
bsd-3-clause
4,061
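The `Lock` class above provides a process-wide advisory lock backed by `fcntl.flock`. The sketch below shows the intended acquire/release pattern around a critical section; it assumes a configured Django settings module (providing `LOCK_DIR` and `LOG_HANDLER`) and that the module is importable as `metashare.utils`, its path in the repository.

from metashare.utils import Lock

lock = Lock('import_job')      # lock file is created inside settings.LOCK_DIR
lock.acquire()                 # blocks until no other process holds the lock
try:
    # ... critical section: only one process at a time gets here ...
    pass
finally:
    lock.release()             # always release, even if the section raised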
""" CLUSTALW wrapper for python author: Matt Rasmussen date: 2/4/2007 """ # python libs import math import os # rasmus libs from rasmus import treelib from rasmus import util # compbio imports from . import fasta # TODO: change removetmp to saveOutput def clustalw(seqs, verbose=True, removetmp=True, options=""): """Align sequences 'seqs' with clustalw""" if len(seqs) < 2: return seqs # make input file for clustalw infilename = util.tempfile(".", "clustalw-in", ".fa") fasta.writeFasta(infilename, seqs) # run clustalw outfilename = util.tempfile(".", "clustalw-out", ".aln") cmd = "clustalw " + options + " -quicktree -infile=" + infilename + \ " -outfile=" + outfilename if not verbose: cmd += " > /dev/null" os.system(cmd) # parse output aln = readClustalwAlign(outfilename) # cleanup tempfiles if removetmp: os.remove(infilename) os.remove(outfilename) os.remove(infilename.replace(".fa", ".dnd")) # convert output return aln def buildTree(seqs, verbose=True, removetmp=True, options=""): # make input file for clustalw infilename = util.tempfile(".", "clustalw-in", ".fa") fasta.writeFasta(infilename, seqs) # run clustalw outfilename = infilename.replace(".fa", ".ph") cmd = "clustalw " + options + " -tree -infile=" + infilename + \ " -outfile=" + outfilename if not verbose: cmd += " > /dev/null" os.system(cmd) # parse output tree = treelib.Tree() tree.read_newick(outfilename) # cleanup tempfiles if removetmp: os.remove(infilename) os.remove(outfilename) return tree def clustalwProfiles(aln1, aln2, verbose=True, removetmp=True, options=""): # make input file for clustalw infilename1 = util.tempfile(".", "clustalw-in", ".fa") infilename2 = util.tempfile(".", "clustalw-in", ".fa") fasta.writeFasta(infilename1, aln1) fasta.writeFasta(infilename2, aln2) # run clustalw outfilename = util.tempfile(".", "clustalw-out", ".aln") cmd = "clustalw " + options + " -quicktree -profile1=" + infilename1 + \ " -profile2=" + infilename2 + " -outfile=" + outfilename if not verbose: cmd += " > /dev/null" os.system(cmd) # parse output aln = readClustalwAlign(outfilename) # cleanup tempfiles if removetmp: os.remove(infilename1) os.remove(infilename2) os.remove(outfilename) try: os.remove(infilename1.replace(".fa", ".dnd")) except: pass try: os.remove(infilename2.replace(".fa", ".dnd")) except: pass return aln def readClustalwAlign(filename): infile = util.open_stream(filename) seqs = fasta.FastaDict() # skip first three lines infile.next() infile.next() infile.next() # parse remaining lines for line in infile: if line[0].isdigit() or line[0].isalpha(): (name, seq) = line.split()[:2] if name not in seqs: seqs[name] = seq else: seqs[name] += seq return seqs #============================================================================= # Alignment stats # TODO: either move to alignlib or get rid of it # def alignInfo(aln): score = 0 entropyBefore = - len(aln) * .2 * math.log(.2, 2) for i in xrange(len(aln[0])): # count characters in a column charsums = {"A": 0, "C": 0, "G": 0, "T": 0, "-": 0} for seq in aln: charsums[seq[i]] += 1 # calc entropy of column entropyAfter = 0 for char in "ACGT-": p = charsums[char] / float(len(aln)) if p != 0: entropyAfter += - p * math.log(p, 2) score += entropyBefore - entropyAfter return score def alignAvgInfo(aln): return alignInfo(aln) / len(aln[0]) def alignScore(seqs, match=1, mismatch=-1, gapopen=-5, gapext=-1): score = 0 for i in xrange(len(seqs)): for j in xrange(0, i): for k in xrange(len(seqs[i])): if seqs[i][k] == "-" or seqs[j][k] == "-": continue if seqs[i][k] == seqs[j][k]: score 
+= match else: score += mismatch # find end of sequence size = len(seqs[i]) while size > 0 and seqs[i][size-1] == "-": size -= 1 # skip first gap k = 0 while k < size and seqs[i][k] == "-": k += 1 # count gaps while k < size: if seqs[i][k] == "-": score += gapopen k += 1 while k < size and seqs[i][k] == "-": score += gapext k += 1 return score #============================================================================= # testing # if __name__ == "__main__": pass
abhishekgahlot/compbio
compbio/clustalw.py
Python
mit
5,028
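`alignScore` above computes a sum-of-pairs score with separate gap-opening and gap-extension penalties. As a simplified illustration of the sum-of-pairs part only (it deliberately ignores the module's gap handling), the column-wise scoring can be written as below; the toy alignment is invented for the example.

def sum_of_pairs(seqs, match=1, mismatch=-1):
    """Score every pair of sequences column by column, skipping gap columns."""
    score = 0
    for i in range(len(seqs)):
        for j in range(i):
            for a, b in zip(seqs[i], seqs[j]):
                if a == "-" or b == "-":
                    continue            # gaps are penalized separately in alignScore
                score += match if a == b else mismatch
    return score

# Three aligned toy sequences: 8 matching pairs and 2 mismatching pairs -> 6.
print(sum_of_pairs(["ACGT", "ACGT", "AC-A"]))   # 6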
# -*- coding: utf-8 -*- """Various text used throughout the website, e.g. status messages, errors, etc. """ # Status Messages ################# # NOTE: in status messages, newlines are not preserved, so triple-quotes strings # are ok # Status message shown at settings page on first login # (upon clicking primary email confirmation link) WELCOME_MESSAGE = ''' <h1>Welcome to the OSF!</h1> <p>Visit our <a href="https://osf.io/getting-started/">getting started page</a> to learn about creating a project, or get inspiration from <a href="https://osf.io/explore/activity/#popularPublicProjects">popular public projects</a></p> ''' REGISTRATION_SUCCESS = '''Registration successful. Please check {email} to confirm your email address.''' # Shown if registration is turned off in website.settings REGISTRATION_UNAVAILABLE = 'Registration currently unavailable.' ALREADY_REGISTERED = '''The email <em>{email}</em> has already been registered.''' AFTER_SUBMIT_FOR_REVIEW = "Your submission has been received. You will be notified within ten business days regarding the status of your submission. If you have questions you may contact us at prereg@cos.io." # Shown if user tries to login with an email that is not yet confirmed UNCONFIRMED = ('This login email has been registered but not confirmed. Please check your email (and spam folder).' ' <a href="/resend/">Click here</a> to resend your confirmation email.') # Shown if the user's account is disabled DISABLED = ''' Log-in failed: Deactivated account. ''' # Shown on incorrect password attempt LOGIN_FAILED = ''' Log-in failed. Please try again or reset your password. ''' # Shown at login page if user tries to access a resource that requires auth MUST_LOGIN = ''' You must log in to access this resource. ''' # Shown on logout LOGOUT = ''' You have successfully logged out. ''' EMAIL_NOT_FOUND = ''' <strong>{email}</strong> was not found in our records. ''' # Shown after an unregistered user claims an account and is redirected to the # settings page CLAIMED_CONTRIBUTOR = ('<strong>Welcome to the OSF!</strong> Edit your display name below and then check your ' '<a href="/dashboard/">dashboard</a> to see projects to which you have been added as a ' 'contributor by someone else.') # Error Pages # ########### # Shown at error page if an expired/revokes email confirmation link is clicked EXPIRED_EMAIL_CONFIRM_TOKEN = 'This confirmation link has expired. Please <a href="/login/">log in</a> to continue.' INVALID_EMAIL_CONFIRM_TOKEN = 'This confirmation link is invalid. Please <a href="/login/">log in</a> to continue.' CANNOT_MERGE_ACCOUNTS_SHORT = 'Cannot Merge Accounts' CANNOT_MERGE_ACCOUNTS_LONG = ( 'Accounts cannot be merged due to a possible conflict with add-ons. ' 'Before you continue, please <a href="/settings/addons/"> deactivate ' 'any add-ons</a> to be merged into your primary account.' ) MERGE_COMPLETE = 'Accounts successfully merged.' MERGE_CONFIRMATION_REQUIRED_SHORT = 'Confirmation Required: Merge Accounts' MERGE_CONFIRMATION_REQUIRED_LONG = ( '<p>This email is confirmed to another account. ' 'Would you like to merge <em>{user_to_merge.username}</em> with the account ' '<em>{user.username}</em>?<p>' '<a class="btn btn-primary" href="?confirm_merge">Confirm merge</a> ' ) # Node Actions AFTER_REGISTER_ARCHIVING = ( 'Files are being copied to the newly created registration, and you will receive an email ' 'notification when the copying is finished.' ) BEFORE_REGISTER_HAS_POINTERS = ( 'This {category} contains links to other projects. 
Links will be copied ' 'into your registration, but the projects that they link to will not be ' 'registered. If you wish to register the linked projects, you must fork ' 'them from the original project before registering.' ) BEFORE_FORK_HAS_POINTERS = ( 'This {category} contains links to other projects. Links will be copied ' 'into your fork, but the projects that they link to will not be forked. ' 'If you wish to fork the linked projects, they need to be forked from the ' 'original project.' ) REGISTRATION_INFO = ''' <p>Registration creates a frozen version of the project that can never be edited or deleted but can be retracted. You can register your project by selecting a registration form, entering information about your project, and then confirming. You will be able to continue editing the original project, however, and the frozen version with timestamps will always be linked to the original. Retracting a registration will leave behind metadata about when the registration was created and retracted but removes the contents of the registration.</p> <ul> <li>A registration can be made public immediately or entered into an embargo period of up to four years. At the end of the embargo period, the registration will automatically become public.</li> <li>Before initiating a registration, make sure that the project is in the state that you wish to freeze. Consider turning links into forks.</li> <li>Start by selecting a registration form from the list below. You can hit your browser's back button if the selected form is not appropriate for your use.</li> </ul> ''' REGISTRATION_EMBARGO_INFO = ''' You can choose whether to make your registration public immediately or embargo it for up to four years. At the end of the embargo period the registration is automatically made public. After becoming public, the only way to remove a registration is to retract it. Retractions show only the registration title, contributors, and description to indicate that a registration was made and later retracted. <br /><br /> If you choose to embargo your registration, a notification will be sent to all other project contributors. Other administrators will have 48 hours to approve or cancel creating the registration. If any other administrator rejects the registration, it will be canceled. If all other administrators approve or do nothing, the registration will be confirmed and enter its embargo period. ''' BEFORE_REGISTRATION_INFO = ''' Registration cannot be undone, and the archived content and files cannot be deleted after registration. Please be sure the project is complete and comprehensive for what you wish to register. ''' # Nodes: forking, templating, linking LINK_ACTION = 'Link to this Project' LINK_DESCRIPTION = """ <p>Linking to this project will reference it in another project, without creating a copy. The link will always point to the most up-to-date version.</p> """ TEMPLATE_ACTION = 'Copy Project Structure' TEMPLATE_DESCRIPTION = """ <p>This option will create a new project, using this project as a template. The new project will be structured in the same way, but contain no data.</p> """ FORK_ACTION = 'Fork this Project' FORK_DESCRIPTION = """ <p>Fork this project if you plan to build upon it in your own work. The new project will be an exact duplicate of this project's current state, with you as the only contributor.</p> """ TEMPLATE_DROPDOWN_HELP = """Start typing to search. 
Selecting project as template will duplicate its structure in the new project without importing the content of that project.""" TEMPLATED_FROM_PREFIX = "Templated from " # MFR Error handling ERROR_PREFIX = "Unable to render. <a href='?action=download'>Download</a> file to view it." SUPPORT = "Contact support@osf.io for further assistance." # Custom Error Messages w/ support STATA_VERSION_ERROR = 'Version of given Stata file is not 104, 105, 108, 113 (Stata 8/9), 114 (Stata 10/11) or 115 (Stata 12)<p>{0}</p>'.format(SUPPORT) BLANK_OR_CORRUPT_TABLE_ERROR = 'Is this a valid instance of this file type?<p>{0}</p>'.format(SUPPORT) #disk saving mode DISK_SAVING_MODE = 'Forks, registrations, and uploads to OSF Storage are temporarily disabled while we are undergoing a server upgrade. These features will return shortly.' #log out and revisit the link to confirm emails CONFIRM_ALTERNATE_EMAIL_ERROR = 'The email address has <b>NOT</b> been added to your account. Please log out and revisit the link in your email. Thank you.'
billyhunt/osf.io
website/language.py
Python
apache-2.0
8,196
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for `tf.data.Dataset.from_sparse_tensor_slices()`.""" from absl.testing import parameterized import numpy as np from tensorflow.python.data.kernel_tests import checkpoint_test_base from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import combinations from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors from tensorflow.python.framework import sparse_tensor from tensorflow.python.ops import array_ops from tensorflow.python.platform import test class FromSparseTensorSlicesTest(test_base.DatasetTestBase, parameterized.TestCase): @combinations.generate( combinations.times( combinations.combine(tf_api_version=1, mode=["graph"]), combinations.combine(slices=[[ [1., 2., 3.], [1.], [1.], [1., 2.], [], [1., 2.], [], [], [] ], [[1., 2.], [], [1., 2.], [1.], [1., 2.], [], [1., 2.]]]))) def testFromSparseTensorSlices(self, slices): """Test a dataset based on slices of a `tf.sparse.SparseTensor`.""" st = array_ops.sparse_placeholder(dtypes.float64) iterator = dataset_ops.make_initializable_iterator( dataset_ops.Dataset.from_sparse_tensor_slices(st)) init_op = iterator.initializer get_next = sparse_tensor.SparseTensor(*iterator.get_next()) with self.cached_session() as sess: # Test with sparse tensor in the appropriate order. 
# pylint: disable=g-complex-comprehension indices = np.array( [[i, j] for i in range(len(slices)) for j in range(len(slices[i]))]) values = np.array([val for s in slices for val in s]) # pylint: enable=g-complex-comprehension dense_shape = np.array([len(slices), max(len(s) for s in slices) + 1]) sparse_feed = sparse_tensor.SparseTensorValue(indices, values, dense_shape) sess.run(init_op, feed_dict={st: sparse_feed}) for i, s in enumerate(slices): results = sess.run(get_next) self.assertAllEqual(s, results.values) expected_indices = np.array( [[j] for j in range(len(slices[i]))]).reshape([-1, 1]) self.assertAllEqual(expected_indices, results.indices) self.assertAllEqual(dense_shape[1:], results.dense_shape) with self.assertRaises(errors.OutOfRangeError): sess.run(get_next) @combinations.generate( combinations.times( combinations.combine(tf_api_version=1, mode=["graph"]), combinations.combine(slices=[[ [1., 2., 3.], [1.], [1.], [1., 2.], [], [1., 2.], [], [], [] ], [[1., 2.], [], [1., 2.], [1.], [1., 2.], [], [1., 2.]]]))) def testFromSparseTensorSlicesInReverse(self, slices): """Test a dataset based on slices of a `tf.sparse.SparseTensor` in reverse order.""" st = array_ops.sparse_placeholder(dtypes.float64) iterator = dataset_ops.make_initializable_iterator( dataset_ops.Dataset.from_sparse_tensor_slices(st)) init_op = iterator.initializer with self.cached_session() as sess: # pylint: disable=g-complex-comprehension indices = np.array( [[i, j] for i in range(len(slices)) for j in range(len(slices[i]))]) values = np.array([val for s in slices for val in s]) # pylint: enable=g-complex-comprehension dense_shape = np.array([len(slices), max(len(s) for s in slices) + 1]) # Test with sparse tensor in the reverse order, which is not # currently supported. reverse_order_indices = indices[::-1, :] reverse_order_values = values[::-1] sparse_feed = sparse_tensor.SparseTensorValue( reverse_order_indices, reverse_order_values, dense_shape) with self.assertRaises(errors.UnimplementedError): sess.run(init_op, feed_dict={st: sparse_feed}) @combinations.generate(combinations.combine(tf_api_version=1, mode=["graph"])) def testEmptySparseTensorSlices(self): """Test a dataset based on slices of an empty `tf.sparse.SparseTensor`.""" st = array_ops.sparse_placeholder(dtypes.float64) iterator = dataset_ops.make_initializable_iterator( dataset_ops.Dataset.from_sparse_tensor_slices(st)) init_op = iterator.initializer get_next = sparse_tensor.SparseTensor(*iterator.get_next()) with self.cached_session() as sess: # Test with an empty sparse tensor. empty_indices = np.empty((0, 4), dtype=np.int64) empty_values = np.empty((0,), dtype=np.float64) empty_dense_shape = [0, 4, 37, 9] sparse_feed = sparse_tensor.SparseTensorValue(empty_indices, empty_values, empty_dense_shape) sess.run(init_op, feed_dict={st: sparse_feed}) with self.assertRaises(errors.OutOfRangeError): sess.run(get_next) @combinations.generate(combinations.combine(tf_api_version=1, mode=["graph"])) def testEmptySparseTensorSlicesInvalid(self): """Test a dataset based on invalid `tf.sparse.SparseTensor`.""" st = array_ops.sparse_placeholder(dtypes.float64) iterator = dataset_ops.make_initializable_iterator( dataset_ops.Dataset.from_sparse_tensor_slices(st)) init_op = iterator.initializer with self.cached_session() as sess: # Test with an empty sparse tensor but with non empty values. 
empty_indices = np.empty((0, 4), dtype=np.int64) non_empty_values = [1, 2, 3, 4] empty_dense_shape = [0, 4, 37, 9] sparse_feed = sparse_tensor.SparseTensorValue(empty_indices, non_empty_values, empty_dense_shape) # Here, we expect the test to fail when running the feed. with self.assertRaises(errors.InvalidArgumentError): sess.run(init_op, feed_dict={st: sparse_feed}) @combinations.generate(combinations.combine(tf_api_version=1, mode=["graph"])) def testEmptySparseTensorSlicesInvalid2(self): """Test a dataset based on invalid `tf.sparse.SparseTensor`.""" st = array_ops.sparse_placeholder(dtypes.float64) iterator = dataset_ops.make_initializable_iterator( dataset_ops.Dataset.from_sparse_tensor_slices(st)) init_op = iterator.initializer with self.cached_session() as sess: # Test with an empty sparse tensor but with non empty values. empty_indices = [[]] empty_values = [] dense_shape = [1, 1] sparse_feed = sparse_tensor.SparseTensorValue(empty_indices, empty_values, dense_shape) # Here, we expect the test to fail when running the feed. with self.assertRaises(errors.InvalidArgumentError): sess.run(init_op, feed_dict={st: sparse_feed}) @combinations.generate(combinations.combine(tf_api_version=2, mode=["eager"])) def testFromSparseTensorSlicesError(self): with self.assertRaises(AttributeError): dataset_ops.Dataset.from_sparse_tensor_slices(None) class FromSparseTensorSlicesCheckpointTest( checkpoint_test_base.CheckpointTestBase, parameterized.TestCase): def _build_sparse_tensor_slice_dataset(self, slices): # pylint: disable=g-complex-comprehension indices = np.array( [[i, j] for i in range(len(slices)) for j in range(len(slices[i]))], dtype=np.int64) values = np.array([val for s in slices for val in s], dtype=np.float64) # pylint: enable=g-complex-comprehension dense_shape = np.array( [len(slices), max(len(s) for s in slices) + 1], dtype=np.int64) sparse_components = sparse_tensor.SparseTensor(indices, values, dense_shape) return dataset_ops.Dataset.from_sparse_tensor_slices(sparse_components) @combinations.generate( combinations.times(test_base.v1_only_combinations(), checkpoint_test_base.default_test_combinations())) def test(self, verify_fn): slices = [[1., 2., 3.], [1.], [1.], [1., 2.], [], [1., 2.], [], [], []] verify_fn( self, lambda: self._build_sparse_tensor_slice_dataset(slices), num_outputs=9, sparse_tensors=True) if __name__ == "__main__": test.main()
tensorflow/tensorflow
tensorflow/python/data/kernel_tests/from_sparse_tensor_slices_test.py
Python
apache-2.0
8,944
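The TensorFlow tests above exercise slicing a sparse tensor along its first dimension: each slice keeps the values whose leading index matches that row, with the leading index stripped off. The same bookkeeping can be shown with plain NumPy, independent of the TF1-only `from_sparse_tensor_slices` API; the indices/values below mirror the shape of the `slices` fixtures used in the tests.

import numpy as np

slices = [[1., 2., 3.], [1.], [1., 2.]]

# Build the flat COO representation the tests feed in ...
indices = np.array([[i, j] for i, row in enumerate(slices) for j in range(len(row))])
values = np.array([v for row in slices for v in row])

# ... and slice it back apart along the first dimension.
for i in range(len(slices)):
    mask = indices[:, 0] == i
    row_indices = indices[mask][:, 1:]      # the leading dimension is dropped
    row_values = values[mask]
    print(i, row_indices.ravel(), row_values)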
import snap import sys num_nodes = int(sys.argv[1]) forward_prob = float(sys.argv[2]) backward_prob = float(sys.argv[3]) g = snap.GenForestFire(num_nodes, forward_prob,backward_prob) print num_nodes adj_list = [] for i in range(num_nodes): adj_list.append([]) for EI in g.Edges(): source = EI.GetSrcNId() sink = EI.GetDstNId() if sink not in adj_list[source]: adj_list[source].append(sink) if source not in adj_list[sink]: adj_list[sink].append(source) for i in range(num_nodes): ans = str(len(adj_list[i])) + " " for v in adj_list[i]: ans = ans + " " + str(v) print ans
simp1eton/CS224W_Final_Project
OLD/forest_fire_5.py
Python
mit
602
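The forest-fire script above prints one line with the node count, followed by one line per node containing the node's degree and then its neighbours (with a double space after the degree, an artifact of the string concatenation). A small sketch of reading that format back into an adjacency list, assuming the output was redirected to a file:

def read_adjacency(path):
    """Parse the degree/neighbour lines produced by the generator script."""
    with open(path) as handle:
        num_nodes = int(handle.readline())
        adj_list = []
        for _ in range(num_nodes):
            fields = handle.readline().split()   # split() absorbs the double space
            degree, neighbours = int(fields[0]), [int(v) for v in fields[1:]]
            assert degree == len(neighbours)
            adj_list.append(neighbours)
    return adj_list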
"""Unit tests for the ``gpgkeys`` paths. @Requirement: Gpgkey @CaseAutomation: Automated @CaseLevel: Acceptance @CaseComponent: API @TestType: Functional @CaseImportance: High @Upstream: No """ from fauxfactory import gen_string from nailgun import entities from requests import HTTPError from robottelo.constants import VALID_GPG_KEY_BETA_FILE, VALID_GPG_KEY_FILE from robottelo.datafactory import invalid_values_list, valid_data_list from robottelo.decorators import run_only_on, tier1 from robottelo.helpers import read_data_file from robottelo.test import APITestCase class GPGKeyTestCase(APITestCase): """Tests for ``katello/api/v2/gpg_keys``.""" @classmethod def setUpClass(cls): """Create an organization which can be re-used in tests.""" super(GPGKeyTestCase, cls).setUpClass() cls.org = entities.Organization().create() cls.key_content = read_data_file(VALID_GPG_KEY_FILE) @tier1 @run_only_on('sat') def test_positive_search_in_org(self): """Search for a GPG key and specify just ``organization_id``. @id: ff5e047c-404b-4379-8d28-3ad8cb39b6a9 @Steps: 1. Create an organization. 1. Create a GPG key belonging to the organization. 2. Search for GPG keys in the organization. @Assert: Only one GPG key is in the search results: the created GPG key. """ org = entities.Organization().create() gpg_key = entities.GPGKey(organization=org).create() gpg_keys = gpg_key.search({'organization'}) self.assertEqual(len(gpg_keys), 1) self.assertEqual(gpg_key.id, gpg_keys[0].id) @tier1 @run_only_on('sat') def test_positive_create_with_name(self): """Create a GPG key with valid name. @id: 741d969b-28ef-481f-bcf7-ed4cd920b030 @Assert: A GPG key is created with the given name. """ for name in valid_data_list(): with self.subTest(name): gpg_key = entities.GPGKey( organization=self.org, name=name).create() self.assertEqual(name, gpg_key.name) @tier1 @run_only_on('sat') def test_positive_create_with_content(self): """Create a GPG key with valid name and valid gpg key text. @id: cfa6690e-fed7-49cf-94f9-fd2deed941c0 @Assert: A GPG key is created with the expected content. """ gpg_key = entities.GPGKey( organization=self.org, content=self.key_content).create() self.assertEqual(self.key_content, gpg_key.content) @tier1 @run_only_on('sat') def test_negative_create_name(self): """Attempt to create GPG key with invalid names only. @id: 904a3ed0-7d50-495e-a700-b4f1ae913599 @Assert: A GPG key is not created and error is raised. """ for name in invalid_values_list(): with self.subTest(name): with self.assertRaises(HTTPError): entities.GPGKey(name=name).create() @tier1 @run_only_on('sat') def test_negative_create_with_same_name(self): """Attempt to create a GPG key providing a name of already existent entity @id: 78299f13-5977-4409-9bc7-844e54349926 @Assert: A GPG key is not created and error is raised. """ name = gen_string('alphanumeric') entities.GPGKey(organization=self.org, name=name).create() with self.assertRaises(HTTPError): entities.GPGKey(organization=self.org, name=name).create() @tier1 @run_only_on('sat') def test_negative_create_with_content(self): """Attempt to create GPG key with empty content. @id: fc79c840-6bcb-4d97-9145-c0008d5b028d @Assert: A GPG key is not created and error is raised. """ with self.assertRaises(HTTPError): entities.GPGKey(content='').create() @tier1 @run_only_on('sat') def test_positive_update_name(self): """Update GPG key name to another valid name. @id: 9868025d-5346-42c9-b850-916ce37a9541 @Assert: The GPG key name can be updated. 
""" gpg_key = entities.GPGKey(organization=self.org).create() for new_name in valid_data_list(): with self.subTest(new_name): gpg_key.name = new_name gpg_key = gpg_key.update(['name']) self.assertEqual(new_name, gpg_key.name) @tier1 @run_only_on('sat') def test_positive_update_content(self): """Update GPG key content text to another valid one. @id: 62fdaf55-c931-4be6-9857-68cc816046ad @Assert: The GPG key content text can be updated. """ gpg_key = entities.GPGKey( organization=self.org, content=read_data_file(VALID_GPG_KEY_BETA_FILE), ).create() gpg_key.content = self.key_content gpg_key = gpg_key.update(['content']) self.assertEqual(self.key_content, gpg_key.content) @tier1 @run_only_on('sat') def test_negative_update_name(self): """Attempt to update GPG key name to invalid one @id: 1a43f610-8969-4f08-967f-fb6af0fca31b @Assert: GPG key is not updated """ gpg_key = entities.GPGKey(organization=self.org).create() for new_name in invalid_values_list(): gpg_key.name = new_name with self.subTest(new_name): with self.assertRaises(HTTPError): gpg_key.update(['name']) @tier1 @run_only_on('sat') def test_negative_update_same_name(self): """Attempt to update GPG key name to the name of existing GPG key entity @id: e294e3b2-1125-4ad9-969a-eb3f1966419e @Assert: GPG key is not updated """ name = gen_string('alpha') entities.GPGKey(organization=self.org, name=name).create() new_gpg_key = entities.GPGKey(organization=self.org).create() new_gpg_key.name = name with self.assertRaises(HTTPError): new_gpg_key.update(['name']) @tier1 @run_only_on('sat') def test_negative_update_content(self): """Attempt to update GPG key content to invalid one @id: fee30ef8-370a-4fdd-9e45-e7ab95dade8b @Assert: GPG key is not updated """ gpg_key = entities.GPGKey( organization=self.org, content=self.key_content).create() gpg_key.content = '' with self.assertRaises(HTTPError): gpg_key.update(['content']) self.assertEqual(self.key_content, gpg_key.read().content) @tier1 @run_only_on('sat') def test_positive_delete(self): """Create a GPG key with different names and then delete it. @id: b06d211f-2827-40f7-b627-8b1fbaee2eb4 @Assert: The GPG key deleted successfully. """ gpg_key = entities.GPGKey(organization=self.org).create() gpg_key.delete() with self.assertRaises(HTTPError): gpg_key.read()
sthirugn/robottelo
tests/foreman/api/test_gpgkey.py
Python
gpl-3.0
7,069
# -*- coding: utf-8 -*- # '''Script to convert Matplotlib generated figures into TikZ/PGFPlots figures. ''' from matplotlib2tikz.__about__ import ( __author__, __email__, __copyright__, __credits__, __license__, __version__, __maintainer__, __status__ ) from matplotlib2tikz.save import save import pipdated if pipdated.needs_checking('matplotlib2tikz'): msg = pipdated.check('matplotlib2tikz', __version__) if msg: print(msg)
dougnd/matplotlib2tikz
matplotlib2tikz/__init__.py
Python
mit
518
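Besides the update check via pipdated, the package's public surface in this `__init__` is the re-exported `save` function. A minimal usage sketch follows; the figure itself is arbitrary and the output file name is invented.

import matplotlib.pyplot as plt
import matplotlib2tikz

plt.plot([0, 1, 2], [0, 1, 4], label="y = x^2")
plt.legend()

# Write a TikZ/PGFPlots version of the current figure for inclusion in LaTeX.
matplotlib2tikz.save("figure.tex")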
pedidos = [] def criar_pedido(nome, sabor, observacao=None): pedido = {} pedido['nome'] = nome pedido['sabor'] = sabor pedido['observacao'] = observacao return pedido pedidos.append(criar_pedido('mario', 'pepperoni')) pedidos.append(criar_pedido('marco', 'presunto', 'dobro de presunto')) for pedido in pedidos: template = 'Nome: {nome}\nSabor: {sabor}' print(template.format(**pedido)) if pedido['observacao']: print('Observacao: {}'.format(pedido['observacao'])) print('-'*20)
americomflores/django-pizza
pyexamples/functions.py
Python
cc0-1.0
545
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. def memoize(fn): '''Decorates |fn| to memoize. ''' memory = {} def impl(*args, **optargs): full_args = args + tuple(optargs.iteritems()) if full_args not in memory: memory[full_args] = fn(*args, **optargs) return memory[full_args] return impl
jaruba/chromium.src
tools/json_schema_compiler/memoize.py
Python
bsd-3-clause
434
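A short usage sketch for the memoize decorator above, assuming it is in scope: repeated calls with the same positional and keyword arguments are served from the internal dictionary instead of re-running the function.

@memoize
def slow_add(a, b, scale=1):
    print("computing ...")        # printed only on a cache miss
    return (a + b) * scale

slow_add(1, 2)             # computes and caches
slow_add(1, 2)             # served from the cache, nothing printed
slow_add(1, 2, scale=10)   # different keyword args -> new cache entry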
import struct import termios import fcntl import logging import ctypes import os from functools import wraps def set_size(fd, row, col, xpix=0, ypix=0): winsize = struct.pack("HHHH", row, col, xpix, ypix) fcntl.ioctl(fd, termios.TIOCSWINSZ, winsize) def get_size(fd): data = bytearray(8) fcntl.ioctl(fd, termios.TIOCGWINSZ, data, True) return struct.unpack("HHHH", data) STDIN = 0 CLONE_NEWNS = 0x00020000 CLONE_NEWUTS = 0x04000000 CLONE_NEWIPC = 0x08000000 CLONE_NEWUSER = 0x10000000 CLONE_NEWPID = 0x20000000 CLONE_NEWNET = 0x40000000 CLONE_NEWCGROUP = 0x02000000 # from include/uapi/linux/sched.h in Linux source code. MS_RDONLY = 1 MS_NOSUID = 2 MS_NODEV = 4 MS_NOEXEC = 8 MS_BIND = 4096 MS_REC = 16384 MS_PRIVATE = 262144 MS_STRICTATIME = 16777216 MNT_FORCE = 1 MNT_DETACH = 2 MNT_EXPIRE = 4 UMOUNT_NOFOLLOW = 8 _libc = ctypes.CDLL("libc.so.6", use_errno=True) def require_root(fn): @wraps(fn) def wrapper(*kargs, **kwargs): if os.geteuid() != 0: raise Exception("This operation requires root permission") return fn(*kargs, **kwargs) return wrapper def sys_unshare(flags): if _libc.unshare(flags) != 0: raise OSError(ctypes.get_errno(), os.strerror(ctypes.get_errno())) @require_root def sys_mount(*kargs): logging.debug(repr(kargs)) kargs = [(karg.encode("utf-8") if isinstance(karg, str) else karg) for karg in kargs] if _libc.mount(*kargs) != 0: raise OSError(ctypes.get_errno(), os.strerror(ctypes.get_errno())) @require_root def sys_umount(target, flags=0): logging.debug(repr(target)) target = target.encode("utf-8") if _libc.umount2(target, flags) != 0: raise OSError(ctypes.get_errno(), os.strerror(ctypes.get_errno())) @require_root def sethostname(hostname: str): cstr = hostname.encode("utf-8") if _libc.sethostname(cstr, len(cstr)) != 0: raise OSError(ctypes.get_errno(), os.strerror(ctypes.get_errno())) class Pipe: """Inheritable pipes for IPC""" def __init__(self): self.r, self.w = os.pipe() os.set_inheritable(self.r, True) os.set_inheritable(self.w, True) def close_read(self): os.close(self.r) def close_write(self): os.close(self.w) def read(self, n): return os.read(self.r, n) def write(self, bs): os.write(self.w, bs)
zydiig/Container
syscalls.py
Python
gpl-3.0
2,382
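The `Pipe` helper above exists so a parent and a forked child can talk over inheritable file descriptors. A minimal parent/child sketch, assuming the module is importable as `syscalls` (its path in the repository); the namespace helpers such as `sys_unshare`, `sys_mount`, and `sethostname` additionally require root.

import os
from syscalls import Pipe

pipe = Pipe()
pid = os.fork()
if pid == 0:                      # child: write one message and exit
    pipe.close_read()
    pipe.write(b"ready")
    pipe.close_write()
    os._exit(0)
else:                             # parent: wait for the child's message
    pipe.close_write()
    print(pipe.read(16))          # b'ready'
    pipe.close_read()
    os.waitpid(pid, 0)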
CSRF_ENABLED = True DEBUG = True SECRET_KEY = '6e9d6e59ad52278806294952fbd3a263' SQLALCHEMY_DATABASE_URI = 'sqlite:///bizdb.db'
akaak/flask-mega-tutorial
part-iii-forms/config.py
Python
bsd-3-clause
130
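These constants follow the Flask convention of a plain `config.py` module that the application loads wholesale. A typical loading sketch (the application module itself is not part of this file):

from flask import Flask

app = Flask(__name__)
# Picks up CSRF_ENABLED, DEBUG, SECRET_KEY, and SQLALCHEMY_DATABASE_URI.
app.config.from_object('config')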
import numpy as np v1 = np.array([1, 2, 3]) v2 = np.array([4, 5, 6]) p = np.cross(v1, v2) print(p) """< [-3  6 -3] >"""
pythonpatterns/patterns
p0141.py
Python
unlicense
122
import os import sys from fnmatch import fnmatchcase from distutils.util import convert_path from setuptools import setup, find_packages def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() # Provided as an attribute, so you can append to these instead # of replicating them: standard_exclude = ('*.py', '*.pyc', '*$py.class', '*~', '.*', '*.bak') standard_exclude_directories = ('.*', 'CVS', '_darcs', './build', './dist', 'EGG-INFO', '*.egg-info') # (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org) # Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php # Note: you may want to copy this into your setup.py file verbatim, as # you can't import this from another package, when you don't know if # that package is installed yet. def find_package_data( where='.', package='', exclude=standard_exclude, exclude_directories=standard_exclude_directories, only_in_packages=True, show_ignored=False): """ Return a dictionary suitable for use in ``package_data`` in a distutils ``setup.py`` file. The dictionary looks like:: {'package': [files]} Where ``files`` is a list of all the files in that package that don't match anything in ``exclude``. If ``only_in_packages`` is true, then top-level directories that are not packages won't be included (but directories under packages will). Directories matching any pattern in ``exclude_directories`` will be ignored; by default directories with leading ``.``, ``CVS``, and ``_darcs`` will be ignored. If ``show_ignored`` is true, then all the files that aren't included in package data are shown on stderr (for debugging purposes). Note patterns use wildcards, or can be exact paths (including leading ``./``), and all searching is case-insensitive. """ out = {} stack = [(convert_path(where), '', package, only_in_packages)] while stack: where, prefix, package, only_in_packages = stack.pop(0) for name in os.listdir(where): fn = os.path.join(where, name) if os.path.isdir(fn): bad_name = False for pattern in exclude_directories: if (fnmatchcase(name, pattern) or fn.lower() == pattern.lower()): bad_name = True if show_ignored: print >> sys.stderr, ( "Directory %s ignored by pattern %s" % (fn, pattern)) break if bad_name: continue if (os.path.isfile(os.path.join(fn, '__init__.py')) and not prefix): if not package: new_package = name else: new_package = package + '.' 
+ name stack.append((fn, '', new_package, False)) else: stack.append((fn, prefix + name + '/', package, only_in_packages)) elif package or not only_in_packages: # is a file bad_name = False for pattern in exclude: if (fnmatchcase(name, pattern) or fn.lower() == pattern.lower()): bad_name = True if show_ignored: print >> sys.stderr, ( "File %s ignored by pattern %s" % (fn, pattern)) break if bad_name: continue out.setdefault(package, []).append(prefix+name) return out README = read('README.rst') VERSION = __import__("servee_document").__version__ setup( name = "django-servee-document", version = VERSION, url = 'http://github.com/servee/django-servee-document/', license = 'BSD', description = "Document Plugin for django-servee", long_description = README, author = 'Issac Kelly', author_email = 'issac@servee.com', packages = find_packages(exclude=["example_project*",]), package_data = find_package_data('servee_document',only_in_packages=False), install_requires = [ # 'django-servee', 'Pillow', 'easy-thumbnails', ], classifiers = [ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', ], zip_safe = False, )
servee/django-servee-document
setup.py
Python
bsd-3-clause
4,836
"""config.py Parse the daemon config file""" __author__ = "Wim Leers (work@wimleers.com)" __version__ = "$Rev$" __date__ = "$Date$" __license__ = "GPL" import os import os.path import xml.etree.ElementTree as etree from xml.parsers.expat import ExpatError import re import logging from filter import * # Define exceptions. class ConfigError(Exception): pass class SourceDoesNotExist(ConfigError): pass class Config(object): def __init__(self, parent_logger): self.ignored_dirs = [] self.sources = {} self.servers = {} self.rules = {} self.logger = logging.getLogger(".".join([parent_logger, "Config"])) self.errors = 0 self.source_name_regex = re.compile('^[a-zA-Z0-9-_]*$', re.UNICODE) @classmethod def __ensure_unicode(cls, string): # If the string is already in Unicode, there's nothing we need to do. if type(string) == type(u'.'): return string # Otherwise, decode it from UTF-8 (which is config.xml's encoding). elif type(string) == type('.'): return string.decode('utf-8') # Finally, we may not really be receiving a string. else: return string def load(self, filename): try: doc = etree.parse(filename) root = doc.getroot() self.logger.info("Parsing sources.") self.__parse_sources(root) self.logger.info("Parsing servers.") self.__parse_servers(root) self.logger.info("Parsing rules.") self.__parse_rules(root) except ExpatError, e: self.logger.error("The XML file is invalid; %s." % (e)) self.errors += 1 return self.errors def __parse_sources(self, root): sources = root.find("sources") # Globally ignored directories. self.ignored_dirs = Config.__ensure_unicode(sources.get("ignoredDirs", "")) # If set, validate the globally ignored directories by trying to # create a Filter object for it. if self.ignored_dirs != "": try: conditions = {"ignoredDirs" : self.ignored_dirs} f = Filter(conditions) except FilterError, e: message = e.message if message == "": message = "none" self.logger.error("Invalid ignoredDirs attribute for the sources node: %s (details: \"%s\")." % (e.__class__.__name__, message)) self.errors += 1 for source in sources: name = Config.__ensure_unicode(source.get("name")) scan_path = Config.__ensure_unicode(source.get("scanPath")) document_root = Config.__ensure_unicode(source.get("documentRoot")) base_path = Config.__ensure_unicode(source.get("basePath")) self.sources[name] = { "name" : name, "scan_path" : scan_path, "document_root" : document_root, "base_path" : base_path, } # Validate. if not self.source_name_regex.match(name): self.logger.error("The name '%s' for a source is invalid. Only use alphanumeric characters, the dash and the underscore." % (name)) self.errors += 1 if scan_path is None: self.logger.error("The %s scan path is not configured." % (name)) self.errors += 1 elif not os.path.exists(scan_path): self.logger.error("The %s scan path ('%s') does not exist." % (name, scan_path)) self.errors += 1 if not document_root is None and not os.path.exists(document_root): self.logger.error("The %s document root ('%s') does not exist." % (name, document_root)) self.errors += 1 if not base_path is None and (base_path[0] != "/" or base_path[-1] != "/"): self.logger.error("The %s base path ('%s') is invalid. It should have both leading and trailing slashes." % (name, base_path)) self.errors += 1 if not document_root is None and not base_path is None: site_path = os.path.join(document_root, base_path[1:]) if not os.path.exists(site_path): self.logger.warning("The %s site path (the base path within the document root, '%s') does not exist. 
It is assumed that this is a logical base path then, due to usage of symbolic links." % (name, site_path)) def __parse_servers(self, root): servers_node = root.find("servers") for server_node in servers_node: settings = {} name = Config.__ensure_unicode(server_node.get("name")) transporter = Config.__ensure_unicode(server_node.get("transporter")) maxConnections = server_node.get("maxConnections", 0) for setting in server_node.getchildren(): settings[setting.tag] = Config.__ensure_unicode(setting.text) self.servers[name] = { "maxConnections" : int(maxConnections), "transporter" : transporter, "settings" : settings, } def __parse_rules(self, root): rules_node = root.find("rules") for rule_node in rules_node: for_source = Config.__ensure_unicode(rule_node.get("for")) label = Config.__ensure_unicode(rule_node.get("label")) deletion_delay = rule_node.get("fileDeletionDelayAfterSync", None) if deletion_delay is not None: deletion_delay = int(deletion_delay) # 1: filter (optional) conditions = None filter_node = rule_node.find("filter") if not filter_node is None: conditions = self.__parse_filter(filter_node, label) # 2: processorChain (optional) processor_chain = None processor_chain_node = rule_node.find("processorChain") if not processor_chain_node is None: processor_chain = self.__parse_processor_chain(processor_chain_node, label) # 3: destinations (required) destinations = {} destinations_node = rule_node.find("destinations") if destinations_node is None or len(destinations_node) == 0: self.logger.error("In rule '%s': at least one destination must be configured." % (label)) self.errors += 1 else: for destination_node in destinations_node: destination = self.__parse_destination(destination_node, label) destinations[destination["server"]] = {"path" : destination["path"]} if not self.rules.has_key(for_source): self.rules[for_source] = [] self.rules[for_source].append({ "label" : Config.__ensure_unicode(label), "deletionDelay" : deletion_delay, "filterConditions": conditions, "processorChain" : processor_chain, "destinations" : destinations, }) def __parse_filter(self, filter_node, rule_label): conditions = {} for condition_node in filter_node.getchildren(): if condition_node.tag == "size": conditions[condition_node.tag] = { "conditionType" : Config.__ensure_unicode(condition_node.get("conditionType")), "treshold" : Config.__ensure_unicode(condition_node.text), } else: conditions[condition_node.tag] = Config.__ensure_unicode(condition_node.text) # Validate the conditions by trying to create a Filter object with it. try: f = Filter(conditions) except FilterError, e: message = e.message if message == "": message = "none" self.logger.error("In rule '%s': invalid filter condition: %s (details: \"%s\")." % (rule_label, e.__class__.__name__, message)) self.errors += 1 return conditions def __parse_processor_chain(self, processor_chain_node, rule_label): processor_chain = [] for processor_node in processor_chain_node.getchildren(): processor_chain.append(Config.__ensure_unicode(processor_node.get("name"))) return processor_chain def __parse_destination(self, destination_node, rule_label): destination = {} destination["server"] = Config.__ensure_unicode(destination_node.get("server")) destination["path"] = Config.__ensure_unicode(destination_node.get("path", None)) # Validate "server" attribute. if destination["server"] is None: self.logger.error("In rule '%s': invalid destination: 'server' attribute is missing." 
% (rule_label)) self.errors += 1 elif destination["server"] not in self.servers.keys(): self.logger.error("In rule '%s': invalid destination: 'server' attribute references a non-existing server." % (rule_label)) self.errors += 1 return destination if __name__ == '__main__': import logging.handlers # Set up logging. logger = logging.getLogger("test") logger.setLevel(logging.DEBUG) handler = logging.handlers.RotatingFileHandler("config.log") logger.addHandler(handler) # Use the Config class. config = Config("test") config.load("config.xml") print "ignoredDirs", config.ignored_dirs print "sources", config.sources print "servers", config.servers print "rules", config.rules
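# --- Editorial usage sketch, not part of the original config.py --------------
# A minimal config.xml accepted by the Config parser above. The element and
# attribute names (sources/source, servers/server, rules/rule, destinations/
# destination) are taken directly from the __parse_* methods; the root tag,
# the server settings and every concrete value are made-up placeholders, and
# running this still requires fileconveyor's `filter` module to be importable.
MINIMAL_CONFIG = """\
<config>
  <sources>
    <source name="example" scanPath="/tmp" />
  </sources>
  <servers>
    <server name="origin" transporter="ftp" maxConnections="2">
      <host>ftp.example.com</host>
    </server>
  </servers>
  <rules>
    <rule for="example" label="static files">
      <destinations>
        <destination server="origin" path="/static" />
      </destinations>
    </rule>
  </rules>
</config>
"""

def _demo_load():
    # Write the sample configuration to disk and parse it; load() returns the
    # number of validation errors (0 when every referenced path and server
    # checked out). Call this by hand to try it.
    with open("example-config.xml", "w") as sample:
        sample.write(MINIMAL_CONFIG)
    config = Config("demo")
    errors = config.load("example-config.xml")
    print "errors :", errors
    print "servers:", config.servers
    print "rules  :", config.rules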
edx/fileconveyor
fileconveyor/config.py
Python
unlicense
9,854
# Errors
# (C) Poren Chiang 2020

class WeatherParseError(ValueError):
    """Raised when the module failed to parse the source string."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args)
        self.text = kwargs.get('text')
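# --- Editorial usage sketch, not part of the original exceptions.py ----------
# Shows the intended use of the `text` keyword: keep the raw source string on
# the exception so callers can log exactly what failed to parse. The
# parse_temperature() helper below is hypothetical, not part of ntuweather.
def parse_temperature(raw):
    try:
        return float(raw.replace('°C', '').strip())
    except ValueError:
        raise WeatherParseError('could not parse temperature', text=raw)

try:
    parse_temperature('N/A')
except WeatherParseError as error:
    print('parse failed, source text was: {!r}'.format(error.text))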
rschiang/ntu-weather
ntuweather/exceptions.py
Python
agpl-3.0
251
from querylist.dict import BetterDict
from querylist.fieldlookup import field_lookup


class QueryList(list):
    """A QueryList is an extension of Python's built in list data structure
    that adds easy filtering, excluding, and retrieval of member objects.

    >>> from querylist import QueryList
    >>> sites = QueryList(get_sites())
    >>> sites.exclude(published=True)
    [{'url': 'http://site3.tld/', 'published': False}]

    Keyword arguments:

    * data -- an iterable representing the data to be queried.
    * wrapper -- a callable that can convert data's elements to objects
      that are compatible with QueryList
    * wrap -- Boolean toggle to indicate whether or not to call wrapper on
      each element in data on instantiation. Set to false if data's elements
      are already compatible with QueryList.

    """
    def __init__(self, data=None, wrapper=BetterDict, wrap=True):
        """Create a QueryList from an iterable and a wrapper object."""
        self._wrapper = wrapper
        self._wrap = wrap
        self.src_data = data

        converted_data = data or []

        # Wrap our src_data with wrapper
        if self._wrap:
            converted_data = self._convert_iterable(data) if data else []

        super(QueryList, self).__init__(converted_data)

    def __add__(self, y):
        """Return a new QueryList containing itself and the passed iterable.

        Note that addition operations may result in QueryLists with mixed
        wrappers. Consider

        >>> a = QueryList(some_data)
        >>> b = QueryList(some_other_data, wrap=False)
        >>> c = a + b

        The resulting QueryList `c` will contain a mixture of BetterDicts
        (QueryList a's members) and dicts (QueryList b's members) assuming
        both `some_data` and `some_other_data` are lists of dictionaries.

        """
        return QueryList(data=super(QueryList, self).__add__(y), wrap=False)

    @property
    def count(self):
        """Returns the number of objects in the QueryList."""
        return len(self)

    def _convert_iterable(self, iterable):
        """Converts elements returned by an iterable into instances of
        self._wrapper

        """
        # Return original if _wrapper isn't callable
        if not callable(self._wrapper):
            return iterable

        return [self._wrapper(x) for x in iterable]

    def _check_element(self, lookup_strings, instance):
        """Return True if lookup string/value pairs match against the passed
        object.

        """
        for q, val in lookup_strings.items():
            if not field_lookup(instance, q, val, True):
                return False

        return True

    def get(self, **kwargs):
        """Returns the first object encountered that matches the specified
        lookup parameters.

        >>> site_list.get(id=1)
        {'url': 'http://site1.tld/', 'published': False, 'id': 1}
        >>> site_list.get(published=True, id__lt=3)
        {'url': 'http://site1.tld/', 'published': True, 'id': 2}
        >>> site_list.filter(published=True).get(id__lt=3)
        {'url': 'http://site1.tld/', 'published': True, 'id': 2}

        If the QueryList contains multiple elements that match the criteria,
        only the first match will be returned. Use ``filter()`` to retrieve
        the entire set.

        If no match is found in the QueryList, the method will raise a
        ``NotFound`` exception.

        >>> site_list.get(id=None)
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
          File "querylist/list.py", line 113, in get
            "Element not found with attributes: %s" % kv_str)
        querylist.list.NotFound: Element not found with attributes: id=None

        """
        for x in self:
            if self._check_element(kwargs, x):
                return x

        kv_str = self._stringify_kwargs(kwargs)
        raise QueryList.NotFound(
            "Element not found with attributes: %s" % kv_str)

    def exclude(self, **kwargs):
        """Generates a QueryList containing the subset of objects from this
        QueryList that do **not** match the provided field lookups.

        The following example returns the subset of a QueryList named
        ``site_list`` where the id is greater than 1000.

        >>> site_list.exclude(id__gt=1000)
        [{'url': 'http://site1001.tld/',...}, {...}],

        In the next example, ``exclude()`` returns the subset of objects from
        site_list that are not both published and carrying "test" in their
        title.

        >>> site_list.exclude(published=True, title__icontains="test")
        [{'url': 'http://site1.tld/',...}, {...}]

        If all objects match the provided field lookups, then an empty
        QueryList is returned:

        >>> site_list.exclude(id__gt=0)
        []

        """
        return QueryList(
            data=(x for x in self if not self._check_element(kwargs, x)),
            wrapper=self._wrapper, wrap=False)

    def filter(self, **kwargs):
        """Generates a QueryList containing the subset of objects from this
        QueryList that match the provided set of field lookups.

        The following example returns the subset of a QueryList named
        ``site_list`` where published is equal to True:

        >>> site_list.filter(published=True)
        [{'url': 'http://site1.tld/',...}, {...}],

        Similarly, in the next example, ``filter()`` returns the subset of
        objects where object.meta.keywords contains the string 'kittens' and
        where the id property is greater than 100.

        >>> site_list.filter(meta__keywords__contains='kittens', id__gt=100)
        [{'url': 'http://site101.tld/',...}, {...}],

        If no objects match the provided field lookups, an empty QueryList
        is returned.

        >>> site_list.filter(id__gte=1000, published=False)
        []

        """
        return QueryList(
            data=(x for x in self if self._check_element(kwargs, x)),
            wrapper=self._wrapper, wrap=False)

    def _stringify_kwargs(self, kwargs):
        return ', '.join('%s=%s' % kv for kv in kwargs.items())

    class NotFound(Exception):
        pass
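# --- Editorial usage sketch, not part of the original list.py ----------------
# Exercises the documented API with small in-memory data; the field-lookup
# suffixes (__gt, __lt, __contains) are the ones shown in the docstrings above.
if __name__ == '__main__':
    sites = QueryList([
        {'id': 1, 'url': 'http://site1.tld/', 'published': False},
        {'id': 2, 'url': 'http://site2.tld/', 'published': True},
        {'id': 3, 'url': 'http://site3.tld/', 'published': True},
    ])

    print(sites.filter(published=True).count)    # 2
    print(sites.exclude(id__gt=1))               # only site 1 remains
    print(sites.get(published=True, id__gt=2))   # first match: site 3

    try:
        sites.get(id=999)
    except QueryList.NotFound:
        print('no site with id 999')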
thomasw/querylist
querylist/list.py
Python
mit
6,241