Dataset columns:
  max_stars_repo_path : string, length 3 to 269
  max_stars_repo_name : string, length 4 to 119
  max_stars_count     : int64, 0 to 191k
  id                  : string, length 1 to 7
  content             : string, length 6 to 1.05M
  score               : float64, 0.23 to 5.13
  int_score           : int64, 0 to 5
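A minimal sketch of loading and filtering rows with this schema, assuming the dump is stored as a parquet file named data.parquet and read with pandas (both the file name and the pandas route are assumptions, not stated in the dump itself):

import pandas as pd

# Load only the columns described above (assumed parquet layout).
cols = ["max_stars_repo_path", "max_stars_repo_name", "max_stars_count",
        "id", "content", "score", "int_score"]
df = pd.read_parquet("data.parquet", columns=cols)

# Example filter: keep higher-quality rows (int_score runs 0 to 5 per the schema).
high_quality = df[df["int_score"] >= 3]

for _, row in high_quality.iterrows():
    print(row["max_stars_repo_path"], row["score"], len(row["content"]))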
src/gan-dogs/model/loss.py
caciolai/Generative-Dog-Images-with-BigGan
0
12783951
import tensorflow as tf

from .utils import noisy_labels, smooth_fake_labels, smooth_real_labels, CONFIG


class SGANDiscriminatorLoss(tf.keras.losses.Loss):
    def __init__(self):
        """Standard GAN loss for discriminator."""
        super().__init__()
        self.bce = tf.keras.losses.BinaryCrossentropy(from_logits=True)

    def call(self, real_output, fake_output):
        """Loss for the discriminator.

        Applies technique from GAN hacks to stabilize training:
        - Label smoothing
        - Label noise

        Args:
            real_output (tf.Tensor): output of discriminator on real images
            fake_output (tf.Tensor): output of discriminator on fake images

        Returns:
            float: discriminator loss
        """
        # Real images must be predicted 1 (noised and smoothed)
        real_labels = tf.ones_like(real_output)
        if CONFIG["smooth_labels"]:
            real_labels = noisy_labels(real_labels, CONFIG["label_noise"])
            real_labels = smooth_real_labels(real_labels)

        # Fake images must be predicted 0 (noised and smoothed)
        fake_labels = tf.zeros_like(fake_output)
        if CONFIG["smooth_labels"]:
            fake_labels = noisy_labels(fake_labels, CONFIG["label_noise"])
            fake_labels = smooth_fake_labels(fake_labels)

        real_loss = self.bce(real_labels, real_output)
        fake_loss = self.bce(fake_labels, fake_output)
        total_loss = real_loss + fake_loss
        return total_loss


class SGANGeneratorLoss(tf.keras.losses.Loss):
    def __init__(self):
        """Standard GAN loss for generator."""
        super().__init__()
        self.bce = tf.keras.losses.BinaryCrossentropy(from_logits=True)

    def call(self, real_output, fake_output):
        """Loss for the generator.

        The generator must fool the discriminator, making it predict fake images as real.

        Args:
            real_output (tf.Tensor): output of the discriminator on real images
                (actually not used, just to comply with interface function signature)
            fake_output (tf.Tensor): output of the discriminator on fake (generated) images

        Returns:
            float: generator loss
        """
        loss = self.bce(tf.ones_like(fake_output), fake_output)
        return loss


class WGANDiscriminatorLoss(tf.keras.losses.Loss):
    def __init__(self) -> None:
        """Wasserstein loss for the 'critic' from `Wasserstein GAN`
        (https://arxiv.org/abs/1701.07875)."""
        super().__init__()

    def call(self, real_output, gen_output):
        # loss for the output of the discriminator on real images
        real_loss = tf.reduce_mean(real_output)
        # loss for the output of the discriminator on generated images
        gen_loss = tf.reduce_mean(gen_output)
        loss = gen_loss - real_loss
        return loss


class WGANGeneratorLoss(tf.keras.losses.Loss):
    def __init__(self) -> None:
        """Wasserstein loss for the generator from `Wasserstein GAN`
        (https://arxiv.org/abs/1701.07875)."""
        super().__init__()

    def call(self, real_output, gen_output):
        loss = -tf.reduce_mean(gen_output)
        return loss


class RaLSGANGeneratorLoss(tf.keras.losses.Loss):
    def __init__(self) -> None:
        """Loss for Relativistic average Least Square GAN (arXiv:1901.02474)."""
        super().__init__()

    def call(self, real_output, fake_output):
        real_loss = tf.reduce_mean(real_output - tf.reduce_mean(fake_output) + 1) ** 2
        fake_loss = tf.reduce_mean(fake_output - tf.reduce_mean(real_output) - 1) ** 2
        loss = (real_loss + fake_loss) / 2
        return loss


class RaLSGANDiscriminatorLoss(tf.keras.losses.Loss):
    def __init__(self) -> None:
        """Loss for Relativistic average Least Square GAN (arXiv:1901.02474)."""
        super().__init__()

    def call(self, real_output, fake_output):
        real_loss = tf.reduce_mean(real_output - tf.reduce_mean(fake_output) - 1) ** 2
        fake_loss = tf.reduce_mean(fake_output - tf.reduce_mean(real_output) + 1) ** 2
        loss = (real_loss + fake_loss) / 2
        return loss
2.6875
3
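A minimal usage sketch for the loss classes in the record above, assuming hypothetical generator and discriminator tf.keras.Model instances and pre-built optimizers; only the Wasserstein pair is shown, and no weight clipping or gradient penalty is included:

import tensorflow as tf

d_loss_fn = WGANDiscriminatorLoss()
g_loss_fn = WGANGeneratorLoss()

def train_step(real_images, generator, discriminator, g_opt, d_opt, noise_dim=128):
    # One simultaneous update of critic and generator on a single batch.
    noise = tf.random.normal([tf.shape(real_images)[0], noise_dim])
    with tf.GradientTape() as g_tape, tf.GradientTape() as d_tape:
        fake_images = generator(noise, training=True)
        real_output = discriminator(real_images, training=True)
        fake_output = discriminator(fake_images, training=True)
        d_loss = d_loss_fn(real_output, fake_output)
        g_loss = g_loss_fn(real_output, fake_output)
    d_grads = d_tape.gradient(d_loss, discriminator.trainable_variables)
    g_grads = g_tape.gradient(g_loss, generator.trainable_variables)
    d_opt.apply_gradients(zip(d_grads, discriminator.trainable_variables))
    g_opt.apply_gradients(zip(g_grads, generator.trainable_variables))
    return d_loss, g_loss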
SV_Detection/SourceCode/TestingCase/RunSniffles/RunSniffles.py
NCI-CGR/PacbioPipeline
0
12783952
import sys
import subprocess
import os

runSnifflesScript = "./call_sniffles.sh"
resultDir = "/CGF/Bioinformatics/Production/Wen/20200117_pacbio_snp_call/29461_WGS_cell_line/bam_location_ngmlr/SV/Sniffles"


class ClsSample:
    def __init__(self):
        self.strName = ""
        self.strPath = ""
        self.strBAM = ""

    def Init(self, strFullPathBAM):
        self.strName = os.path.basename(strFullPathBAM).split(".")[0]
        self.strPath = os.path.dirname(strFullPathBAM)
        self.strBAM = strFullPathBAM

    def Print(self):
        print("*************")
        print("strName:", self.strName)
        print("strPath:", self.strPath)
        print("strBAM :", self.strBAM)
        print("*************")
        print()

    def SubmitJob(self):
        strCurSampleDir = resultDir + "/" + self.strName
        if not os.path.exists(strCurSampleDir):
            CMD = "mkdir -p " + strCurSampleDir
            os.system(CMD)

        strCurSampleLogDir = strCurSampleDir + "/Log"
        if not os.path.exists(strCurSampleLogDir):
            CMD = "mkdir -p " + strCurSampleLogDir
            os.system(CMD)

        # --> Submit sge job -> Go!
        # CMD = ("bash " + runSnifflesScript + " " +
        #        "\"" + self.strName + "\" " +
        #        "\"" + self.strBAM + "\" " +
        #        "\"" + resultDir + "\"")
        # os.system(CMD)
        # # <--

        QUEUE = "all.q"
        CORES = "12"
        strLogStdOut = strCurSampleLogDir + "/_call_sv_" + self.strName + ".stdout"
        strLogStdErr = strCurSampleLogDir + "/_call_sv_" + self.strName + ".stderr"

        if os.path.exists(strLogStdOut):
            CMD = "rm " + strLogStdOut
            os.system(CMD)
        if os.path.exists(strLogStdErr):
            CMD = "rm " + strLogStdErr
            os.system(CMD)

        CMD = ("qsub -cwd -q " + QUEUE + " -pe by_node " + CORES + " " +
               "-o " + strLogStdOut + " " +
               "-e " + strLogStdErr + " " +
               "-N " + "SV.Sniffles." + self.strName + " " +
               "-S /bin/bash " + runSnifflesScript + " " +
               "\"" + self.strName + "\" " +
               "\"" + self.strBAM + "\" " +
               "\"" + strCurSampleDir + "\" " +
               "\"" + CORES + "\"")
        print("CMD:", CMD)
        print()
        os.system(CMD)
        print("\n", "***", "\n")


def main():
    strDir = sys.argv[1]

    # Find all bam in current fastq
    CMD = "find " + strDir + " -maxdepth 1 -type f -iname '*.bam'"
    vBAM = subprocess.getoutput(CMD).split('\n')

    vSample = []
    for strBAM in vBAM:
        objSample = ClsSample()
        objSample.Init(strBAM)
        vSample.append(objSample)

    for objSample in vSample:
        objSample.SubmitJob()


if __name__ == "__main__":
    main()
2.484375
2
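An alternative sketch of the qsub submission from the record above, built as an argument list and run through subprocess.run instead of string concatenation with os.system; the flags mirror those in the record, while the function name and default values are illustrative:

import subprocess

def submit_sniffles_job(sample_name, bam_path, out_dir, log_dir,
                        script="./call_sniffles.sh", queue="all.q", cores="12"):
    # Building the command as a list avoids manual shell quoting of paths.
    cmd = [
        "qsub", "-cwd",
        "-q", queue,
        "-pe", "by_node", cores,
        "-o", f"{log_dir}/_call_sv_{sample_name}.stdout",
        "-e", f"{log_dir}/_call_sv_{sample_name}.stderr",
        "-N", f"SV.Sniffles.{sample_name}",
        "-S", "/bin/bash",
        script, sample_name, bam_path, out_dir, cores,
    ]
    # check=True raises CalledProcessError if qsub itself fails.
    subprocess.run(cmd, check=True)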
qppwg/utils/features.py
bigpon/QPPWG
46
12783953
<filename>qppwg/utils/features.py<gh_stars>10-100
# -*- coding: utf-8 -*-

# Copyright 2020 <NAME> (Nagoya University)
# MIT License (https://opensource.org/licenses/MIT)

"""Feature-related functions."""

import numpy as np


def validate_length(x, y, hop_size=None):
    """Validate length.

    Args:
        x (ndarray): numpy array with x.shape[0] = len_x
        y (ndarray): numpy array with y.shape[0] = len_y
        hop_size (int): upsampling factor

    Returns:
        (ndarray): length adjusted x with same length y
        (ndarray): length adjusted y with same length x
    """
    if hop_size is None:
        if x.shape[0] < y.shape[0]:
            y = y[:x.shape[0]]
        if x.shape[0] > y.shape[0]:
            x = x[:y.shape[0]]
        assert len(x) == len(y)
    else:
        if x.shape[0] > y.shape[0] * hop_size:
            x = x[:y.shape[0] * hop_size]
        if x.shape[0] < y.shape[0] * hop_size:
            mod_y = y.shape[0] * hop_size - x.shape[0]
            mod_y_frame = mod_y // hop_size + 1
            y = y[:-mod_y_frame]
            x = x[:y.shape[0] * hop_size]
        assert len(x) == len(y) * hop_size

    return x, y


def batch_f0(h, f0_threshold=0, f0_cont=True, f0_idx=1, uv_idx=0):
    """Load f0.

    Args:
        h (ndarray): the auxiliary acoustic features (T x D)
        f0_threshold (float): the lower bound of pitch
        f0_cont (bool): True: return continuous f0; False: return discrete f0
        f0_idx: the dimension index of f0
        uv_idx: the dimension index of U/V

    Return:
        f0 (ndarray): float array of the f0 sequence (T)
    """
    if (f0_idx < 0) or (uv_idx < 0):
        f0 = np.zeros(h.shape[0])
    else:
        f0 = h[:, f0_idx].copy(order='C')
        f0[f0 < f0_threshold] = f0_threshold
        if not f0_cont:
            uv = h[:, uv_idx].copy(order='C')  # voice/unvoice feature
            f0[uv == 0] = 0

    return f0


def dilated_factor(batch_f0, fs, dense_factor):
    """Pitch-dependent dilated factor.

    Args:
        batch_f0 (ndarray): the f0 sequence (T)
        fs (int): sampling rate
        dense_factor (int): the number of taps in one cycle

    Return:
        dilated_factors (np array): float array of the pitch-dependent dilated factors (T)
    """
    batch_f0[batch_f0 == 0] = fs / dense_factor
    dilated_factors = np.ones(batch_f0.shape) * fs
    dilated_factors /= batch_f0
    dilated_factors /= dense_factor
    assert np.all(dilated_factors > 0)

    return dilated_factors
2.546875
3
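A small self-contained sketch exercising the helpers from the record above on dummy data; the feature layout (U/V in column 0, f0 in column 1), sampling rate, and dense factor are illustrative assumptions:

import numpy as np

T = 5
h = np.zeros((T, 4))
h[:, 0] = [1, 1, 0, 1, 1]                      # voiced/unvoiced flags (uv_idx=0)
h[:, 1] = [220.0, 230.0, 0.0, 240.0, 250.0]    # f0 in Hz (f0_idx=1)

f0 = batch_f0(h, f0_threshold=0, f0_cont=False)   # discrete f0: zero where unvoiced
d = dilated_factor(f0.copy(), fs=24000, dense_factor=4)

print(f0)  # [220. 230.   0. 240. 250.]
print(d)   # fs / (f0 * dense_factor), with unvoiced frames mapped to 1.0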
lib/improver/psychrometric_calculations/psychrometric_calculations.py
TomekTrzeciak/improver
0
12783954
<reponame>TomekTrzeciak/improver<filename>lib/improver/psychrometric_calculations/psychrometric_calculations.py # -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # (C) British Crown Copyright 2017-2019 Met Office. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Module to contain Psychrometric Calculations.""" import warnings import iris import numpy as np from cf_units import Unit from scipy.interpolate import griddata from scipy.spatial.qhull import QhullError from scipy.stats import linregress from stratify import interpolate import improver.constants as consts from improver.psychrometric_calculations import svp_table from improver.utilities.cube_checker import check_cube_coordinates from improver.utilities.mathematical_operations import Integration from improver.utilities.spatial import ( OccurrenceWithinVicinity, convert_number_of_grid_cells_into_distance) class Utilities(object): """ Utilities for psychrometric calculations. """ def __init__(self): """ Initialise class. """ pass def __repr__(self): """Represent the configured plugin instance as a string.""" result = ('<Utilities>') return result @staticmethod def specific_heat_of_moist_air(mixing_ratio): """ Calculate the specific heat capacity for moist air by combining that of dry air and water vapour in proportion given by the specific humidity. Args: mixing_ratio (iris.cube.Cube): Cube of specific humidity (fractional). Returns: iris.cube.Cube: Specific heat capacity of moist air (J kg-1 K-1). """ specific_heat = ((-1.*mixing_ratio + 1.) * consts.U_CP_DRY_AIR + mixing_ratio * consts.U_CP_WATER_VAPOUR) specific_heat.rename('specific_heat_capacity_of_moist_air') return specific_heat @staticmethod def latent_heat_of_condensation(temperature_input): """ Calculate a temperature adjusted latent heat of condensation for water vapour using the relationship employed by the UM. Args: temperature_input (iris.cube.Cube): A cube of air temperatures (Celsius, converted if not). Returns: iris.cube.Cube: Temperature adjusted latent heat of condensation (J kg-1). 
""" temperature = temperature_input.copy() temperature.convert_units('celsius') latent_heat = (-1. * consts.U_LATENT_HEAT_T_DEPENDENCE * temperature + consts.U_LH_CONDENSATION_WATER) latent_heat.units = consts.U_LH_CONDENSATION_WATER.units latent_heat.rename('latent_heat_of_condensation') return latent_heat @staticmethod def calculate_enthalpy(mixing_ratio, specific_heat, latent_heat, temperature): """ Calculate the enthalpy (total energy per unit mass) of air (J kg-1). Method from referenced UM documentation. References: Met Office UM Documentation Paper 080, UM Version 10.8, last updated 2014-12-05. Args: mixing_ratio (iris.cube.Cube): Cube of mixing ratios. specific_heat (iris.cube.Cube): Cube of specific heat capacities of moist air (J kg-1 K-1). latent_heat (iris.cube.Cube): Cube of latent heats of condensation of water vapour (J kg-1). temperature (iris.cube.Cube): A cube of air temperatures (K). Returns: enthalpy (iris.cube.Cube): A cube of enthalpy values calculated at the same points as the input cubes (J kg-1). """ enthalpy = latent_heat * mixing_ratio + specific_heat * temperature enthalpy.rename('enthalpy_of_air') return enthalpy @staticmethod def calculate_d_enthalpy_dt(mixing_ratio, specific_heat, latent_heat, temperature_input): """ Calculate the enthalpy gradient with respect to temperature. Method from referenced UM documentation. References: Met Office UM Documentation Paper 080, UM Version 10.8, last updated 2014-12-05. Args: mixing_ratio (iris.cube.Cube): Cube of mixing ratios. specific_heat (iris.cube.Cube): Cube of specific heat capacities of moist air (J kg-1 K-1). latent_heat (iris.cube.Cube): Cube of latent heats of condensation of water vapour (J kg-1). temperature_input (iris.cube.Cube): A cube of temperatures (K, or converted). Returns: iris.cube.Cube: A cube of the enthalpy gradient with respect to temperature. """ temperature = temperature_input.copy() temperature.convert_units('K') numerator = (mixing_ratio * latent_heat ** 2) denominator = consts.U_R_WATER_VAPOUR * temperature ** 2 return numerator/denominator + specific_heat @staticmethod def saturation_vapour_pressure_goff_gratch(temperature): """ Saturation Vapour pressure in a water vapour system calculated using the Goff-Gratch Equation (WMO standard method). Args: temperature (iris.cube.Cube): Cube of temperature which will be converted to Kelvin prior to calculation. Valid from 173K to 373K Returns: svp (iris.cube.Cube): Cube containing the saturation vapour pressure of a pure water vapour system. A correction must be applied to the data when used to convert this to the SVP in air; see the WetBulbTemperature.pressure_correct_svp function. References: Numerical data and functional relationships in science and technology. New series. Group V. Volume 4. Meteorology. Subvolume b. Physical and chemical properties of the air, P35. """ constants = {1: 10.79574, 2: 5.028, 3: 1.50475E-4, 4: -8.2969, 5: 0.42873E-3, 6: 4.76955, 7: 0.78614, 8: -9.09685, 9: 3.56654, 10: 0.87682, 11: 0.78614} triple_pt = consts.TRIPLE_PT_WATER # Values for which method is considered valid (see reference). WetBulbTemperature.check_range(temperature, 173., 373.) data = temperature.data.copy() for cell in np.nditer(data, op_flags=['readwrite']): if cell > triple_pt: n0 = constants[1] * (1. - triple_pt / cell) n1 = constants[2] * np.log10(cell / triple_pt) n2 = constants[3] * (1. - np.power(10., (constants[4] * (cell / triple_pt - 1.)))) n3 = constants[5] * (np.power(10., (constants[6] * (1. - triple_pt / cell))) - 1.) 
log_es = n0 - n1 + n2 + n3 + constants[7] cell[...] = (np.power(10., log_es)) else: n0 = constants[8] * ((triple_pt / cell) - 1.) n1 = constants[9] * np.log10(triple_pt / cell) n2 = constants[10] * (1. - (cell / triple_pt)) log_es = n0 - n1 + n2 + constants[11] cell[...] = (np.power(10., log_es)) # Create SVP cube svp = iris.cube.Cube( data, long_name='saturated_vapour_pressure', units='hPa') # Output of the Goff-Gratch is in hPa, but we want to return in Pa. svp.convert_units('Pa') return svp class WetBulbTemperature(object): """ A plugin to calculate wet bulb temperatures from air temperature, relative humidity, and pressure data. Calculations are performed using a Newton iterator, with saturated vapour pressures drawn from a lookup table using linear interpolation. The svp_table used in this plugin is imported (see top of file). It is a table of saturated vapour pressures calculated for a range of temperatures. The import also brings in attributes that describe the range of temperatures covered by the table and the increments in the table. """ def __init__(self, precision=0.005): """ Initialise class. Args: precision (float): The precision to which the Newton iterator must converge before returning wet bulb temperatures. """ self.precision = precision def __repr__(self): """Represent the configured plugin instance as a string.""" result = ('<WetBulbTemperature: precision: {}>'.format(self.precision)) return result @staticmethod def check_range(cube, low, high): """Function to wrap functionality for throwing out temperatures too low or high for a method to use safely. Args: cube (iris.cube.Cube): A cube of temperature. low (int or float): Lowest allowable temperature for check high (int or float): Highest allowable temperature for check Raises: UserWarning : If any of the values in cube.data are outside the bounds set by the low and high variables. """ if cube.data.max() > high or cube.data.min() < low: emsg = ("Wet bulb temperatures are being calculated for conditions" " beyond the valid range of the saturated vapour pressure" " lookup table (< {}K or > {}K). Input cube has\n" "Lowest temperature = {}\nHighest temperature = {}") warnings.warn(emsg.format(low, high, cube.data.min(), cube.data.max())) def lookup_svp(self, temperature): """ Looks up a value for the saturation vapour pressure of water vapour using the temperature and a table of values. These tabulated values have been calculated using the utilities.ancillary_creation SaturatedVapourPressureTable plugin that uses the Goff-Gratch method. Args: temperature (iris.cube.Cube): A cube of air temperatures (K). Returns: svp (iris.cube.Cube): A cube of saturated vapour pressures (Pa). """ # We subtract T_INCREMENT from T_MAX to get the upper bound to which we # clip input temperatures. This ensures that we do not attempt an # interpolation that requires a value beyond the SVP table maximum. T_max = svp_table.T_MAX - svp_table.T_INCREMENT T_min = svp_table.T_MIN delta_T = svp_table.T_INCREMENT self.check_range(temperature, T_min, T_max) temperatures = temperature.data T_clipped = np.clip(temperatures, T_min, T_max) # Note the indexing below differs by -1 compared with the UM due to # Python vs. Fortran indexing. table_position = (T_clipped - T_min + delta_T)/delta_T - 1. 
table_index = table_position.astype(int) interpolation_factor = table_position - table_index svps = ((1.0 - interpolation_factor) * svp_table.DATA[table_index] + interpolation_factor * svp_table.DATA[table_index + 1]) svp = temperature.copy(data=svps) svp.units = Unit('Pa') svp.rename("saturated_vapour_pressure") return svp @staticmethod def pressure_correct_svp(svp, temperature, pressure): """ Convert saturated vapour pressure in a pure water vapour system into the saturated vapour pressure in air. Method from referenced documentation. References: Atmosphere-Ocean Dynamics, <NAME>, International Geophysics Series, Vol. 30; Equation A4.7. Args: svp (iris.cube.Cube): A cube of saturated vapour pressures (Pa). temperature (iris.cube.Cube): A cube of air temperatures (K, converted to Celsius). pressure (iris.cube.Cube): Cube of pressure (Pa). Returns: svp (iris.cube.Cube): The input cube of saturated vapour pressure of air (Pa) is modified by the pressure correction. """ temp = temperature.copy() temp.convert_units('celsius') correction = (1. + 1.0E-8 * pressure.data * (4.5 + 6.0E-4 * temp.data ** 2)) svp.data = svp.data*correction return svp def _calculate_mixing_ratio(self, temperature, pressure): """Function to compute the mixing ratio given temperature and pressure. Args: temperature (iris.cube.Cube): Cube of air temperature (K). pressure (iris.cube.Cube): Cube of air pressure (Pa). Returns mixing_ratio (iris.cube.Cube): Cube of mixing ratios. Method from referenced documentation. Note that EARTH_REPSILON is simply given as an unnamed constant in the reference (0.62198). References: ASHRAE Fundamentals handbook (2005) Equation 22, 24, p6.8 """ svp = self.lookup_svp(temperature) svp = self.pressure_correct_svp(svp, temperature, pressure) # Calculation result_numer = (consts.EARTH_REPSILON * svp.data) max_pressure_term = np.maximum(svp.data, pressure.data) result_denom = (max_pressure_term - ((1. - consts.EARTH_REPSILON) * svp.data)) mixing_ratio = temperature.copy(data=result_numer / result_denom) # Tidying up cube mixing_ratio.rename("humidity_mixing_ratio") mixing_ratio.units = Unit("1") return mixing_ratio def calculate_wet_bulb_temperature(self, temperature, relative_humidity, pressure): """ Perform the calculation of wet bulb temperatures. A Newton iterator is used to minimise the gradient of enthalpy against temperature. Args: temperature (iris.cube.Cube): Cube of air temperatures (K). relative_humidity (iris.cube.Cube): Cube of relative humidities (%, converted to fractional). pressure (iris.cube.Cube): Cube of air pressures (Pa). Returns: wbt (iris.cube.Cube): Cube of wet bulb temperature (K). """ precision = np.full(temperature.data.shape, self.precision) # Set units of input diagnostics. relative_humidity.convert_units(1) pressure.convert_units('Pa') temperature.convert_units('K') # Calculate mixing ratios. saturation_mixing_ratio = self._calculate_mixing_ratio(temperature, pressure) mixing_ratio = relative_humidity * saturation_mixing_ratio # Calculate specific and latent heats. specific_heat = Utilities.specific_heat_of_moist_air(mixing_ratio) latent_heat = Utilities.latent_heat_of_condensation(temperature) # Calculate enthalpy. g_tw = Utilities.calculate_enthalpy(mixing_ratio, specific_heat, latent_heat, temperature) # Use air temperature as a first guess for wet bulb temperature. wbt = temperature.copy() wbt.rename('wet_bulb_temperature') delta_wbt = temperature.copy(data=(10. * precision)) delta_wbt_history = temperature.copy(data=(5. 
* precision)) max_iterations = 20 iteration = 0 # Iterate to find the wet bulb temperature while (np.abs(delta_wbt.data) > precision).any(): g_tw_new = Utilities.calculate_enthalpy( saturation_mixing_ratio, specific_heat, latent_heat, wbt) dg_dt = Utilities.calculate_d_enthalpy_dt( saturation_mixing_ratio, specific_heat, latent_heat, wbt) delta_wbt = (g_tw - g_tw_new) / dg_dt # Only change values at those points yet to converge to avoid # oscillating solutions (the now fixed points are still calculated # unfortunately). unfinished = np.where(np.abs(delta_wbt.data) > precision) wbt.data[unfinished] = (wbt.data[unfinished] + delta_wbt.data[unfinished]) # If the errors are identical between two iterations, stop. if (np.array_equal(delta_wbt.data, delta_wbt_history.data) or iteration > max_iterations): warnings.warn('No further refinement occurring; breaking out ' 'of Newton iterator and returning result.') break delta_wbt_history = delta_wbt iteration += 1 # Recalculate the saturation mixing ratio saturation_mixing_ratio = self._calculate_mixing_ratio( wbt, pressure) return wbt def process(self, temperature, relative_humidity, pressure): """ Call the calculate_wet_bulb_temperature function to calculate wet bulb temperatures. This process function splits input cubes over vertical levels to mitigate memory issues when trying to operate on multi-level data. Args: temperature (iris.cube.Cube): Cube of air temperatures (K). relative_humidity (iris.cube.Cube): Cube of relative humidities (%, converted to fractional). pressure (iris.cube.Cube): Cube of air pressures (Pa). Returns: wet_bulb_temperature (iris.cube.Cube): Cube of wet bulb temperature (K). """ try: vertical_coords = [cube.coord(axis='z').name() for cube in [temperature, relative_humidity, pressure] if cube.coord_dims(cube.coord(axis='z')) != ()] except iris.exceptions.CoordinateNotFoundError: vertical_coords = [] if len(vertical_coords) == 3 and len(set(vertical_coords)) == 1: level_coord, = set(vertical_coords) temperature_over_levels = temperature.slices_over(level_coord) relative_humidity_over_levels = relative_humidity.slices_over( level_coord) pressure_over_levels = pressure.slices_over(level_coord) slices = zip(temperature_over_levels, relative_humidity_over_levels, pressure_over_levels) elif len(vertical_coords) > 0 and len(set(vertical_coords)) != 1: raise ValueError('WetBulbTemperature: Cubes have differing ' 'vertical coordinates.') else: slices = [(temperature, relative_humidity, pressure)] cubelist = iris.cube.CubeList([]) for t_slice, rh_slice, p_slice in slices: cubelist.append(self.calculate_wet_bulb_temperature( t_slice, rh_slice, p_slice)) wet_bulb_temperature = cubelist.merge_cube() wet_bulb_temperature = check_cube_coordinates(temperature, wet_bulb_temperature) return wet_bulb_temperature class WetBulbTemperatureIntegral(object): """Calculate a wet-bulb temperature integral.""" def __init__(self, precision=0.005, coord_name_to_integrate="height", start_point=None, end_point=None, direction_of_integration="negative"): """ Initialise class. Args: precision (float): The precision to which the Newton iterator must converge before returning wet bulb temperatures. coord_name_to_integrate (str): Name of the coordinate to be integrated. start_point (float or None): Point at which to start the integration. Default is None. If start_point is None, integration starts from the first available point. end_point (float or None): Point at which to end the integration. Default is None. 
If end_point is None, integration will continue until the last available point. direction_of_integration (str): Description of the direction in which to integrate. Options are 'positive' or 'negative'. 'positive' corresponds to the values within the array increasing as the array index increases. 'negative' corresponds to the values within the array decreasing as the array index increases. """ self.wet_bulb_temperature_plugin = ( WetBulbTemperature(precision=precision)) self.integration_plugin = Integration( coord_name_to_integrate, start_point=start_point, end_point=end_point, direction_of_integration=direction_of_integration) self.coord_name_to_integrate = coord_name_to_integrate def __repr__(self): """Represent the configured plugin instance as a string.""" result = ('<WetBulbTemperatureIntegral: {}, {}>'.format( self.wet_bulb_temperature_plugin, self.integration_plugin)) return result def process(self, temperature, relative_humidity, pressure): """ Calculate the wet bulb temperature integral by firstly calculating the wet bulb temperature from the inputs provided, and then calculating the vertical integral of the wet bulb temperature. Args: temperature (iris.cube.Cube): Cube of air temperatures (K). relative_humidity (iris.cube.Cube): Cube of relative humidities (%, converted to fractional). pressure (iris.cube.Cube): Cube of air pressures (Pa). Returns: (tuple): tuple containing **wet_bulb_temperature** (iris.cube.Cube) - Cube on wet bulb temperatures on height levels (celsius) **wet_bulb_temperature_integral** (iris.cube.Cube) - Cube of wet bulb temperature integral (Kelvin-metres). """ # Calculate wet-bulb temperature. wet_bulb_temperature = ( self.wet_bulb_temperature_plugin.process( temperature, relative_humidity, pressure)) # Convert to Celsius wet_bulb_temperature.convert_units('celsius') # Integrate. wet_bulb_temperature_integral = ( self.integration_plugin.process(wet_bulb_temperature)) wet_bulb_temperature_integral.rename("wet_bulb_temperature_integral") units_string = "K {}".format( wet_bulb_temperature.coord(self.coord_name_to_integrate).units) wet_bulb_temperature_integral.units = Unit(units_string) return wet_bulb_temperature, wet_bulb_temperature_integral class FallingSnowLevel(object): """Calculate a field of continuous falling snow level.""" def __init__(self, precision=0.005, falling_level_threshold=90.0, grid_point_radius=2): """ Initialise class. Args: precision (float): The precision to which the Newton iterator must converge before returning wet bulb temperatures. falling_level_threshold (float): The cutoff threshold for the Wet-bulb integral used to calculate the falling snow level.We are integrating to the threshold that is presumed to indicate the level at which snow has melted back to rain. Above this level we should have falling snow. grid_point_radius (int): The radius in grid points used to calculate the maximum height of the orography in a neighbourhood as part of this calculation. 
""" self.precision = precision self.wet_bulb_integral_plugin = ( WetBulbTemperatureIntegral(precision=precision)) self.falling_level_threshold = falling_level_threshold self.missing_data = -300.0 self.grid_point_radius = grid_point_radius def __repr__(self): """Represent the configured plugin instance as a string.""" result = ('<FallingSnowLevel: precision:' '{}, falling_level_threshold:{}, ' 'grid_point_radius: {}>'.format( self.precision, self.falling_level_threshold, self.grid_point_radius)) return result def find_falling_level(self, wb_int_data, orog_data, height_points): """ Find the falling snow level by finding the level of the wet-bulb integral data at the required threshold. Wet-bulb integral data is only available above ground level and there may be an insufficient number of levels in the input data, in which case the required threshold may lie outside the Wet-bulb integral data and the value at that point will be set to np.nan. Args: wb_int_data (numpy.ndarray): Wet bulb integral data on heights orog_data (numpy.ndarray): Orographic data height_points (numpy.ndarray): heights agl Returns: snow_level_data (numpy.ndarray): Falling snow level data asl. """ # Create cube of heights above sea level for each height in # the wet bulb integral cube. asl = wb_int_data.copy() for i, height in enumerate(height_points): asl[i, ::] = orog_data + height # Calculate falling snow level above sea level by # finding the level corresponding to the falling_level_threshold. # Interpolate returns an array with height indice # for falling_level_threshold so we take the 0 index snow_level_data = interpolate(np.array([self.falling_level_threshold]), wb_int_data, asl, axis=0)[0] return snow_level_data def fill_in_high_snow_falling_levels( self, snow_level_data, orog_data, highest_wb_int_data, highest_height): """ Fill in any data in the snow falling level where the whole wet bulb temperature integral is above the the threshold. Set these points to the highest height level + orography. Args: snow_level_data (numpy.ndarray): Falling snow level data (m). orog_data (numpy.ndarray): Orographic data (m) highest_wb_int_data (numpy.ndarray): Wet bulb integral data on highest level (K m). highest_height (float): Highest height at which the integral starts (m). """ points_not_freezing = np.where( np.isnan(snow_level_data) & (highest_wb_int_data > self.falling_level_threshold)) snow_level_data[points_not_freezing] = ( highest_height + orog_data[points_not_freezing]) def find_extrapolated_falling_level(self, max_wb_integral, gradient, intercept, snow_falling_level, sea_points): r""" Find the snow falling level below sea level using the linear extrapolation of the wet bulb temperature integral and update the snow falling level array with these values. The snow falling level is calculated from finding the point where the integral of wet bulb temperature crosses the falling level threshold. In cases where the wet bulb temperature integral has not reached the threshold by the time we reach sea level, we can find a fit to the wet bulb temperature profile near the surface, and use this to estimate where the snow falling level would be below sea level. The difference between the wet bulb temperature integral at the threshold and the wet bulb integral at the surface is equal to the integral of the wet bulb temperature between sea level and the negative height corresponding to the snow falling level. As we are using a simple linear fit, we can integrate this to find an expression for the extrapolated snow falling level. 
The form of this expression depends on whether the linear fit of wet bulb temperature crosses the height axis above or below zero altitude. If we have our linear fit of the form: .. math:: {{wet\:bulb\:temperature} = m \times height + c} and let :math:`I` be the wet bulb temperature integral we have found above sea level. If it crosses above zero, then the limits on the integral are the snow falling level and zero and we find the following expression for the snow falling level: .. math:: {{snow\:falling\:level} = \frac{c \pm \sqrt{ c^2-2 m (threshold-I)}}{-m}} If the linear fit crosses below zero the limits on our integral are the snow falling level and the point where the linear fit crosses the height axis, as only positive wet bulb temperatures count towards the integral. In this case our expression for the snow falling level is: .. math:: {{snow\:falling\:level} = \frac{c \pm \sqrt{ 2 m (I-threshold)}}{-m}} Args: max_wb_integral (numpy.ndarray): The wet bulb temperature integral at sea level. gradient (numpy.ndarray): The gradient of the line of best fit we are using in the extrapolation. intercept (numpy.ndarray): The intercept of the line of best fit we are using in the extrapolation. snow_falling_level (numpy.ndarray): The snow falling level array with values filled in with snow falling levels calculated through extrapolation. sea_points (numpy.ndarray): A boolean array with True where the points are sea points. """ # Make sure we only try to extrapolate points with a valid gradient. index = (gradient < 0.0) & sea_points gradient = gradient[index] intercept = intercept[index] max_wb_int = max_wb_integral[index] snow_fl = snow_falling_level[index] # For points where -intercept/gradient is greater than zero: index2 = (-intercept/gradient >= 0.0) inside_sqrt = ( intercept[index2]**2 - 2*gradient[index2]*( self.falling_level_threshold - max_wb_int[index2])) snow_fl[index2] = ( (intercept[index2] - np.sqrt(inside_sqrt))/-gradient[index2]) # For points where -intercept/gradient is less than zero: index2 = (-intercept/gradient < 0.0) inside_sqrt = ( 2*gradient[index2]*( max_wb_int[index2] - self.falling_level_threshold)) snow_fl[index2] = ( (intercept[index2] - np.sqrt(inside_sqrt))/-gradient[index2]) # Update the snow falling level. Clip to ignore extremely negative # snow falling levels. snow_fl = np.clip(snow_fl, -2000, np.inf) snow_falling_level[index] = snow_fl @staticmethod def linear_wet_bulb_fit(wet_bulb_temperature, heights, sea_points, start_point=0, end_point=5): """ Calculates a linear fit to the wet bulb temperature profile close to the surface to use when we extrapolate the wet bulb temperature below sea level for sea points. We only use a set number of points close to the surface for this fit, specified by a start_point and end_point. Args: wet_bulb_temperature (numpy.ndarray): The wet bulb temperature profile at each grid point, with height as the leading dimension. heights (numpy.ndarray): The vertical height levels above orography, matching the leading dimension of the wet_bulb_temperature. sea_points (numpy.ndarray): A boolean array with True where the points are sea points. start_point (int): The index of the the starting height we want to use in our linear fit. end_point (int): The index of the the end height we want to use in our linear fit. 
Returns: (tuple): tuple containing **gradient** (numpy.ndarray) - An array, the same shape as a 2D slice of the wet_bulb_temperature input, containing the gradients of the fitted straight line at each point where it could be found, filled with zeros elsewhere. **intercept** (numpy.ndarray) - An array, the same shape as a 2D slice of the wet_bulb_temperature input, containing the intercepts of the fitted straight line at each point where it could be found, filled with zeros elsewhere. """ def fitting_function(wet_bulb_temps): """ A small helper function used to find a linear fit of the wet bulb temperature. """ return linregress( heights[start_point:end_point], wet_bulb_temps[start_point:end_point]) # Set up empty arrays for gradient and intercept gradient = np.zeros(wet_bulb_temperature[0].shape) intercept = np.zeros(wet_bulb_temperature[0].shape) if np.any(sea_points): # Make the 1D sea point array 3D to account for the height axis # on the wet bulb temperature array. index3d = np.broadcast_to(sea_points, wet_bulb_temperature.shape) # Flatten the array to make it more efficient to find a linear fit # for every point of interest. We can apply the fitting function # along the right axis to apply it to all points in one go. wet_bulb_temperature_values = ( wet_bulb_temperature[index3d].reshape(len(heights), -1)) gradient_values, intercept_values, _, _, _, = ( np.apply_along_axis( fitting_function, 0, wet_bulb_temperature_values)) # Fill in the right gradients and intercepts in the 2D array. gradient[sea_points] = gradient_values intercept[sea_points] = intercept_values return gradient, intercept def fill_in_sea_points( self, snow_level_data, land_sea_data, max_wb_integral, wet_bulb_temperature, heights): """ Fill in any sea points where we have not found a snow falling level by the time we get to sea level, i.e. where the whole wet bulb temperature integral is below the threshold. This function finds a linear fit to the wet bulb temperature close to sea level and uses this to find where an extrapolated wet bulb temperature integral would cross the threshold. This results in snow falling levels below sea level for points where we have applied the extrapolation. Assumes that height is the first axis in the wet_bulb_integral array. Args: snow_level_data(numpy.ndarray): The snow falling level array, filled with values for points whose wet bulb temperature integral crossed the theshold. land_sea_data (numpy.ndarray): The binary land-sea mask max_wb_integral (numpy.ndarray): The wet bulb temperature integral at the final height level used in the integration. This has the maximum values for the wet bulb temperature integral at any level. wet_bulb_temperature (numpy.ndarray): The wet bulb temperature profile at each grid point, with height as the leading dimension. heights (numpy.ndarray): The vertical height levels above orography, matching the leading dimension of the wet_bulb_temperature. """ sea_points = ( np.isnan(snow_level_data) & (land_sea_data < 1.0) & (max_wb_integral < self.falling_level_threshold)) if np.all(sea_points is False): return gradient, intercept = self.linear_wet_bulb_fit(wet_bulb_temperature, heights, sea_points) self.find_extrapolated_falling_level(max_wb_integral, gradient, intercept, snow_level_data, sea_points) @staticmethod def fill_in_by_horizontal_interpolation( snow_level_data, max_in_nbhood_orog, orog_data): """ Fill in any remaining unset areas in the snow falling level by using linear horizontal interpolation across the grid. 
As snow falling levels at the highest height levels will be filled in by this point any points that still don't have a valid snow falling level have the snow falling level at or below the surface orography. This function uses the following steps to help ensure that the filled in values are above or below the orography: 1. Fill in the snow-level for points with no value yet set using horizontal interpolation from surrounding set points. Only interpolate from surrounding set points at which the snow falling level is below the maximum orography height in the region around the unset point. This helps us avoid spreading very high snow falling levels across areas where we had missing data. 2. Fill any gaps that still remain where the linear interpolation has not been able to find a value because there is not enough data (e.g at the corners of the domain). Use nearest neighbour interpolation. 3. Check whether despite our efforts we have still filled in some of the missing points with snow falling levels above the orography. In these cases set the missing points to the height of orography. We then return the filled in array, which hopefully has no more missing data. Args: snow_level_data (numpy.ndarray): The snow falling level array, filled with values for points whose wet bulb temperature integral crossed the theshold. max_in_nbhood_orog (numpy.ndarray): The array containing maximum of the orography field in a given radius. orog_data(numpy.data): The array containing the orography data. Returns: snow_filled (numpy.ndarray): The snow falling level array with missing data filled by horizontal interpolation. """ # Interpolate linearly across the remaining points index = ~np.isnan(snow_level_data) index_valid_data = ( snow_level_data[index] <= max_in_nbhood_orog[index]) index[index] = index_valid_data snow_filled = snow_level_data if np.any(index): ynum, xnum = snow_level_data.shape (y_points, x_points) = np.mgrid[0:ynum, 0:xnum] values = snow_level_data[index] # Try to do the horizontal interpolation to fill in any gaps, # but if there are not enough points or the points are not arranged # in a way that allows the horizontal interpolation, skip # and use nearest neighbour intead. try: snow_level_data_updated = griddata( np.where(index), values, (y_points, x_points), method='linear') except QhullError: snow_level_data_updated = snow_level_data else: snow_filled = snow_level_data_updated # Fill in any remaining missing points using nearest neighbour. # This normally only impact points at the corners of the domain, # where the linear fit doesn't reach. index = ~np.isnan(snow_filled) index_valid_data = ( snow_filled[index] <= max_in_nbhood_orog[index]) index[index] = index_valid_data if np.any(index): values = snow_level_data_updated[index] snow_level_data_updated_2 = griddata( np.where(index), values, (y_points, x_points), method='nearest') snow_filled = snow_level_data_updated_2 # Set the snow falling level at any points that have been filled with # snow falling levels that are above the orography back to the # height of the orography. index = (~np.isfinite(snow_level_data)) snow_level_above_orog = (snow_filled[index] > orog_data[index]) index[index] = snow_level_above_orog snow_filled[index] = orog_data[index] return snow_filled def find_max_in_nbhood_orography(self, orography_cube): """ Find the maximum value of the orography in the region around each grid point in your orography field by finding the maximum in a neighbourhood around that point. 
Args: orography_cube (iris.cube.Cube): The cube containing a single 2 dimensional array of orography data Returns: max_in_nbhood_orog (iris.cube.Cube): The cube containing the maximum in a neighbourhood of the orography data. """ radius_in_metres = convert_number_of_grid_cells_into_distance( orography_cube, self.grid_point_radius) max_in_nbhood_orog = OccurrenceWithinVicinity( radius_in_metres).process(orography_cube) return max_in_nbhood_orog def process(self, temperature, relative_humidity, pressure, orog, land_sea_mask): """ Calculate the wet bulb temperature integral by firstly calculating the wet bulb temperature from the inputs provided, and then calculating the vertical integral of the wet bulb temperature. Find the falling_snow_level by finding the height above sea level corresponding to the falling_level_threshold in the integral data. Fill in missing data appropriately. Args: temperature (iris.cube.Cube): Cube of air temperatures (K). relative_humidity (iris.cube.Cube): Cube of relative humidities (%, converted to fractional). pressure (iris.cube.Cube): Cube of air pressures (Pa). orog (iris.cube.Cube): Cube of orography (m). land_sea_mask (iris.cube.Cube): Cube containing a binary land-sea mask. Returns: falling_snow_level (iris.cube.Cube): Cube of Falling Snow Level above sea level (asl). """ # Calculate wet-bulb temperature integral. wet_bulb_temperature, wet_bulb_integral = ( self.wet_bulb_integral_plugin.process( temperature, relative_humidity, pressure)) # Find highest height from height bounds. # If these are set to None then use the heights in temperature. height_bounds = wet_bulb_integral.coord('height').bounds heights = temperature.coord('height').points if height_bounds is None: highest_height = heights[-1] else: highest_height = height_bounds[0][-1] # Firstly we need to slice over height, x and y x_coord = wet_bulb_integral.coord(axis='x').name() y_coord = wet_bulb_integral.coord(axis='y').name() orography = next(orog.slices([y_coord, x_coord])) orog_data = orography.data land_sea_data = next(land_sea_mask.slices([y_coord, x_coord])).data snow = iris.cube.CubeList([]) slice_list = ['height', y_coord, x_coord] for wb_integral, wet_bulb_temp in zip( wet_bulb_integral.slices(slice_list), wet_bulb_temperature.slices(slice_list)): height_points = wb_integral.coord('height').points # Calculate falling snow level above sea level. snow_cube = wb_integral[0] snow_cube.rename('falling_snow_level_asl') snow_cube.units = 'm' snow_cube.remove_coord('height') snow_cube.data = self.find_falling_level(wb_integral.data, orog_data, height_points) # Fill in missing data self.fill_in_high_snow_falling_levels( snow_cube.data, orog_data, wb_integral.data.max(axis=0), highest_height) self.fill_in_sea_points( snow_cube.data, land_sea_data, wb_integral.data.max(axis=0), wet_bulb_temp.data, heights) max_nbhood_orog = self.find_max_in_nbhood_orography(orography) updated_snow_level = self.fill_in_by_horizontal_interpolation( snow_cube.data, max_nbhood_orog.data, orog_data) points = np.where(~np.isfinite(snow_cube.data)) snow_cube.data[points] = updated_snow_level[points] # Fill in any remaining points with missing data: remaining_points = np.where(np.isnan(snow_cube.data)) snow_cube.data[remaining_points] = self.missing_data snow.append(snow_cube) falling_snow_level = snow.merge_cube() return falling_snow_level
1.453125
1
eactivities/models/documentation.py
lukegb/ehacktivities
1
12783955
<filename>eactivities/models/documentation.py<gh_stars>1-10
from . import Model, ArrayModel, DictModel
from eactivities.parsers.documentation import InventoryParser, KeyListsParser, RiskAssessmentParser


class Documentation(Model):
    def _spawn(self, model_cls, parser_cls):
        kwargs = {
            'club_id': self._data['club_id']
        }
        return model_cls(
            eactivities=self._eactivities, parent=self,
            parser=parser_cls, data=None, arguments=kwargs
        )

    def inventory(self):
        return self._spawn(Inventory, InventoryParser)

    def risk_assessment(self):
        return self._spawn(RiskAssessment, RiskAssessmentParser)

    def key_lists(self):
        return self._spawn(KeyLists, KeyListsParser)


class InventoryItem(Model):
    pass


class Inventory(ArrayModel):
    _submodel = InventoryItem


class Risk(Model):
    pass


class RiskAssessment(ArrayModel):
    _submodel = Risk


class KeyListPerson(Model):
    pass


class KeyListPeople(ArrayModel):
    _submodel = KeyListPerson


class KeyList(Model):
    _submodels = {
        'people': KeyListPeople
    }


class KeyLists(DictModel):
    _submodel = KeyList
2.5
2
dreamplace/ops/electric_potential/electric_overflow.py
Eternity666/DREAMPlace
323
12783956
## # @file electric_overflow.py # @author <NAME> # @date Aug 2018 # import math import numpy as np import torch from torch import nn from torch.autograd import Function from torch.nn import functional as F import dreamplace.ops.electric_potential.electric_potential_cpp as electric_potential_cpp import dreamplace.configure as configure if configure.compile_configurations["CUDA_FOUND"] == "TRUE": import dreamplace.ops.electric_potential.electric_potential_cuda as electric_potential_cuda import pdb import matplotlib matplotlib.use('Agg') from mpl_toolkits.mplot3d import Axes3D import matplotlib.pyplot as plt class ElectricDensityMapFunction(Function): """ @brief compute density overflow. @param ctx pytorch API to store data for backward proporgation @param pos location of cells, x and then y @param node_size_x_clamped stretched size, max(bin_size*sqrt2, node_size) @param node_size_y_clamped stretched size, max(bin_size*sqrt2, node_size) @param offset_x (stretched size - node_size) / 2 @param offset_y (stretched size - node_size) / 2 @param ratio original area / stretched area @param initial_density_map density_map for fixed cells @param target_density target density @param xl left boundary @param yl lower boundary @param xh right boundary @param yh upper boundary @param bin_size_x bin width @param bin_size_x bin height @param num_movable_nodes number of movable cells @param num_filler_nodes number of filler cells @param padding bin padding to boundary of placement region @param padding_mask padding mask with 0 and 1 to indicate padding bins with padding regions to be 1 @param num_bins_x number of bins in horizontal direction @param num_bins_y number of bins in vertical direction @param num_movable_impacted_bins_x number of impacted bins for any movable cell in x direction @param num_movable_impacted_bins_y number of impacted bins for any movable cell in y direction @param num_filler_impacted_bins_x number of impacted bins for any filler cell in x direction @param num_filler_impacted_bins_y number of impacted bins for any filler cell in y direction @param sorted_node_map the indices of the movable node map """ @staticmethod def forward( pos, node_size_x_clamped, node_size_y_clamped, offset_x, offset_y, ratio, bin_center_x, bin_center_y, initial_density_map, target_density, xl, yl, xh, yh, bin_size_x, bin_size_y, num_movable_nodes, num_filler_nodes, padding, padding_mask, # same dimensions as density map, with padding regions to be 1 num_bins_x, num_bins_y, num_movable_impacted_bins_x, num_movable_impacted_bins_y, num_filler_impacted_bins_x, num_filler_impacted_bins_y, deterministic_flag, sorted_node_map): if pos.is_cuda: output = electric_potential_cuda.density_map( pos.view(pos.numel()), node_size_x_clamped, node_size_y_clamped, offset_x, offset_y, ratio, bin_center_x, bin_center_y, initial_density_map, target_density, xl, yl, xh, yh, bin_size_x, bin_size_y, num_movable_nodes, num_filler_nodes, padding, num_bins_x, num_bins_y, num_movable_impacted_bins_x, num_movable_impacted_bins_y, num_filler_impacted_bins_x, num_filler_impacted_bins_y, deterministic_flag, sorted_node_map) else: output = electric_potential_cpp.density_map( pos.view(pos.numel()), node_size_x_clamped, node_size_y_clamped, offset_x, offset_y, ratio, bin_center_x, bin_center_y, initial_density_map, target_density, xl, yl, xh, yh, bin_size_x, bin_size_y, num_movable_nodes, num_filler_nodes, padding, num_bins_x, num_bins_y, num_movable_impacted_bins_x, num_movable_impacted_bins_y, num_filler_impacted_bins_x, 
num_filler_impacted_bins_y, deterministic_flag) density_map = output.view([num_bins_x, num_bins_y]) # set padding density if padding > 0: density_map.masked_fill_(padding_mask, target_density * bin_size_x * bin_size_y) return density_map class ElectricOverflow(nn.Module): def __init__( self, node_size_x, node_size_y, bin_center_x, bin_center_y, target_density, xl, yl, xh, yh, bin_size_x, bin_size_y, num_movable_nodes, num_terminals, num_filler_nodes, padding, deterministic_flag, # control whether to use deterministic routine sorted_node_map, movable_macro_mask=None): super(ElectricOverflow, self).__init__() self.node_size_x = node_size_x self.node_size_y = node_size_y self.bin_center_x = bin_center_x self.bin_center_y = bin_center_y self.target_density = target_density self.xl = xl self.yl = yl self.xh = xh self.yh = yh self.bin_size_x = bin_size_x self.bin_size_y = bin_size_y self.num_movable_nodes = num_movable_nodes self.num_terminals = num_terminals self.num_filler_nodes = num_filler_nodes self.padding = padding self.sorted_node_map = sorted_node_map self.movable_macro_mask = movable_macro_mask self.deterministic_flag = deterministic_flag self.reset() def reset(self): sqrt2 = math.sqrt(2) # clamped means stretch a cell to bin size # clamped = max(bin_size*sqrt2, node_size) # offset means half of the stretch size # ratio means the original area over the stretched area self.node_size_x_clamped = self.node_size_x.clamp(min=self.bin_size_x * sqrt2) self.offset_x = (self.node_size_x - self.node_size_x_clamped).mul(0.5) self.node_size_y_clamped = self.node_size_y.clamp(min=self.bin_size_y * sqrt2) self.offset_y = (self.node_size_y - self.node_size_y_clamped).mul(0.5) node_areas = self.node_size_x * self.node_size_y self.ratio = node_areas / (self.node_size_x_clamped * self.node_size_y_clamped) # detect movable macros and scale down the density to avoid halos # the definition of movable macros should be different according to algorithms self.num_movable_macros = 0 if self.target_density < 1 and self.movable_macro_mask is not None: self.num_movable_macros = self.movable_macro_mask.sum().data.item() self.ratio[:self.num_movable_nodes][ self.movable_macro_mask] = self.target_density # compute maximum impacted bins self.num_bins_x = int(math.ceil((self.xh - self.xl) / self.bin_size_x)) self.num_bins_y = int(math.ceil((self.yh - self.yl) / self.bin_size_y)) if self.num_movable_nodes: self.num_movable_impacted_bins_x = int( ((self.node_size_x[:self.num_movable_nodes].max() + 2 * sqrt2 * self.bin_size_x) / self.bin_size_x).ceil().clamp(max=self.num_bins_x)) self.num_movable_impacted_bins_y = int( ((self.node_size_y[:self.num_movable_nodes].max() + 2 * sqrt2 * self.bin_size_y) / self.bin_size_y).ceil().clamp(max=self.num_bins_y)) else: self.num_movable_impacted_bins_x = 0 self.num_movable_impacted_bins_y = 0 if self.num_filler_nodes: self.num_filler_impacted_bins_x = ( (self.node_size_x[-self.num_filler_nodes:].max() + 2 * sqrt2 * self.bin_size_x) / self.bin_size_x).ceil().clamp(max=self.num_bins_x) self.num_filler_impacted_bins_y = ( (self.node_size_y[-self.num_filler_nodes:].max() + 2 * sqrt2 * self.bin_size_y) / self.bin_size_y).ceil().clamp(max=self.num_bins_y) else: self.num_filler_impacted_bins_x = 0 self.num_filler_impacted_bins_y = 0 if self.padding > 0: self.padding_mask = torch.ones(self.num_bins_x, self.num_bins_y, dtype=torch.uint8, device=self.node_size_x.device) self.padding_mask[self.padding:self.num_bins_x - self.padding, self.padding:self.num_bins_y - self.padding].fill_(0) else: 
self.padding_mask = torch.zeros(self.num_bins_x, self.num_bins_y, dtype=torch.uint8, device=self.node_size_x.device) # initial density_map due to fixed cells self.initial_density_map = None def compute_initial_density_map(self, pos): if self.num_terminals == 0: num_fixed_impacted_bins_x = 0 num_fixed_impacted_bins_y = 0 else: max_size_x = self.node_size_x[self.num_movable_nodes:self. num_movable_nodes + self.num_terminals].max() max_size_y = self.node_size_y[self.num_movable_nodes:self. num_movable_nodes + self.num_terminals].max() num_fixed_impacted_bins_x = ((max_size_x + self.bin_size_x) / self.bin_size_x).ceil().clamp( max=self.num_bins_x) num_fixed_impacted_bins_y = ((max_size_y + self.bin_size_y) / self.bin_size_y).ceil().clamp( max=self.num_bins_y) if pos.is_cuda: func = electric_potential_cuda.fixed_density_map else: func = electric_potential_cpp.fixed_density_map self.initial_density_map = func( pos, self.node_size_x, self.node_size_y, self.bin_center_x, self.bin_center_y, self.xl, self.yl, self.xh, self.yh, self.bin_size_x, self.bin_size_y, self.num_movable_nodes, self.num_terminals, self.num_bins_x, self.num_bins_y, num_fixed_impacted_bins_x, num_fixed_impacted_bins_y, self.deterministic_flag) # scale density of fixed macros self.initial_density_map.mul_(self.target_density) def forward(self, pos): if self.initial_density_map is None: self.compute_initial_density_map(pos) density_map = ElectricDensityMapFunction.forward( pos, self.node_size_x_clamped, self.node_size_y_clamped, self.offset_x, self.offset_y, self.ratio, self.bin_center_x, self.bin_center_y, self.initial_density_map, self.target_density, self.xl, self.yl, self.xh, self.yh, self.bin_size_x, self.bin_size_y, self.num_movable_nodes, self.num_filler_nodes, self.padding, self.padding_mask, self.num_bins_x, self.num_bins_y, self.num_movable_impacted_bins_x, self.num_movable_impacted_bins_y, self.num_filler_impacted_bins_x, self.num_filler_impacted_bins_y, self.deterministic_flag, self.sorted_node_map) bin_area = self.bin_size_x * self.bin_size_y density_cost = (density_map - self.target_density * bin_area).clamp_(min=0.0).sum().unsqueeze(0) return density_cost, density_map.max().unsqueeze(0) / bin_area def plot(plot_count, density_map, padding, name): """ density map contour and heat map """ density_map = density_map[padding:density_map.shape[0] - padding, padding:density_map.shape[1] - padding] print("max density = %g @ %s" % (np.amax(density_map), np.unravel_index(np.argmax(density_map), density_map.shape))) print("mean density = %g" % (np.mean(density_map))) fig = plt.figure() ax = fig.gca(projection='3d') x = np.arange(density_map.shape[0]) y = np.arange(density_map.shape[1]) x, y = np.meshgrid(x, y) # looks like x and y should be swapped ax.plot_surface(y, x, density_map, alpha=0.8) ax.set_xlabel('x') ax.set_ylabel('y') ax.set_zlabel('density') # plt.tight_layout() plt.savefig(name + ".3d.png") plt.close() # plt.clf() #fig, ax = plt.subplots() # ax.pcolor(density_map) # Loop over data dimensions and create text annotations. # for i in range(density_map.shape[0]): # for j in range(density_map.shape[1]): # text = ax.text(j, i, density_map[i, j], # ha="center", va="center", color="w") # fig.tight_layout() #plt.savefig(name+".2d.%d.png" % (plot_count)) # plt.close()
2.125
2
mysite/polls/urls.py
Predator01/potential-adventure
0
12783957
from django.conf.urls import url

from polls import views

urlpatterns = [
    url(r'^home/$', views.home, name='home'),
    url(r'^about/$', views.about, name='about'),
]
1.695313
2
app/robo-advisor.py
finleycgeorgetown/Robo-Advisor
1
12783958
# app/robo_advisor.py import csv import os import json from dotenv import load_dotenv import requests from datetime import datetime now = datetime.now() datelabel = now.strftime("%d/%m/%Y %H:%M:%S") load_dotenv() # utility function to convert float or integer to usd-formatted string (for printing # ... adapted from: <NAME> project walkthrough https://www.youtube.com/watch?v=UXAVOP1oCog&t=847s def to_usd(my_price): return "${0:,.2f}".format(my_price) api_key = os.environ.get("ALPHAVANTAGE_API_KEY") #stock = str(input("Which stock do you wish to check? ")) #stock_upper = stock.upper() symbol = "" while True: try: stock = str(input("Which stock do you wish to check? ")) stock_upper = stock.upper() symbol = stock_upper except KeyError: print("Please enter a valid stock symbol.") continue if len(symbol) >= 6: print("Please enter a valid stock symbol.") continue else: break #if len(stock_upper) >=1 or 5 >= len(stock_upper): # symbol = stock_upper # elif len(stock_upper) > 5: # print("please enter a valid stock") # quit() #else: # quit() request_url = f"https://www.alphavantage.co/query?function=TIME_SERIES_DAILY&symbol={symbol}&apikey={api_key}" response = requests.get(request_url) parsed_response = json.loads(response.text) last_refreshed = parsed_response["Meta Data"]["3. Last Refreshed"] tsd = parsed_response["Time Series (Daily)"] dates = list(tsd.keys()) latest_day = dates[0] latest_close = tsd[latest_day]["4. close"] #max of all high prices high_prices = [] low_prices = [] for date in dates: high_price = tsd[date]["2. high"] low_price = tsd[date]["3. low"] high_prices.append(float(high_price)) low_prices.append(float(low_price)) recent_high = max(high_prices) recent_low = min(low_prices) csv_file_path = os.path.join(os.path.dirname(__file__), "..", "data", "prices.csv") csv_headers = ["timestamp", "open", "high", "low", "close", "volume"] with open(csv_file_path, "w") as csv_file: writer = csv.DictWriter(csv_file, fieldnames=csv_headers) writer.writeheader() for date in dates: daily_prices = tsd[date] writer.writerow({ "timestamp": date, "open": daily_prices["1. open"], "high": daily_prices["2. high"], "low": daily_prices["3. low"], "close": daily_prices["4. close"], "volume": daily_prices["5. volume"], }) stock_decision = "" decision_reason = "" if float(latest_close) < (1.2 * float(recent_low)): stock_decision = "Buy!" decision_reason = "The latest closing price is within 20 percent of the recent low." else: stock_decision = "Don't Buy." decision_reason = "The latest closing price is not within 20 percent of the recent low." print("-------------------------") print(f"SELECTED SYMBOL: {symbol}") print("-------------------------") print("REQUESTING STOCK MARKET DATA...") print(f"REQUEST AT: {datelabel}") print("-------------------------") print(f"LATEST DAY: {last_refreshed}") print(f"LATEST CLOSE: {to_usd(float(latest_close))}") print(f"RECENT HIGH: {to_usd(float(recent_high))}") print(f"RECENT LOW: {to_usd(float(recent_low))}") print("-------------------------") print(f"RECOMMENDATION: {stock_decision}") print(f"RECOMMENDATION REASON: {decision_reason}") print("-------------------------") print(f"WRITING DATA TO CSV: {csv_file_path}...") print("-------------------------") print("HAPPY INVESTING!") print("-------------------------")
3.390625
3
modules/analyzer/save_txt.py
Kolyamba-mamba/technology-partner-search-module
2
12783959
<reponame>Kolyamba-mamba/technology-partner-search-module
import os
import sys

sys.path.append(os.path.abspath('../../mySite/mySite'))

from modules.dbActions.getTables import get_abstract_and_descr_path
from modules.helpers.saoSelector import rem, split_description


def save_text_db_to_txt(con, filename_base='C:/Users/mrkol/Documents/myLog/dataset.txt'):
    # Take the patent texts from the database
    paths = get_abstract_and_descr_path(con)
    # Save them to files for training the model
    for i, el in enumerate(paths):
        try:
            with open(filename_base, 'a', encoding="utf-8") as file:
                if el[0]:
                    if os.path.isfile(el[0]):
                        with open(str(el[0]), 'r', encoding='utf8') as f:
                            file.write(f.read())
                if el[1]:
                    if os.path.isfile(el[1]):
                        with open(str(el[1]), 'r', encoding='utf8') as f:
                            file.write(f.read())
        except EnvironmentError:
            print("Error while writing to file: " + filename_base)
    return filename_base


def save_text_db_to_txt2(con, filename_base='C:/Users/mrkol/Documents/myLog/dataset.txt'):
    # Take the patent texts from the database
    paths = get_abstract_and_descr_path(con)
    # Save them to files for training the model
    for i, el in enumerate(paths):
        try:
            with open(filename_base, 'a', encoding="utf-8") as file:
                if el[0]:
                    if os.path.isfile(el[0]):
                        with open(str(el[0]), 'r', encoding='utf8') as f:
                            text = f.read()
                            text = rem(text)
                            file.write(text)
                if el[1]:
                    if os.path.isfile(el[1]):
                        with open(str(el[1]), 'r', encoding='utf8') as f:
                            text = f.read()
                            text = split_description(text)
                            text = rem(text)
                            file.write(text)
        except EnvironmentError:
            print("Error while writing to file: " + filename_base)
    return filename_base
2.40625
2
ros_comms/ros_pub.py
FabianFalck/de_niro
7
12783960
#!/usr/bin/env python
"""
Wrapper to ROS publisher.
Author: <NAME>
Date: 05/18
"""
import rospy


class ROSPublisher(object):
    def __init__(self, _topic, _message_type, _queue_size=1, rate=10):
        """
        ROSPublisher constructor.
        :param _topic: string, ROS topic to publish on
        :param _message_type: custom message, published on topic
        :param _queue_size: int, how many messages to queue when publishing.
                            With the default, a subscriber will only take the latest message.
        :param rate: int, publishing rate in Hz
        """
        self.topic = _topic
        self.message_type = _message_type
        self.pub = rospy.Publisher(self.topic, self.message_type, queue_size=_queue_size)
        self.rate = rospy.Rate(rate)

    def publish(self, data=None):
        """
        Publish one message on the initialized topic.
        :param data: the message to publish, e.g. when having the publisher in a subscriber callback
        :return:
        """
        if data is not None:
            self.pub.publish(data)
        rospy.loginfo("Message published on topic %s", self.topic)
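A minimal usage sketch for the ROSPublisher wrapper above, assuming a standard ROS 1 environment with std_msgs available; the node name, topic, and message type below are illustrative only and not part of the original module.

# Hypothetical usage of ROSPublisher; node name, topic and message are illustrative.
import rospy
from std_msgs.msg import String

if __name__ == "__main__":
    rospy.init_node("example_publisher")
    publisher = ROSPublisher("/chatter", String, _queue_size=1, rate=10)
    while not rospy.is_shutdown():
        publisher.publish(String(data="hello"))
        publisher.rate.sleep()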
3.109375
3
simulator/front_end/src/front_end/grpc/client.py
yrrah/cs6620-fall21-intelligent-assignment-of-data-to-dedup-nodes
1
12783961
<reponame>yrrah/cs6620-fall21-intelligent-assignment-of-data-to-dedup-nodes from __future__ import print_function import grpc from front_end.grpc import assignService_pb2_grpc, assignService_pb2 from front_end.grpc.assignService_pb2 import Acknowledgement from front_end.region_creation.input_streams import HashFile from front_end.region_creation.ae_region_creation import create_ae_regions def kill_backend(back_end_address: str) -> None: with grpc.insecure_channel(back_end_address) as channel: stub = assignService_pb2_grpc.RegionReceiveServiceStub(channel) region_to_send = assignService_pb2.Region() region_to_send.domainNumber = -1 stub.AssignRegion(region_to_send) def sendToBackend(domain: int, back_end_address: str, region) -> Acknowledgement: """ A client code that sends a region to the backend server,based on domainId. """ with grpc.insecure_channel(back_end_address) as channel: stub = assignService_pb2_grpc.RegionReceiveServiceStub(channel) # Assigning to the region class region_to_send = assignService_pb2.Region() region_to_send.fingerPrint.extend(region.fingerprints) region_to_send.domainNumber = domain region_to_send.maxSize = region.max_size region_to_send.currentSize = region.current_size # Get the response from the server(like a callback) response = stub.AssignRegion(region_to_send) return response def shutdown_backend(backend_address): """ A client method that calls the ShutDownBackend method in the server to kill it. """ channel = grpc.insecure_channel(backend_address) stub = assignService_pb2_grpc.RegionReceiveServiceStub(channel) print('Pod with address {0} has been shut down '.format(backend_address)) stub.ShutDownPod(assignService_pb2.Empty()) def hash_file_demo(filename: str, ip_address): """ Send the region to a backend server based on the ip_address sent to it by the simulate code--> which in turn gets it from the assignment code. """ hash_file = HashFile(filename) server_ip = ip_address + ':50051' for region in create_ae_regions(4, hash_file): response = sendToBackend(1, server_ip, region) print(response) print('shutdown pod with {0}'.format(server_ip)) shutdown_backend(ip_address + ':50051') if __name__ == '__main__': hash_file_demo("../../traces/fslhomes-user006-2012-01-02.8kb.hash.anon", "localhost")
2.515625
3
PFERD/tmp_dir.py
ff781/PFERD
0
12783962
"""Helper functions and classes for temporary folders.""" import logging import shutil from pathlib import Path from types import TracebackType from typing import Optional, Type from .location import Location LOGGER = logging.getLogger(__name__) class TmpDir(Location): """A temporary folder that can create files or nested temp folders.""" def __init__(self, path: Path): """Create a new temporary folder for the given path.""" super().__init__(path) self._counter = 0 self.cleanup() self.path.mkdir(parents=True, exist_ok=True) def __str__(self) -> str: """Format the folder as a string.""" return f"Folder at {self.path}" def __enter__(self) -> 'TmpDir': """Context manager entry function.""" return self # pylint: disable=useless-return def __exit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> Optional[bool]: """Context manager exit function. Calls cleanup().""" self.cleanup() return None def new_path(self, prefix: Optional[str] = None) -> Path: """ Return a unique path inside the directory. Doesn't create a file or directory. """ name = f"{prefix if prefix else 'tmp'}-{self._inc_and_get_counter():03}" LOGGER.debug("Creating temp file %s", name) return self.resolve(Path(name)) def new_subdir(self, prefix: Optional[str] = None) -> 'TmpDir': """ Create a new nested temporary folder and return it. """ name = f"{prefix if prefix else 'tmp'}-{self._inc_and_get_counter():03}" sub_path = self.resolve(Path(name)) sub_path.mkdir(parents=True) LOGGER.debug("Creating temp dir %s at %s", name, sub_path) return TmpDir(sub_path) def cleanup(self) -> None: """Delete this folder and all contained files.""" LOGGER.debug("Deleting temp folder %s", self.path) if self.path.resolve().exists(): shutil.rmtree(self.path.resolve()) def _inc_and_get_counter(self) -> int: """Get and increment the counter by one.""" counter = self._counter self._counter += 1 return counter
3.5
4
dashboard/dashboard/pinpoint/models/quest/find_isolated_test.py
ravitejavalluri/catapult
0
12783963
<reponame>ravitejavalluri/catapult # Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import unittest from google.appengine.ext import ndb from google.appengine.ext import testbed from dashboard.pinpoint.models import change as change_module from dashboard.pinpoint.models import isolated from dashboard.pinpoint.models.quest import find_isolated class _FindIsolatedTest(unittest.TestCase): def setUp(self): self.testbed = testbed.Testbed() self.testbed.activate() self.testbed.init_datastore_v3_stub() self.testbed.init_memcache_stub() ndb.get_context().clear_cache() isolated.Put(( ('Mac Builder', 'f9f2b720', 'telemetry_perf_tests', '7c7e90be'),)) def tearDown(self): self.testbed.deactivate() def assertExecutionFailure(self, execution): self.assertTrue(execution.completed) self.assertTrue(execution.failed) self.assertEqual(len(execution.result_values), 1) self.assertIsInstance(execution.result_values[0], Exception) self.assertEqual(execution.result_arguments, {}) def assertExecutionSuccess(self, execution): self.assertTrue(execution.completed) self.assertFalse(execution.failed) self.assertEqual(execution.result_values, (0,)) class IsolateLookupTest(_FindIsolatedTest): def testIsolateLookupSuccess(self): change = change_module.Change(change_module.Dep('chromium/src', 'f9f2b720')) execution = find_isolated.FindIsolated('Mac Pro Perf').Start(change) execution.Poll() self.assertExecutionSuccess(execution) self.assertEqual(execution.result_arguments, {'isolated_hash': '7c7e90be'}) def testChangeHasMultipleDeps(self): base_commit = change_module.Dep('chromium/src', 'f9f2b720') deps = (change_module.Dep('r2', 'hash'), change_module.Dep('r3', 'hash')) change = change_module.Change(base_commit, deps) execution = find_isolated.FindIsolated('Mac Pro Perf').Start(change) execution.Poll() self.assertExecutionFailure(execution) def testChangeHasPatch(self): change = change_module.Change( change_module.Dep('chromium/src', 'f9f2b720'), patch='patch/rietveld/codereview.chromium.org/2570613003/1') execution = find_isolated.FindIsolated('Mac Pro Perf').Start(change) execution.Poll() self.assertExecutionFailure(execution) def testNoIsolatedAvailable(self): change = change_module.Change(change_module.Dep('chromium/src', 'bad_hash')) execution = find_isolated.FindIsolated('Mac Pro Perf').Start(change) execution.Poll() self.assertExecutionFailure(execution) class BuilderLookupTest(_FindIsolatedTest): def testSuccesfulBuilderLookupForAllBuilders(self): builder_testers = ( ('arm-builder-rel', 'health-plan-clankium-phone'), ('Android Builder', 'Android Nexus5 Perf'), ('Android arm64 Builder', 'Android Nexus5X Perf'), ('Linux Builder', 'Linux Perf'), ('Mac Builder', 'Mac Air Perf'), ('Win Builder', 'Win 7 Perf'), ('Win x64 Builder', 'Win Zenbook Perf'), ) isolated.Put( (builder, 'git hash', 'telemetry_perf_tests', hex(hash(builder))) for builder, _ in builder_testers) for builder, tester in builder_testers: change = change_module.Change( change_module.Dep('chromium/src', 'git hash')) execution = find_isolated.FindIsolated(tester).Start(change) execution.Poll() self.assertExecutionSuccess(execution) self.assertEqual(execution.result_arguments, {'isolated_hash': hex(hash(builder))}) def testUnknownBuilder(self): change = change_module.Change(change_module.Dep('chromium/src', 'f9f2b720')) execution = find_isolated.FindIsolated('Unix Perf').Start(change) execution.Poll() self.assertExecutionFailure(execution)
1.78125
2
TurtleCore/PythonCode/ScrolledCanvasClass.py
FrankFK/simple-graphics-for-csharp-beginners
0
12783964
## helper functions for Scrolled Canvas, to forward Canvas-methods ## to ScrolledCanvas class def __methodDict(cls, _dict): """helper function for Scrolled Canvas""" baseList = list(cls.__bases__) baseList.reverse() for _super in baseList: __methodDict(_super, _dict) for key, value in cls.__dict__.items(): if type(value) == types.FunctionType: _dict[key] = value def __methods(cls): """helper function for Scrolled Canvas""" _dict = {} __methodDict(cls, _dict) return _dict.keys() __stringBody = ( 'def %(method)s(self, *args, **kw): return ' + 'self.%(attribute)s.%(method)s(*args, **kw)') def __forwardmethods(fromClass, toClass, toPart, exclude = ()): ### MANY CHANGES ### _dict_1 = {} __methodDict(toClass, _dict_1) _dict = {} mfc = __methods(fromClass) for ex in _dict_1.keys(): if ex[:1] == '_' or ex[-1:] == '_' or ex in exclude or ex in mfc: pass else: _dict[ex] = _dict_1[ex] for method, func in _dict.items(): d = {'method': method, 'func': func} if isinstance(toPart, str): execString = \ __stringBody % {'method' : method, 'attribute' : toPart} exec(execString, d) setattr(fromClass, method, d[method]) ### NEWU! class ScrolledCanvas(TK.Frame): """Modeled after the scrolled canvas class from Grayons's Tkinter book. Used as the default canvas, which pops up automatically when using turtle graphics functions or the Turtle class. """ def __init__(self, master, width=500, height=350, canvwidth=600, canvheight=500): TK.Frame.__init__(self, master, width=width, height=height) self._rootwindow = self.winfo_toplevel() self.width, self.height = width, height self.canvwidth, self.canvheight = canvwidth, canvheight self.bg = "white" self._canvas = TK.Canvas(master, width=width, height=height, bg=self.bg, relief=TK.SUNKEN, borderwidth=2) self.hscroll = TK.Scrollbar(master, command=self._canvas.xview, orient=TK.HORIZONTAL) self.vscroll = TK.Scrollbar(master, command=self._canvas.yview) self._canvas.configure(xscrollcommand=self.hscroll.set, yscrollcommand=self.vscroll.set) self.rowconfigure(0, weight=1, minsize=0) self.columnconfigure(0, weight=1, minsize=0) self._canvas.grid(padx=1, in_ = self, pady=1, row=0, column=0, rowspan=1, columnspan=1, sticky='news') self.vscroll.grid(padx=1, in_ = self, pady=1, row=0, column=1, rowspan=1, columnspan=1, sticky='news') self.hscroll.grid(padx=1, in_ = self, pady=1, row=1, column=0, rowspan=1, columnspan=1, sticky='news') self.reset() self._rootwindow.bind('<Configure>', self.onResize) def reset(self, canvwidth=None, canvheight=None, bg = None): """Adjust canvas and scrollbars according to given canvas size.""" if canvwidth: self.canvwidth = canvwidth if canvheight: self.canvheight = canvheight if bg: self.bg = bg self._canvas.config(bg=bg, scrollregion=(-self.canvwidth//2, -self.canvheight//2, self.canvwidth//2, self.canvheight//2)) self._canvas.xview_moveto(0.5*(self.canvwidth - self.width + 30) / self.canvwidth) self._canvas.yview_moveto(0.5*(self.canvheight- self.height + 30) / self.canvheight) self.adjustScrolls() def adjustScrolls(self): """ Adjust scrollbars according to window- and canvas-size. 
""" cwidth = self._canvas.winfo_width() cheight = self._canvas.winfo_height() self._canvas.xview_moveto(0.5*(self.canvwidth-cwidth)/self.canvwidth) self._canvas.yview_moveto(0.5*(self.canvheight-cheight)/self.canvheight) if cwidth < self.canvwidth or cheight < self.canvheight: self.hscroll.grid(padx=1, in_ = self, pady=1, row=1, column=0, rowspan=1, columnspan=1, sticky='news') self.vscroll.grid(padx=1, in_ = self, pady=1, row=0, column=1, rowspan=1, columnspan=1, sticky='news') else: self.hscroll.grid_forget() self.vscroll.grid_forget() def onResize(self, event): """self-explanatory""" self.adjustScrolls() def bbox(self, *args): """ 'forward' method, which canvas itself has inherited... """ return self._canvas.bbox(*args) def cget(self, *args, **kwargs): """ 'forward' method, which canvas itself has inherited... """ return self._canvas.cget(*args, **kwargs) def config(self, *args, **kwargs): """ 'forward' method, which canvas itself has inherited... """ self._canvas.config(*args, **kwargs) def bind(self, *args, **kwargs): """ 'forward' method, which canvas itself has inherited... """ self._canvas.bind(*args, **kwargs) def unbind(self, *args, **kwargs): """ 'forward' method, which canvas itself has inherited... """ self._canvas.unbind(*args, **kwargs) def focus_force(self): """ 'forward' method, which canvas itself has inherited... """ self._canvas.focus_force()
2.921875
3
Tema1/Code/Server/src/logic/UDPServer.py
SergiuDeveloper/Programare-Concurenta-si-Distribuita
1
12783965
import socket from threading import Thread, Lock from time import time from .BenchmarkData import BenchmarkData class UDPServer: def __init__(self, host, port, benchmark_file_path, chunk_size, ack): self.__host = host self.__port = port self.__running = False self.__running_lock = Lock() self.__benchmark_file_path = benchmark_file_path self.__chunk_size = chunk_size self.__ack = ack def run(self): with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock: sock.bind((self.__host, self.__port)) print(f"Waiting for clients on {self.__host}:{self.__port}") self.__set_running(True) while self.__get_running(): data = sock.recvfrom(self.__chunk_size) chunk = data[0] address = data[1] client_ip = address[0] handle_client_thread = Thread(target=self.__handle_client, args=(sock, chunk, address, client_ip)) handle_client_thread.start() def stop(self): self.__set_running(False) def __handle_client(self, sock, chunk, address, client_ip): category = 'udp_server' if self.__ack else 'udp_stream_server' file_name_parts = self.__benchmark_file_path.split('.') if len(file_name_parts) == 1: file_name = f"{self.__benchmark_file_path}_{category}" else: file_name = f"{'.'.join(file_name_parts[:-1])}_{category}.{file_name_parts[-1]}" self.__prepare_new_request(client_ip, file_name, category) with open(file_name, "ab") as benchmark_file: benchmark_file.write(chunk) data = BenchmarkData.get_data(client_ip, category) total_bytes = data[1] chunks_count = data[2] BenchmarkData.add_data(client_ip, category, time(), total_bytes + len(chunk), chunks_count + 1) if self.__ack: self.__acknowledge(sock, address) def __prepare_new_request(self, client_ip, file_name, category): if BenchmarkData.get_data(client_ip, category) is not None: return with open(file_name, "wb") as benchmark_file: pass BenchmarkData.add_data(client_ip, category, time(), 0, 0) def __acknowledge(self, sock, address): try: ack_bytes = bytearray() ack_bytes.append(1) sock.sendto(ack_bytes, address) except: pass def __set_running(self, running): self.__running_lock.acquire() self.__running = running self.__running_lock.release() def __get_running(self): self.__running_lock.acquire() running = self.__running self.__running_lock.release() return running
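A minimal client-side sketch for exercising the UDPServer above, assuming it was started with ack=True; the host, port, and chunk size are illustrative assumptions, not values from the original project.

# Hypothetical client for UDPServer; host, port and chunk size are illustrative.
import socket

HOST, PORT, CHUNK_SIZE = "127.0.0.1", 9000, 1024

with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
    payload = b"x" * CHUNK_SIZE
    for _ in range(10):
        sock.sendto(payload, (HOST, PORT))
        ack, _ = sock.recvfrom(1)   # with ack=True the server replies with a single byte
        assert ack == b"\x01"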
2.90625
3
data/scan_records.py
geoffrey0822/deepspeech.pytorch
0
12783966
<reponame>geoffrey0822/deepspeech.pytorch import os, sys, argparse, utils, subprocess def regenerate(src, dst, manifest, new_manifest, sample_rate, mono, min_duration, max_duration): if not os.path.isdir(dst): os.mkdir(dst) if not os.path.isfile(manifest): print('%s not found',manifest) exit(-1) print('new manifest:%s'%new_manifest) file_paths = [os.path.join(src, filename) for filename in os.listdir(src)] total = len(file_paths) print('There are %d audio files at origin'%total) count = 0 file_paths = utils.order_and_prune_files(file_paths, min_duration, max_duration) final_total = len(file_paths) print('%d are valid audio files and %d will be ignored'%(final_total, total-final_total)) with open(new_manifest, 'w') as outf: with open(manifest, 'r') as inputf: for ln in inputf: line = ln.rstrip('\n') fields = line.split(',') audio_path = fields[0] file_name = os.path.basename(audio_path) corpus_path = fields[1] #print(audio_path) if audio_path in file_paths: new_audio_path = os.path.join(dst, file_name) cmd = "sox {} -r {} -b 16 -c 1 {}".format( audio_path, sample_rate, new_audio_path) subprocess.call([cmd], shell=True) outf.write('%s,%s\n'%(new_audio_path, corpus_path)) count+=1 if count%1000==0: print('processed %d/%d'%(count, final_total)) def main(): parser = argparse.ArgumentParser(description='Filter and conversion') parser.add_argument('--src', type=str, help='Path for audio files') parser.add_argument('--manifest', type=str, help='Manifest file for processing') parser.add_argument('--dst', type=str, help='Processed output path') parser.add_argument('--new-manifest', type=str, help='Regenerated manifest file') parser.add_argument('--sample-rate', type=int, default=16000) parser.add_argument('--mono', type=int, default=0) parser.add_argument('--min-duration', type=int, default=0, help='in second') parser.add_argument('--max-duration', type=int, default=0, help='in second') args = parser.parse_args() src = args.src dst = args.dst data_path = args.manifest new_data_path = args.new_manifest sample_rate = args.sample_rate min_duration = None max_duration = None if args.min_duration>0: min_duration = args.min_duration if args.max_duration>0: max_duration = args.max_duration isMono = False if args.mono == 1: isMono = True regenerate(src, dst, manifest=data_path, new_manifest=new_data_path, sample_rate=sample_rate, mono=isMono, min_duration=min_duration, max_duration=max_duration) print('Done') if __name__ == "__main__": main()
2.578125
3
lc_classifier/features/extractors/sn_detections_extractor.py
alercebroker/late_classifier
6
12783967
from typing import Tuple from functools import lru_cache from ..core.base import FeatureExtractorSingleBand import pandas as pd import logging class SupernovaeDetectionFeatureExtractor(FeatureExtractorSingleBand): @lru_cache(1) def get_features_keys_without_band(self) -> Tuple[str, ...]: return ('delta_mag_fid', 'delta_mjd_fid', 'first_mag', 'mean_mag', 'min_mag', 'n_det', 'n_neg', 'n_pos', 'positive_fraction') @lru_cache(1) def get_required_keys(self) -> Tuple[str, ...]: return "isdiffpos", "magnitude", "time", "band" def compute_feature_in_one_band(self, detections, band, **kwargs): grouped_detections = detections.groupby(level=0) return self.compute_feature_in_one_band_from_group(grouped_detections, band, **kwargs) def compute_feature_in_one_band_from_group( self, detections, band, **kwargs): """ Parameters ---------- detections :class:pandas.`DataFrame` DataFrame with single band detections of an object. band :class:int kwargs Not required. Returns :class:pandas.`DataFrame` ------- """ columns = self.get_features_keys_with_band(band) def aux_function(oid_detections, **kwargs): bands = oid_detections['band'].values if band not in bands: oid = oid_detections.index.values[0] logging.debug( f'extractor=SN detection object={oid} required_cols={self.get_required_keys()} band={band}') return self.nan_series_in_band(band) oid_band_detections = oid_detections[bands == band].sort_values('time') is_diff_pos_mask = oid_band_detections['isdiffpos'] > 0 n_pos = len(oid_band_detections[is_diff_pos_mask]) n_neg = len(oid_band_detections[~is_diff_pos_mask]) mags = oid_band_detections['magnitude'].values min_mag = mags.min() first_mag = mags[0] mjds = oid_band_detections['time'].values delta_mjd_fid = mjds[-1] - mjds[0] delta_mag_fid = mags.max() - min_mag positive_fraction = n_pos/(n_pos + n_neg) mean_mag = mags.mean() data = [ delta_mag_fid, delta_mjd_fid, first_mag, mean_mag, min_mag, n_neg + n_pos, n_neg, n_pos, positive_fraction ] sn_det_df = pd.Series( data=data, index=columns) return sn_det_df sn_det_results = detections.apply(aux_function) sn_det_results.index.name = 'oid' return sn_det_results
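A small sketch of the per-object input the extractor above expects: a DataFrame indexed by oid with the columns listed in get_required_keys(). It assumes FeatureExtractorSingleBand can be constructed without arguments and that 1 is a valid band code; both are assumptions for illustration only.

# Hypothetical input for SupernovaeDetectionFeatureExtractor; band code and values are illustrative.
import pandas as pd

detections = pd.DataFrame(
    {
        "time": [58000.0, 58001.0, 58002.0],
        "magnitude": [19.5, 19.2, 19.0],
        "isdiffpos": [1, 1, -1],
        "band": [1, 1, 1],
    },
    index=pd.Index(["example_oid"] * 3, name="oid"),
)

extractor = SupernovaeDetectionFeatureExtractor()
features = extractor.compute_feature_in_one_band(detections, band=1)
# one row per oid with delta_mag_fid, delta_mjd_fid, n_pos, n_neg, positive_fraction, ...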
2.4375
2
redditwall.py
gopal131072/Reddit-Wall
1
12783968
import praw
import config
import time
import os

# Getting OAUTH object. Configure these in config.py.
print("Trying to log in to reddit.\n")
try:
    reddit = praw.Reddit(username=config.username,
                         password=<PASSWORD>,
                         client_id=config.client_id,
                         client_secret=config.client_secret,
                         user_agent="Pull wallpapers from /r/wallpapers")
except Exception as exception:
    print(exception)

print("Logged in successfully.\n")

topPosts = reddit.subreddit(config.subreddit).hot(limit=config.imagelimit)

# Variable to make sure it's an image post and not a text post or an album.
imagePost = None

for post in topPosts:
    if imagePost is None:
        url = post.url
        # wget the url only if it has ".jpg" or ".png" in it.
        if ".jpg" not in url and ".png" not in url:
            print("URL is an album or a text post. Moving on.")
            imagePost = None
        else:
            imagePost = url
            print("URL = " + imagePost)

if imagePost is not None:
    # save it with the name given by the image host.
    imagepath = imagePost
    imagepath = imagepath.replace("https://i.redd.it/", "")
    imagepath = imagepath.replace("http://i.imgur.com/", "")
    os.system("wget -O " + config.imgDIR + "/" + imagepath + " " + imagePost)
    imagePost = None
else:
    print("No images found.")
2.90625
3
mephisto/abstractions/blueprints/remote_procedure/remote_procedure_agent_state.py
VjunetXuuftofi/Mephisto
0
12783969
<filename>mephisto/abstractions/blueprints/remote_procedure/remote_procedure_agent_state.py #!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from typing import List, Optional, Dict, Any, Tuple, TYPE_CHECKING from mephisto.abstractions.blueprint import AgentState import os import json import time import weakref from uuid import uuid4 from dataclasses import dataclass, fields if TYPE_CHECKING: from mephisto.data_model.agent import Agent from mephisto.data_model.packet import Packet @dataclass class RemoteRequest: uuid: str target: str args_json: Optional[str] response_json: Optional[str] timestamp: float def to_dict(self): return dict((field.name, getattr(self, field.name)) for field in fields(self)) class RemoteProcedureAgentState(AgentState): """ Holds information about tasks with live interactions in a remote query model. """ def __init__(self, agent: "Agent"): """ Create an agent state that keeps track of incoming actions from the frontend client Initialize with an existing file if it exists. """ self.agent = weakref.proxy(agent) data_file = self._get_expected_data_file() if os.path.exists(data_file): self.load_data() else: self.requests: Dict[str, RemoteRequest] = {} self.start_time = time.time() self.end_time = -1 self.init_data: Optional[Dict[str, Any]] = None self.final_submission: Optional[Dict[str, Any]] = None self.save_data() def set_init_state(self, data: Any) -> bool: """Set the initial state for this agent""" if self.init_data is not None: # Initial state is already set return False else: self.init_data = data self.save_data() return True def get_init_state(self) -> Optional[Dict[str, Any]]: """ Return the initial state for this agent, None if no such state exists """ if self.init_data is None: return None prev_requests = [] if len(self.requests) > 0: requests = self.requests.values() sorted_requests = sorted(requests, key=lambda x: x.timestamp) prev_requests = [r.to_dict() for r in sorted_requests] return {"task_data": self.init_data, "previous_requests": prev_requests} def _get_expected_data_file(self) -> str: """Return the place we would expect to find data for this agent state""" agent_dir = self.agent.get_data_dir() os.makedirs(agent_dir, exist_ok=True) return os.path.join(agent_dir, "state.json") def load_data(self) -> None: """Load stored data from a file to this object""" agent_file = self._get_expected_data_file() with open(agent_file, "r") as state_json: state = json.load(state_json) self.requests = {x["uuid"]: x for x in state["requests"]} self.init_data = state["init_data"] self.outputs = state["final_submission"] def get_data(self) -> Dict[str, Any]: """Return dict with the messages of this agent""" return { "final_submission": self.final_submission, "init_data": self.init_data, "requests": [r.to_dict() for r in self.requests.values()], "start_time": self.start_time, "end_time": self.end_time, } def get_parsed_data(self) -> Dict[str, Any]: """Return the formatted content""" # TODO implement actually getting this data return self.get_data() def get_task_start(self) -> float: """ Return the start time for this task """ return self.start_time def get_task_end(self) -> float: """ Return the end time for this task """ return self.end_time def save_data(self) -> None: """Save all messages from this agent to""" agent_file = self._get_expected_data_file() with open(agent_file, "w+") as state_json: json.dump(self.get_data(), 
state_json) def update_data(self, live_update: Dict[str, Any]) -> None: """ Append the incoming packet as well as who it came from """ if "handles" in live_update: # outgoing response_id = str(uuid4()) response = RemoteRequest( uuid=response_id, target=live_update["handles"], args_json=None, response_json=live_update["response"], timestamp=time.time(), ) self.requests[response_id] = response else: # incoming request = RemoteRequest( uuid=live_update["request_id"], target=live_update["target"], args_json=live_update["args"], response_json=None, timestamp=time.time(), ) self.requests[live_update["request_id"]] = request def update_submit(self, submitted_data: Dict[str, Any]) -> None: """Append any final submission to this state""" self.final_submission = submitted_data self.save_data()
2.125
2
custom_addons/purchase_order_filter/models/purchase_order_filter.py
MonwarAdeeb/Bista_Solutions
0
12783970
<gh_stars>0 from odoo import _, api, fields, models from datetime import datetime, timedelta class PurchaseOrderFilter(models.TransientModel): _name = "purchase.order.filter" _description = "Filter Class for Purchase Order" from_date = fields.Date( string='Date From', default=datetime.today()) to_date = fields.Date( string='Date To', default=datetime.today()) def filter_purchase_orders(self): order_list = [] date_list = [] filtered_list = [] # Partially filtering orders from purchase.order.line to reduce data overhead # [taking non emptry received quantity values] filtered_orders = self.env["purchase.order.line"].search( [('qty_received', '>=', 1)]) # This fully filters all valid orders IDs from partially filtered_orders # [billed quantity is less than received quantity] for order in filtered_orders: if order.qty_invoiced < order.qty_received: order_list.append(order.order_id.id) # Rerieving all objects from purchase.order in selected date range filtered_dates = self.env["purchase.order"].search( ['&', ('date_approve', '>=', self.from_date), ('date_approve', '<=', self.to_date)]) # Retreiving IDs of Valid Dates for date in filtered_dates: date_list.append(date.id) # Filtering valid dates and orders by comparing their IDs, # Then adding to filtered_list for item_o in order_list: for item_d in date_list: if item_o == item_d: filtered_list.append(item_o) # Passing data through domain to the default views of the class return { 'name': 'Purchase Order Filter', 'type': 'ir.actions.act_window', 'res_model': 'purchase.order', 'view_mode': 'tree,form', 'domain': [('id', 'in', filtered_list)], }
2.5
2
get_while_uploading.py
sunguanxiong/kinetics
0
12783971
<filename>get_while_uploading.py import argparse import json import os import ftplib from joblib import delayed from joblib import Parallel def FTP_wrapper(): ftp = ftplib.FTP() ftp.connect ("10.107.1.68") ftp.login ("sunguanxiong", "Sgx19940210") return ftp def download_video(video_name, download_dir, remote_dir, ftp): video_file = os.path.join(download_dir, video_name) status = False # Putting the video to remote directory. cmd = "RETR " + video_name try: with open(video_file, 'wb') as f: ftp.retrbinary(cmd, f.write) except: print("errrrrrrrrrrr!!!!") return tuple([video_name, status, "err"]) ftp.delete(video_name) return tuple([video_name, status, 'Downloaded']) def main(download_dir, remote_dir, num_jobs=24): """ arguments: --------- download_dir: str Directory of the downloaded videos remote_dir: str Remote directory to store videos num_jobs: num Number of processes. """ # Loop until no videos ftp = FTP_wrapper() ftp.cwd(remote_dir) # upload 100 videos every time buffer = 100 video_list = ftp.nlst()[:10] print (video_list) while video_list: if num_jobs == 1: status_lst = [] for video in video_list: print(video) status_lst.append(download_video(video, download_dir, remote_dir, ftp)) else: status_lst = Parallel(n_jobs=num_jobs)(delayed(download_video)( video, download_dir, remote_dir, ftp) for video in video_list) # Save download report. with open('download_report.json', 'w') as fobj: fobj.write(json.dumps(status_lst)) if __name__ == '__main__': description = 'Helper script for uploading while downloading videos' p = argparse.ArgumentParser(description=description) p.add_argument('remote_dir', type=str, help=('The directory for downloaded videos')) p.add_argument('download_dir', type=str, help='Remote directory where videos will be uploaded.') p.add_argument('-n', '--num-jobs', type=int, default=1) main(**vars(p.parse_args()))
2.84375
3
launchers/hosp_mob_mean.py
BidGem/WeatherPE
2
12783972
<filename>launchers/hosp_mob_mean.py<gh_stars>1-10
from weape.argv import get_data_from_xlsx


def main():
    dates, pressure, hospitalizations = get_data_from_xlsx()
    series = hospitalizations.mobile_mean(730)
    eventuality = hospitalizations.eventuality().mobile_mean(730)
    series.plot()
    eventuality.plot()


if __name__ == '__main__':
    main()
1.914063
2
tests/ssd.py
hey-yahei/GluonConverter
10
12783973
#-*- coding: utf-8 -*-
import os

from gluoncv.model_zoo import ssd_512_mobilenet1_0_voc

import sys
sys.path.append("..")
from convert import convert_ssd_model, save_model

if __name__ == "__main__":
    if not os.path.exists("tmp"):
        os.mkdir("tmp")

    net = ssd_512_mobilenet1_0_voc(pretrained=True)
    text_net, binary_weights = convert_ssd_model(net, input_shape=(1, 3, 512, 512), to_bgr=True)
    save_model(text_net, binary_weights, prefix="tmp/mssd512_voc")
2.09375
2
tests/test_dataflow/multiwoz/conftest.py
luweishuang/task_oriented_dialogue_as_dataflow_synthesis
257
12783974
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. from typing import Any, Dict, List, Tuple import pytest from dataflow.multiwoz.trade_dst_utils import BeliefState def convert_belief_dict_to_belief_state(belief_dict: Dict[str, str]) -> BeliefState: belief_state: BeliefState = [] for slot_fullname, slot_value in sorted(belief_dict.items()): belief_state.append({"slots": [[slot_fullname, slot_value]]}) return belief_state def build_trade_dialogue( dialogue_id: str, turns: List[Tuple[str, str, Dict[str, str]]] ) -> Dict[str, Any]: trade_dialogue = { "dialogue_idx": dialogue_id, "dialogue": [ { # Our mock dialogues here use 1-based turn indices. # In real MultiWOZ/TRADE dialogues, turn index starts from 0. "turn_idx": turn_idx + 1, "system_transcript": agent_utt, "transcript": user_utt, "belief_state": convert_belief_dict_to_belief_state(belief_dict), } for turn_idx, (agent_utt, user_utt, belief_dict) in enumerate(turns) ], } return trade_dialogue @pytest.fixture def trade_dialogue_1() -> Dict[str, Any]: return build_trade_dialogue( dialogue_id="dummy_1", turns=[ # turn 1 # activate a domain without constraint, the plan should call "Find" with "EqualityConstraint" # we intentionally to only put two "none" slots in the belief state to match the MultiWoZ annotation style ( "", "i want to book a hotel", {"hotel-name": "none", "hotel-type": "none"}, ), # turn 2 # add constraints, the plan should call "Revise" with "EqualityConstraint" ( "ok what type", "guest house and cheap, probably hilton", { "hotel-name": "hilton", "hotel-pricerange": "cheap", "hotel-type": "guest house", }, ), # turn 3 # drop a constraint (but the domain is still active), the plan should call "Revise" with "EqualityConstraint" ( "no results", "ok try another hotel", { "hotel-name": "none", "hotel-pricerange": "cheap", "hotel-type": "guest house", }, ), # turn 4 # drop the domain ("failed", "ok never mind", {}), # turn 5 # activate the domain again ("sure", "can you find a hotel in west", {"hotel-area": "west"}), # turn 6 # activate a new domain and use a refer call ( "how about this", "ok can you find a restaurant in the same area", {"hotel-area": "west", "restaurant-area": "west"}, ), # turn 7 # use a refer call to get a value from a dead domain # the salience model should find the first valid refer value (skips "none") ( "how about this", "use the same price range as the hotel", { "hotel-area": "west", "restaurant-area": "west", "restaurant-pricerange": "cheap", }, ), # turn 8 # do not change belief state ( "ok", "give me the address", { "hotel-area": "west", "restaurant-area": "west", "restaurant-pricerange": "cheap", }, ), # turn 9 # a new domain ( "ok", "book a taxi now", { "hotel-area": "west", "restaurant-area": "west", "restaurant-pricerange": "cheap", "taxi-departure": "none", }, ), # turn 10 # do not change belief state (make sure the plan is "Revise" not "Find") ( "ok", "ok", { "hotel-area": "west", "restaurant-area": "west", "restaurant-pricerange": "cheap", "taxi-departure": "none", }, ), ], )
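A short illustration of what the helpers above produce for a single-turn dialogue; the slot name and value are made up for the example.

# Illustrative call to build_trade_dialogue; values are made up.
dialogue = build_trade_dialogue(
    dialogue_id="example_0",
    turns=[("", "book a cheap hotel", {"hotel-pricerange": "cheap"})],
)
assert dialogue["dialogue"][0]["turn_idx"] == 1
assert dialogue["dialogue"][0]["belief_state"] == [
    {"slots": [["hotel-pricerange", "cheap"]]}
]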
2.4375
2
scripts/renewable_to_emissions.py
EasyPeasyLemonSqueezy/Climate-Change
0
12783975
<gh_stars>0 #!/usr/bin/env python3 # -*- coding: utf-8 -*- import io import os, sys import json import xlrd from math import ceil, log10 from multiprocessing import Pool from requests import get from base64 import b64decode from bs4 import BeautifulSoup url = 'http://www.iea.org/statistics/statisticssearch/report/?country=%s&product=renewablesandwaste&year=%s' energy_params = ('Biogases', 'Liquid biofuels', 'Geothermal', 'Solar thermal', 'Hydro', 'Solar PV', 'Tide, wave, ocean', 'Wind') awesome = ( 'Belgium', 'Cyprus', 'Czech Republic', 'Denmark', 'European Union', 'Germany', 'Hungary', 'Ireland', 'Malta', 'Netherlands', 'Spain', 'United Kingdom' ) fucked = ( 'Canada', 'Japan', 'Kazakhstan2', 'Latvia', 'New Zealand', 'Russian Federation' ) def parse_energy(country): data = [] for year in range(1990, 2016): values = tuple(map( lambda cell: int(b64decode(cell.find(text=True))), BeautifulSoup(get(url % (country, year)).text, 'html.parser') .find('table') .find_all('tr')[1] .find_all('td')[3:] )) data.append(values) return data def parse_xs(xs): values = [] with xlrd.open_workbook(xs, on_demand=True) as workbook: worksheet = workbook.sheet_by_name(f'Table10s1') for y in range(2, 2 + 26): year = int(worksheet.cell_value(4, y)) value = int(worksheet.cell_value(6, y)) values.append(value) return values def parse(country, cc): print(f'Start: {country}') path = f'data/{country}' emissions = parse_xs(f'{path}/{os.listdir(path)[-1]}') energy = parse_energy(f'{cc}') print(f'Done: {country}') return country, [[u'CO₂e'] + emissions] \ + list(zip(energy_params, *energy)) \ + [['years'] + list(range(1990, 2016))] if __name__ == '__main__': if sys.version_info < (3, 6): sys.exit('Python 3.6 or later is required.\n') with open('countries.json') as f: countries = json.load(f) with Pool(processes=20) as pool: res = pool.starmap(parse, [(c, countries[c]) for c in awesome + fucked]) data = { c: v for c, v in res } data['Kazakhstan'] = data.pop('Kazakhstan2') with io.open(f'renewable_to_emissions.min.json', 'w', encoding='utf8') as f: json.dump(data, f, ensure_ascii=False)
2.46875
2
tests/scrape/test_loaders.py
gudeg-united/mishapp-ds
0
12783976
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import pytest


@pytest.mark.parametrize("value, format_, expectation", [
    ("01-Jan-15 10:00:00 +07:00", "DD-MMM-YY HH:mm:ss Z", "2015-01-01T03:00:00+00:00"),  # noqa
    ("01-01-2015 10:00:00 +07:00", "DD-MM-YYYY HH:mm:ss Z", "2015-01-01T03:00:00+00:00"),  # noqa
])
def test_wib_to_utc(value, format_, expectation):
    from mishapp_ds.scrape.loaders import datetime_to_utc

    assert datetime_to_utc(value, format_) == expectation
2.328125
2
test/test_initial_setting_presenter.py
pei223/labelpt
6
12783977
<filename>test/test_initial_setting_presenter.py<gh_stars>1-10
import tempfile

from labelpt.app import is_directory_path_exist
import shutil
import unittest


class TestInitialSettingPresenter(unittest.TestCase):
    def setUp(self) -> None:
        self.test_dir = tempfile.mkdtemp()

    def tearDown(self) -> None:
        shutil.rmtree(self.test_dir)

    def test_is_directory_path_exist(self):
        self.assertFalse(is_directory_path_exist("hogehogehoge"))
        self.assertTrue(is_directory_path_exist("test"))
2.78125
3
mould/gitignore.py
michaeljoseph/mould
6
12783978
<reponame>michaeljoseph/mould
def read_ignore(ignore_content):
    return [
        ignore_line
        for ignore_line in ignore_content.split()
        if not ignore_line.startswith('#')
    ]


def remove_ignores(file_paths, ignore_list):
    """
    Remove files that match gitignore patterns.

    :param file_paths: iterable of file path strings to filter
    :param ignore_list: gitignore-style patterns (as returned by read_ignore)
    :return: the file paths that do not match any ignore pattern
    """
    # https://stackoverflow.com/a/25230908/5549
    from fnmatch import fnmatch

    matches = []
    for ignore in ignore_list:
        file_paths = [
            n for n in file_paths
            if n.startswith('#') or not fnmatch(n, ignore)
        ]
    matches.extend(file_paths)
    return matches
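A quick illustrative run of the two helpers above; the ignore-file content and file paths are made up for the example.

# Illustrative use of read_ignore / remove_ignores; patterns and paths are made up.
ignore_list = read_ignore("#artifacts\n*.pyc\nbuild\n")   # -> ['*.pyc', 'build']
files = ["mould/gitignore.py", "mould/gitignore.pyc", "build", "README.md"]
print(remove_ignores(files, ignore_list))
# -> ['mould/gitignore.py', 'README.md']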
3.203125
3
housecanary/apiclient.py
SpainTrain/hc-api-python
18
12783979
<reponame>SpainTrain/hc-api-python from __future__ import print_function import os from builtins import object from housecanary.output import ResponseOutputGenerator from housecanary.requestclient import RequestClient import housecanary.exceptions import housecanary.constants as constants from requests.auth import HTTPBasicAuth class ApiClient(object): """Base class for making API calls""" def __init__(self, auth_key=None, auth_secret=None, version=None, request_client=None, output_generator=None, auth=None): """ auth_key and auth_secret can be passed in as parameters or pulled automatically from the following environment variables: HC_API_KEY -- Your HouseCanary API auth key HC_API_SECRET -- Your HouseCanary API secret Passing in the key and secret as parameters will take precedence over environment variables. Args: auth_key - Optional. The HouseCanary API auth key. auth_secret - Optional. The HouseCanary API secret. version (str) - Optional. The API version to use, in the form "v2", for example. Default is "v2". request_client - Optional. An instance of a class that is responsible for making http requests. It must implement a `post` method. Default is RequestClient. output_generator - Optional. An instance of an OutputGenerator that implements a `process_response` method. Can be used to implement custom serialization of the response data from the request client. Default is ResponseOutputGenerator. authenticator - Optional. An instance of a requests.auth.AuthBase implementation for providing authentication to the request. Default is requests.auth.HTTPBasicAuth. """ self._auth_key = auth_key or os.getenv('HC_API_KEY') self._auth_secret = auth_secret or os.getenv('HC_API_SECRET') self._version = version or constants.DEFAULT_VERSION # user can pass in a custom request_client self._request_client = request_client # if no request_client provided, use the defaults. if self._request_client is None: # allow using custom OutputGenerator or Authenticator with the RequestClient _output_generator = output_generator or ResponseOutputGenerator() _auth = auth or HTTPBasicAuth(self._auth_key, self._auth_secret) self._request_client = RequestClient(_output_generator, _auth) self.property = PropertyComponentWrapper(self) self.block = BlockComponentWrapper(self) self.zip = ZipComponentWrapper(self) self.msa = MsaComponentWrapper(self) def fetch(self, endpoint_name, identifier_input, query_params=None): """Calls this instance's request_client's post method with the specified component endpoint Args: - endpoint_name (str) - The endpoint to call like "property/value". - identifier_input - One or more identifiers to request data for. An identifier can be in one of these forms: - A list of property identifier dicts: - A property identifier dict can contain the following keys: (address, zipcode, unit, city, state, slug, meta). One of 'address' or 'slug' is required. Ex: [{"address": "82 County Line Rd", "zipcode": "72173", "meta": "some ID"}] A slug is a URL-safe string that identifies a property. These are obtained from HouseCanary. Ex: [{"slug": "123-Example-St-San-Francisco-CA-94105"}] - A list of dicts representing a block: - A block identifier dict can contain the following keys: (block_id, num_bins, property_type, meta). 'block_id' is required. Ex: [{"block_id": "060750615003005", "meta": "some ID"}] - A list of dicts representing a zipcode: Ex: [{"zipcode": "90274", "meta": "some ID"}] - A list of dicts representing an MSA: Ex: [{"msa": "41860", "meta": "some ID"}] The "meta" field is always optional. 
Returns: A Response object, or the output of a custom OutputGenerator if one was specified in the constructor. """ endpoint_url = constants.URL_PREFIX + "/" + self._version + "/" + endpoint_name if query_params is None: query_params = {} if len(identifier_input) == 1: # If only one identifier specified, use a GET request query_params.update(identifier_input[0]) return self._request_client.get(endpoint_url, query_params) # when more than one address, use a POST request return self._request_client.post(endpoint_url, identifier_input, query_params) def fetch_synchronous(self, endpoint_name, query_params=None): """Calls this instance's request_client's get method with the specified component endpoint""" endpoint_url = constants.URL_PREFIX + "/" + self._version + "/" + endpoint_name if query_params is None: query_params = {} return self._request_client.get(endpoint_url, query_params) class ComponentWrapper(object): def __init__(self, api_client=None): """ Args: - api_client - An instance of ApiClient """ self._api_client = api_client def _convert_to_identifier_json(self, identifier_data): """Override in subclasses""" raise NotImplementedError() def get_identifier_input(self, identifier_data): """Convert the various formats of input identifier_data into the proper json format expected by the ApiClient fetch method, which is a list of dicts.""" identifier_input = [] if isinstance(identifier_data, list) and len(identifier_data) > 0: # if list, convert each item in the list to json for address in identifier_data: identifier_input.append(self._convert_to_identifier_json(address)) else: identifier_input.append(self._convert_to_identifier_json(identifier_data)) return identifier_input def fetch_identifier_component(self, endpoint_name, identifier_data, query_params=None): """Common method for handling parameters before passing to api_client""" if query_params is None: query_params = {} identifier_input = self.get_identifier_input(identifier_data) return self._api_client.fetch(endpoint_name, identifier_input, query_params) class PropertyComponentWrapper(ComponentWrapper): """Property specific components All the component methods of this class (except value_report and rental_report) take data as a parameter. data can be in the following forms: - A dict like: {"address": "82 County Line Rd", "zipcode": "72173", "meta": "someID"} or {"address": "82 County Line Rd", "city": "San Francisco", "state": "CA", "meta": "someID"} or {"slug": "123-Example-St-San-Francisco-CA-94105"} - A list of dicts as specified above: [{"address": "82 County Line Rd", "zipcode": "72173", "meta": "someID"}, {"address": "43 Valmonte Plaza", "zipcode": "90274", "meta": "someID2"}] - A single string representing a slug: "123-Example-St-San-Francisco-CA-94105" - A tuple in the form of (address, zipcode, meta) like: ("82 County Line Rd", "72173", "someID") - A list of (address, zipcode, meta) tuples like: [("82 County Line Rd", "72173", "someID"), ("43 Valmonte Plaza", "90274", "someID2")] Using a tuple only supports address, zipcode and meta. To specify city, state, unit or slug, please use a dict. The "meta" field is always optional. 
The available keys in the dict are: - address (required if no slug) - slug (required if no address) - zipcode (optional) - unit (optional) - city (optional) - state (optional) - meta (optional) - client_value (optional, for "value_within_block" and "rental_value_within_block") - client_value_sqft (optional, for "value_within_block" and "rental_value_within_block") All the component methods of this class return a PropertyResponse object, (or ValueReportResponse or RentalReportResponse) or the output of a custom OutputGenerator if one was specified in the constructor. """ def _convert_to_identifier_json(self, address_data): """Convert input address data into json format""" if isinstance(address_data, str): # allow just passing a slug string. return {"slug": address_data} if isinstance(address_data, tuple) and len(address_data) > 0: address_json = {"address": address_data[0]} if len(address_data) > 1: address_json["zipcode"] = address_data[1] if len(address_data) > 2: address_json["meta"] = address_data[2] return address_json if isinstance(address_data, dict): allowed_keys = ["address", "zipcode", "unit", "city", "state", "slug", "meta", "client_value", "client_value_sqft"] # ensure the dict does not contain any unallowed keys for key in address_data: if key not in allowed_keys: msg = "Key in address input not allowed: " + key raise housecanary.exceptions.InvalidInputException(msg) # ensure it contains an "address" key if "address" in address_data or "slug" in address_data: return address_data # if we made it here, the input was not valid. msg = ("Input is invalid. Must be a list of (address, zipcode) tuples, or a dict or list" " of dicts with each item containing at least an 'address' or 'slug' key.") raise housecanary.exceptions.InvalidInputException((msg)) def block_histogram_baths(self, data): """Call the block_histogram_baths endpoint""" return self.fetch_identifier_component("property/block_histogram_baths", data) def block_histogram_beds(self, data): """Call the block_histogram_beds endpoint""" return self.fetch_identifier_component("property/block_histogram_beds", data) def block_histogram_building_area(self, data): """Call the block_histogram_building_area endpoint""" return self.fetch_identifier_component("property/block_histogram_building_area", data) def block_histogram_value(self, data): """Call the block_histogram_value endpoint""" return self.fetch_identifier_component("property/block_histogram_value", data) def block_histogram_value_sqft(self, data): """Call the block_histogram_value_sqft endpoint""" return self.fetch_identifier_component("property/block_histogram_value_sqft", data) def block_rental_value_distribution(self, data): """Call the block_rental_value_distribution endpoint""" return self.fetch_identifier_component("property/block_rental_value_distribution", data) def block_value_distribution(self, data): """Call the block_value_distribution endpoint""" return self.fetch_identifier_component("property/block_value_distribution", data) def block_value_ts(self, data): """Call the block_value_ts endpoint""" return self.fetch_identifier_component("property/block_value_ts", data) def block_value_ts_historical(self, data): """Call the block_value_ts_historical endpoint""" return self.fetch_identifier_component("property/block_value_ts_historical", data) def block_value_ts_forecast(self, data): """Call the block_value_ts_forecast endpoint""" return self.fetch_identifier_component("property/block_value_ts_forecast", data) def census(self, data): """Call the census endpoint""" 
return self.fetch_identifier_component("property/census", data) def details(self, data): """Call the details endpoint""" return self.fetch_identifier_component("property/details", data) def flood(self, data): """Call the flood endpoint""" return self.fetch_identifier_component("property/flood", data) def ltv(self, data): """Call the ltv endpoint""" return self.fetch_identifier_component("property/ltv", data) def ltv_details(self, data): """Call the ltv_details endpoint""" return self.fetch_identifier_component("property/ltv_details", data) def mortgage_lien(self, data): """Call the mortgage_lien endpoint""" return self.fetch_identifier_component("property/mortgage_lien", data) def msa_details(self, data): """Call the msa_details endpoint""" return self.fetch_identifier_component("property/msa_details", data) def msa_hpi_ts(self, data): """Call the msa_hpi_ts endpoint""" return self.fetch_identifier_component("property/msa_hpi_ts", data) def msa_hpi_ts_forecast(self, data): """Call the msa_hpi_ts_forecast endpoint""" return self.fetch_identifier_component("property/msa_hpi_ts_forecast", data) def msa_hpi_ts_historical(self, data): """Call the msa_hpi_ts_historical endpoint""" return self.fetch_identifier_component("property/msa_hpi_ts_historical", data) def nod(self, data): """Call the nod endpoint""" return self.fetch_identifier_component("property/nod", data) def owner_occupied(self, data): """Call the owner_occupied endpoint""" return self.fetch_identifier_component("property/owner_occupied", data) def rental_value(self, data): """Call the rental_value endpoint""" return self.fetch_identifier_component("property/rental_value", data) def rental_value_within_block(self, data): """Call the rental_value_within_block endpoint""" return self.fetch_identifier_component("property/rental_value_within_block", data) def sales_history(self, data): """Call the sales_history endpoint""" return self.fetch_identifier_component("property/sales_history", data) def school(self, data): """Call the school endpoint""" return self.fetch_identifier_component("property/school", data) def value(self, data): """Call the value endpoint""" return self.fetch_identifier_component("property/value", data) def value_forecast(self, data): """Call the value_forecast endpoint""" return self.fetch_identifier_component("property/value_forecast", data) def value_within_block(self, data): """Call the value_within_block endpoint""" return self.fetch_identifier_component("property/value_within_block", data) def zip_details(self, data): """Call the zip_details endpoint""" return self.fetch_identifier_component("property/zip_details", data) def zip_hpi_forecast(self, data): """Call the zip_hpi_forecast endpoint""" return self.fetch_identifier_component("property/zip_hpi_forecast", data) def zip_hpi_historical(self, data): """Call the zip_hpi_historical endpoint""" return self.fetch_identifier_component("property/zip_hpi_historical", data) def zip_hpi_ts(self, data): """Call the zip_hpi_ts endpoint""" return self.fetch_identifier_component("property/zip_hpi_ts", data) def zip_hpi_ts_forecast(self, data): """Call the zip_hpi_ts_forecast endpoint""" return self.fetch_identifier_component("property/zip_hpi_ts_forecast", data) def zip_hpi_ts_historical(self, data): """Call the zip_hpi_ts_historical endpoint""" return self.fetch_identifier_component("property/zip_hpi_ts_historical", data) def zip_volatility(self, data): """Call the zip_volatility endpoint""" return self.fetch_identifier_component("property/zip_volatility", data) def 
component_mget(self, data, components): """Call the component_mget endpoint Args: - data - As described in the class docstring. - components - A list of strings for each component to include in the request. Example: ["property/details", "property/flood", "property/value"] """ if not isinstance(components, list): print("Components param must be a list") return query_params = {"components": ",".join(components)} return self.fetch_identifier_component( "property/component_mget", data, query_params) def value_report(self, address, zipcode, report_type="full", format_type="json"): """Call the value_report component Value Report only supports a single address. Args: - address - zipcode Kwargs: - report_type - "full" or "summary". Default is "full". - format_type - "json", "pdf", "xlsx" or "all". Default is "json". """ query_params = { "report_type": report_type, "format": format_type, "address": address, "zipcode": zipcode } return self._api_client.fetch_synchronous("property/value_report", query_params) def rental_report(self, address, zipcode, format_type="json"): """Call the rental_report component Rental Report only supports a single address. Args: - address - zipcode Kwargs: - format_type - "json", "xlsx" or "all". Default is "json". """ # only json is supported by rental report. query_params = { "format": format_type, "address": address, "zipcode": zipcode } return self._api_client.fetch_synchronous("property/rental_report", query_params) class BlockComponentWrapper(ComponentWrapper): """Block specific components All the component methods of this class take block_data as a parameter. block_data can be in the following forms: - A dict with a ``block_id`` like: {"block_id": "060750615003005", "meta": "someId"} - For histogram endpoints you can include the ``num_bins`` key: {"block_id": "060750615003005", "num_bins": 5, "meta": "someId"} - For time series and distribution endpoints you can include the ``property_type`` key: {"block_id": "060750615003005", "property_type": "SFD", "meta": "someId"} - A list of dicts as specified above: [{"block_id": "012345678901234", "meta": "someId"}, {"block_id": "012345678901234", "meta": "someId2}] - A single string representing a ``block_id``: "012345678901234" - A list of ``block_id`` strings: ["012345678901234", "060750615003005"] The "meta" field is always optional. All the component methods of this class return a BlockResponse object, or the output of a custom OutputGenerator if one was specified in the constructor. """ def _convert_to_identifier_json(self, block_data): if isinstance(block_data, str): # allow just passing a block_id string. return {"block_id": block_data} if isinstance(block_data, dict): allowed_keys = ["block_id", "num_bins", "property_type", "meta"] # ensure the dict does not contain any unallowed keys for key in block_data: if key not in allowed_keys: msg = "Key in block input not allowed: " + key raise housecanary.exceptions.InvalidInputException(msg) # ensure it contains a "block_id" key if "block_id" in block_data: return block_data # if we made it here, the input was not valid. msg = ("Input is invalid. 
Must be a dict or list of dicts" " with each item containing at least 'block_id' key.") raise housecanary.exceptions.InvalidInputException((msg)) def histogram_baths(self, block_data): """Call the histogram_baths endpoint""" return self.fetch_identifier_component("block/histogram_baths", block_data) def histogram_beds(self, block_data): """Call the histogram_beds endpoint""" return self.fetch_identifier_component("block/histogram_beds", block_data) def histogram_building_area(self, block_data): """Call the histogram_building_area endpoint""" return self.fetch_identifier_component("block/histogram_building_area", block_data) def histogram_value(self, block_data): """Call the histogram_value endpoint""" return self.fetch_identifier_component("block/histogram_value", block_data) def histogram_value_sqft(self, block_data): """Call the histogram_value_sqft endpoint""" return self.fetch_identifier_component("block/histogram_value_sqft", block_data) def rental_value_distribution(self, block_data): """Call the rental_value_distribution endpoint""" return self.fetch_identifier_component("block/rental_value_distribution", block_data) def value_distribution(self, block_data): """Call the value_distribution endpoint""" return self.fetch_identifier_component("block/value_distribution", block_data) def value_ts(self, block_data): """Call the value_ts endpoint""" return self.fetch_identifier_component("block/value_ts", block_data) def value_ts_forecast(self, block_data): """Call the value_ts_forecast endpoint""" return self.fetch_identifier_component("block/value_ts_forecast", block_data) def value_ts_historical(self, block_data): """Call the value_ts_historical endpoint""" return self.fetch_identifier_component("block/value_ts_historical", block_data) def component_mget(self, block_data, components): """Call the block component_mget endpoint Args: - block_data - As described in the class docstring. - components - A list of strings for each component to include in the request. Example: ["block/value_ts", "block/value_distribution"] """ if not isinstance(components, list): print("Components param must be a list") return query_params = {"components": ",".join(components)} return self.fetch_identifier_component( "block/component_mget", block_data, query_params) class ZipComponentWrapper(ComponentWrapper): """Zip specific components All of the Analytics API zip endpoints take a ``zip_data`` argument. zip_data can be in the following forms: - A dict with a ``zipcode`` like: {"zipcode": "90274", "meta": "someId"} - A list of dicts as specified above: [{"zipcode": "90274", "meta": "someId"}, {"zipcode": "01960", "meta": "someId2}] - A single string representing a ``zipcode``: "90274" - A list of ``zipcode`` strings: ["90274", "01960"] The "meta" field is always optional. All of the zip endpoint methods return a ZipResponse, or the output of a custom OutputGenerator if one was specified in the constructor. """ def _convert_to_identifier_json(self, zip_data): if isinstance(zip_data, str): # allow just passing a zipcode string. return {"zipcode": zip_data} if isinstance(zip_data, dict): allowed_keys = ["zipcode", "meta"] # ensure the dict does not contain any unallowed keys for key in zip_data: if key not in allowed_keys: msg = "Key in zip input not allowed: " + key raise housecanary.exceptions.InvalidInputException(msg) # ensure it contains a "zipcode" key if "zipcode" in zip_data: return zip_data # if we made it here, the input was not valid. msg = ("Input is invalid. 
Must be a dict or list of dicts" " with each item containing at least 'zipcode' key.") raise housecanary.exceptions.InvalidInputException((msg)) def details(self, zip_data): """Call the details endpoint""" return self.fetch_identifier_component("zip/details", zip_data) def hpi_forecast(self, zip_data): """Call the hpi_forecast endpoint""" return self.fetch_identifier_component("zip/hpi_forecast", zip_data) def hpi_historical(self, zip_data): """Call the hpi_historical endpoint""" return self.fetch_identifier_component("zip/hpi_historical", zip_data) def hpi_ts(self, zip_data): """Call the hpi_ts endpoint""" return self.fetch_identifier_component("zip/hpi_ts", zip_data) def hpi_ts_forecast(self, zip_data): """Call the hpi_ts_forecast endpoint""" return self.fetch_identifier_component("zip/hpi_ts_forecast", zip_data) def hpi_ts_historical(self, zip_data): """Call the hpi_ts_historical endpoint""" return self.fetch_identifier_component("zip/hpi_ts_historical", zip_data) def volatility(self, zip_data): """Call the volatility endpoint""" return self.fetch_identifier_component("zip/volatility", zip_data) def component_mget(self, zip_data, components): """Call the zip component_mget endpoint Args: - zip_data - As described in the class docstring. - components - A list of strings for each component to include in the request. Example: ["zip/details", "zip/volatility"] """ if not isinstance(components, list): print("Components param must be a list") return query_params = {"components": ",".join(components)} return self.fetch_identifier_component( "zip/component_mget", zip_data, query_params) class MsaComponentWrapper(ComponentWrapper): """MSA specific components All of the Analytics API msa endpoints take an ``msa_data`` argument. msa_data can be in the following forms: - A dict with an ``msa`` like: {"msa": "41860", "meta": "someId"} - A list of dicts as specified above: [{"msa": "41860", "meta": "someId"}, {"msa": "40928", "meta": "someId2}] - A single string representing a ``msa``: "41860" - A list of ``msa`` strings: ["41860", "40928"] The "meta" field is always optional. All of the msa endpoint methods return an MsaResponse, or the output of a custom OutputGenerator if one was specified in the constructor. """ def _convert_to_identifier_json(self, msa_data): if isinstance(msa_data, str): # allow just passing a msa string. return {"msa": msa_data} if isinstance(msa_data, dict): allowed_keys = ["msa", "meta"] # ensure the dict does not contain any unallowed keys for key in msa_data: if key not in allowed_keys: msg = "Key in msa input not allowed: " + key raise housecanary.exceptions.InvalidInputException(msg) # ensure it contains a "msa" key if "msa" in msa_data: return msa_data # if we made it here, the input was not valid. msg = ("Input is invalid. 
Must be a dict or list of dicts" " with each item containing at least 'msa' key.") raise housecanary.exceptions.InvalidInputException((msg)) def details(self, msa_data): """Call the details endpoint""" return self.fetch_identifier_component("msa/details", msa_data) def hpi_ts(self, msa_data): """Call the hpi_ts endpoint""" return self.fetch_identifier_component("msa/hpi_ts", msa_data) def hpi_ts_forecast(self, msa_data): """Call the hpi_ts_forecast endpoint""" return self.fetch_identifier_component("msa/hpi_ts_forecast", msa_data) def hpi_ts_historical(self, msa_data): """Call the hpi_ts_historical endpoint""" return self.fetch_identifier_component("msa/hpi_ts_historical", msa_data) def component_mget(self, msa_data, components): """Call the msa component_mget endpoint Args: - msa_data - As described in the class docstring. - components - A list of strings for each component to include in the request. Example: ["msa/details", "msa/hpi_ts"] """ if not isinstance(components, list): print("Components param must be a list") return query_params = {"components": ",".join(components)} return self.fetch_identifier_component( "msa/component_mget", msa_data, query_params)
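A rough usage sketch of the wrappers above, assuming they are reached through an ApiClient that exposes them as client.property, client.block, client.zip and client.msa (that wiring is not shown in this excerpt); the address, slug and block_id values are illustrative only:

# Usage sketch only: the ApiClient construction and attribute names are assumptions
# based on the docstrings above; identifier values are illustrative.
import housecanary

client = housecanary.ApiClient()  # credentials assumed to come from env/config

# a single (address, zipcode) tuple
details = client.property.details(("43 Valmonte Plaza", "90274"))

# several identifiers at once, each as a dict with an optional "meta" echo field
values = client.property.value([
    {"address": "43 Valmonte Plaza", "zipcode": "90274", "meta": "prop-1"},
    {"slug": "123-Example-St-90274", "meta": "prop-2"},  # hypothetical slug
])

# several components in one request via component_mget
combo = client.property.component_mget(
    ("43 Valmonte Plaza", "90274"),
    ["property/details", "property/flood", "property/value"],
)

# zip- and block-level wrappers accept plain identifier strings or dicts as well
zip_details = client.zip.details("90274")
beds_hist = client.block.histogram_beds({"block_id": "060750615003005", "num_bins": 5})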
2.90625
3
qs2/operations.py
steinarvk/numera-te-ipsum
0
12783980
from qs2 import model import qs2.users import qs2.validation import datetime import users import decimal import logging import time import qs2.logutil import sqlalchemy import random import csv import json import pytz import functools from qs2 import ui import qs2.csvexport from qs2.timeutil import (hacky_force_timezone, truncate_to_second_resolution) from sqlalchemy import sql from qs2.error import OperationFailed, ValidationFailed from qs2.dbutil import sql_op from qs2.dbutil import create_trigger PRIORITY_NORMAL = 0 PRIORITY_CORRECTION = -10 from prometheus_client import (Counter, Histogram) metric_db_ops_begun = Counter( "qs_db_ops_begun", "Database operations begun", ["op"]) metric_db_op_latency = Histogram( "qs_db_op_latency", "Database operation latency", ["op"]) def record_metrics_for_op(name): def decorator(function): @functools.wraps(function) def wrapper(*args, **kwargs): metric_db_ops_begun.labels(name).inc() t0 = time.time() rv = function(*args, **kwargs) t1 = time.time() metric_db_op_latency.labels(name).observe(t1-t0) return rv return wrapper return decorator class DbFetch(object): def __init__(self, conn, columns=[], **kwargs): self.conn = conn self.columns = columns self.option = kwargs def strict_confirm(message): confirm = ui.UI.raw_input("{} ['yes' to continue] ".format(message)) if confirm.strip() != "yes": raise OperationFailed("user aborted") def confirm(message): while True: letter = ui.UI.raw_input("{} [Yn] ".format(message)).lower() if letter in ("", "y"): return if letter == "n": raise OperationFailed("user aborted") def drop_all(conn): strict_confirm("Really delete the entire database, losing all data?") with qs2.logutil.section("dropping the database"): model.metadata.reflect(conn) model.metadata.drop_all(conn) def initialize(conn): with qs2.logutil.section("initializing the database"): model.metadata.create_all(conn) def add_user(conn, username, password): qs2.validation.check("username", username) qs2.validation.check("password", password, secret=True) query = model.users.insert().values( timestamp=datetime.datetime.now(), username=username, password_hash=users.hash_password(password) ) (user_id,) = sql_op(conn, "create user", query).inserted_primary_key logging.info("created user '%s' (%d)", username, user_id) return user_id def get_user_id(conn, username): query = sql.select([model.users.c.user_id]).where( model.users.c.username == username ) row = sql_op(conn, "fetch user", query).fetchone() if not row: raise OperationFailed("user ('{}') not found".format(username)) return row["user_id"] def authenticate_user(conn, username, password): query = sql.select( [model.users.c.user_id, model.users.c.password_hash]).where( model.users.c.username == username ) row = sql_op(conn, "fetch user", query).fetchone() ok = users.verify_password(password, row["password_hash"]) if ok: return row["user_id"] def verify_user_interactive(conn, username): password = ui.UI.getpass("Password for {}: ".format(username)) user_id = authenticate_user(conn, username, password) if not user_id: raise OperationFailed("authentication failed") logging.info("verified as user %d/%s", user_id, username) return user_id def add_user_interactive(conn, username): qs2.validation.check("username", username) password = ui.UI.getpass("Password: ") repeat_password = ui.UI.getpass("Confirm password: ") if password != repeat_password: raise OperationFailed("user input error -- passwords did not match") add_user(conn, username, password) def cli_query_form(*fields): rv = {} for field_key, field_name, 
required, convert in fields: text = ui.UI.raw_input("Enter {}: ".format(field_name)).strip() if not required and not text: print "{}: skipped".format(field_name) continue rv[field_key] = convert(text) print "{}: '{}'".format(field_name, rv[field_key]) return rv def fetch_chess_puzzle(conn, user_id, chess_puzzle_id): logging.info("attempting to fetch chess puzzle %d for user %d", chess_puzzle_id, user_id) columns = [model.chess_puzzles] query = sql.select(columns).where( (model.chess_puzzles.c.user_id_owner == user_id) & (model.chess_puzzles.c.chess_puzzle_id == chess_puzzle_id) ) row = sql_op(conn, "fetch chess puzzle by ID", query).fetchone() if row: return dict(row) def post_chess_puzzle_answer(conn, user_id, req_id, chess_puzzle_id, timestamp, move, expired, answer_latency): with conn.begin() as trans: query = sql.select([model.chess_answers.c.chess_answer_id]).where( (model.chess_answers.c.user_id_owner == user_id) & (model.chess_answers.c.chess_puzzle_id == chess_puzzle_id) ).limit(1) rows = sql_op(conn, "looking for prior answers", query).fetchall() if len(rows) > 0: raise OperationFailed("puzzle already answered") query = model.chess_answers.insert().values( timestamp=timestamp, user_id_owner=user_id, req_id_creator=req_id, chess_puzzle_id=chess_puzzle_id, move=move, expired=expired, answer_latency=answer_latency, ) (answer_id,) = sql_op(conn, "post chess type", query).inserted_primary_key return answer_id def register_chess_puzzle(conn, user_id, req_id, fen, deadline, pgn=None, move_number=None): query = model.chess_puzzles.insert().values( timestamp=datetime.datetime.now(tz=pytz.utc), req_id_creator=req_id, user_id_owner=user_id, fen=fen, deadline=deadline, pgn=str(pgn), move_number=move_number, ) (chess_puzzle_id,) = sql_op(conn, "create chess puzzle", query).inserted_primary_key return { "chess_puzzle_id": chess_puzzle_id, "fen": fen, "deadline": deadline, } def add_question(conn, user_id, question, low_label, high_label, middle_label=None, metadata=None, req_id_creator=None, trigger_spec=None): qs2.validation.check("question", question, secret=True) qs2.validation.check("label", low_label, secret=True) qs2.validation.check("label", high_label, secret=True) if middle_label: qs2.validation.check("label", middle_label, secret=True) if metadata: metadata = json.dumps(metadata) with conn.begin() as trans: trigger_id = create_trigger(conn, user_id, "question", trigger_spec) now = datetime.datetime.now() query = model.survey_questions.insert().values( timestamp=now, user_id_owner=user_id, question=question, low_label=low_label, high_label=high_label, middle_label=middle_label, metadata=metadata, trigger_id=trigger_id, req_id_creator=req_id_creator, ) (sq_id,) = sql_op(conn, "create question", query).inserted_primary_key logging.info("created question #%d", sq_id) return sq_id def maybe_auth(conn, username, skip_auth): if skip_auth: return get_user_id(conn, username) else: return verify_user_interactive(conn, username) def question_queue_query(user_id): return sql.select([model.survey_questions]).where( (model.survey_questions.c.user_id_owner == user_id) & (model.survey_questions.c.active) ).order_by(model.survey_questions.c.next_trigger.asc()) _implicit_trigger_columns = [ model.triggers.c.mean_delay, model.triggers.c.min_delay, model.triggers.c.next_trigger, model.triggers.c.never_trigger_before, model.triggers.c.active, ] @qs2.logutil.profiled("get_all_event_types") @record_metrics_for_op("get_all_event_types") def get_all_event_types(conn, user_id): columns = [model.event_types] + 
_implicit_trigger_columns query = sql.select(columns).\ select_from(model.event_types.join(model.triggers)).\ where( (model.event_types.c.user_id_owner == user_id) ).order_by(model.event_types.c.timestamp.asc()) return sql_op(conn, "fetch event types", query).fetchall() @qs2.logutil.profiled("get_all_questions") @record_metrics_for_op("get_all_questions") def get_all_questions(conn, user_id): columns = [model.survey_questions] + _implicit_trigger_columns query = sql.select(columns).\ select_from(model.survey_questions.join(model.triggers)).\ where( (model.survey_questions.c.user_id_owner == user_id) ).order_by(model.survey_questions.c.timestamp.asc()) return map(dict, sql_op(conn, "fetch questions", query).fetchall()) @qs2.logutil.profiled("get_pending_corrections") @record_metrics_for_op("get_pending_corrections") def get_pending_events_corrections(conn, user_id, limit=None): query = sql.select([model.event_record]).where( (model.event_record.c.user_id_owner == user_id) & (model.event_record.c.status == "unreported") ).order_by(model.event_record.c.start.asc()).limit(limit) results = sql_op(conn, "fetch unreported events", query).fetchall() evt_id_to_names_memo = {} def taskify(row): start = hacky_force_timezone(row.start) end = hacky_force_timezone(row.end) # TODO oh no, horrible, fix to use a join (late night!) try: evt_name = evt_id_to_names_memo[row.evt_id] except KeyError: evt_name = fetch_event_type(conn, user_id, row.evt_id).name evt_id_to_names_memo[row.evt_id] = evt_name return { "type": "event", "subtype": "correct", "event_correct": { "event_type_id": row.evt_id, "name": evt_name, "start": qs2.qsjson.json_string_datetime(start), "end": qs2.qsjson.json_string_datetime(end), } } count = 0 # TODO earliest = None # TODO return [(PRIORITY_CORRECTION, row.start, taskify(row)) for row in results], count, earliest def make_trigger_conditions(conn, user_id, force): now = datetime.datetime.now() condition = base_condition = ( (model.triggers.c.user_id_owner == user_id) & (model.triggers.c.active) & ((model.triggers.c.never_trigger_before == None) | (model.triggers.c.never_trigger_before < now)) ) if not force: condition = base_condition & (model.triggers.c.next_trigger < now) return condition, base_condition def get_question_challenge(question): return { "type": "question", "question": qs2.qsjson.survey_question_json(dict(question)), } @qs2.logutil.profiled("get_pending_append") @record_metrics_for_op("get_pending_append") def get_pending_event_append(conn, user_id, event_type): # TODO, this is horrible, optimize query! 
(written very late at night) tail = fetch_event_report_tail(conn, user_id, event_type) return { "type": "event", "subtype": "append", "event_append": { "event_type_id": event_type.evt_id, "name": event_type.name, "use_duration": event_type.use_duration, "start": qs2.qsjson.json_string_datetime(tail), "end": "now", }, } @qs2.logutil.profiled("get_pending_appends") @record_metrics_for_op("get_pending_appends") def get_pending_events_appends(conn, user_id, force=False, limit=None): columns = [model.event_types] + _implicit_trigger_columns joined = model.triggers.join(model.event_types) condition, base_condition = make_trigger_conditions(conn, user_id, force=force) query = sql.select(columns).select_from(joined).where(condition) query = query.order_by(model.triggers.c.next_trigger.asc()).limit(limit) results = sql_op(conn, "fetch pending events", query) rv = [] for row in results: rv.append((PRIORITY_NORMAL, row.next_trigger, get_pending_event_append(conn, user_id, row))) count = 0 # TODO earliest = None # TODO return rv, count, earliest @qs2.logutil.profiled("get_pending_events") @record_metrics_for_op("get_pending_events") def get_pending_events(conn, user_id, force=False, limit=None): crv, cc, cea = get_pending_events_corrections(conn, user_id, limit=limit) prv, pc, pea = get_pending_events_appends(conn, user_id, force=force, limit=limit) earliest = cea if pea is not None: if earliest is None or pea < earliest: earliest = pea return crv + prv, cc + pc, earliest @qs2.logutil.profiled("get_pending_questions") @record_metrics_for_op("get_pending_questions") def get_pending_questions(conn, user_id, columns=[], force=False, limit=None): if not columns: columns = [model.survey_questions] + _implicit_trigger_columns condition, base_condition = make_trigger_conditions(conn, user_id, force=force) joined = model.triggers.join(model.survey_questions, model.triggers.c.trigger_id == model.survey_questions.c.trigger_id) query = sql.select(columns).where(condition).select_from(joined) query = query.order_by(model.triggers.c.next_trigger.asc()) if limit: query = query.limit(limit) results = sql_op(conn, "fetch pending questions", query).fetchall() data = [(PRIORITY_NORMAL, row.next_trigger, get_question_challenge(row)) for row in results] count_query = sql.select([sql.func.count()]).select_from(joined).where(condition) count = sql_op(conn, "fetch query count", count_query).scalar() logging.info("fetched count %d", count) first_trigger_query = sql.select([model.triggers.c.next_trigger]).\ select_from(joined).\ where(base_condition).\ order_by(model.triggers.c.next_trigger.asc()).limit(1) first_trigger = sql_op(conn, "fetch next trigger", first_trigger_query).scalar() logging.info("fetched trigger %s", repr(first_trigger)) return { "results": data, "count": count, "first_trigger": first_trigger, } def peek_question(conn, user_id): query = question_queue_query(user_id) row = sql_op(conn, "fetch question", query).fetchone() return dict(row) def fetch_event_type(conn, user_id, evt_id): columns = [qs2.model.event_types] query = sql.select(columns).where( (model.event_types.c.user_id_owner == user_id) & (model.event_types.c.evt_id == evt_id) ) return sql_op(conn, "fetch event type by ID", query).fetchone() def fetch_question(conn, user_id, question_id, *columns): if not columns: columns = [model.survey_questions] query = sql.select(columns).where( (model.survey_questions.c.user_id_owner == user_id) & (model.survey_questions.c.sq_id == question_id) ) row = sql_op(conn, "fetch question by ID", query).fetchone() if 
row: return dict(row) @qs2.logutil.profiled("fetch_survey_question_answers") @record_metrics_for_op("fetch_survey_question_answers") def fetch_survey_question_answers(conn, user_id, question_id, t0=None, t1=None): condition = ( (model.survey_answers.c.user_id_owner == user_id) & (model.survey_answers.c.sq_id == question_id) ) if t0 is not None: condition = condition & (model.survey_answers.c.timestamp >= t0) if t1 is not None: # Note, exclusive. This makes it easy to request distinct batches. condition = condition & (model.survey_answers.c.timestamp < t1) query = sql.select([model.survey_answers]).where( condition).order_by(model.survey_answers.c.timestamp.asc()) return sql_op(conn, "fetch question answers", query).fetchall() def fetch_csv_export(conn, user_id, querystring, out): terms = qs2.csvexport.parse_csv_query(querystring) rawstreams = [map(lambda row: (qs2.qsjson.json_datetime(row.timestamp), row.value), fetch_var(conn, user_id, term.var_type, term.var_id)) for term in terms] streams = [qs2.csvexport.DataStream(stream) for stream in rawstreams] tabulated = qs2.csvexport.interpolate_streams(streams) writer = csv.writer(out) for row in tabulated: writer.writerow(row) def fetch_var(conn, user_id, var_type, var_id): if var_type != "question": raise OperationFailed("unsupported var_type '{}'".format(var_type)) return fetch_question_answers(conn, user_id, var_id) def fetch_question_answers(conn, user_id, question_id): query = sql.select([ model.survey_answers.c.timestamp, model.survey_answers.c.value, ]).where( (model.survey_answers.c.user_id_owner == user_id) & (model.survey_answers.c.sq_id == question_id) ).order_by(model.survey_answers.c.timestamp.asc()) rows = sql_op(conn, "fetch question answers", query).fetchall() return rows def peek_question_interactive(conn, username, skip_auth=False): user_id = maybe_auth(conn, username, skip_auth) row = peek_question(conn, user_id) for key, value in row.items(): print key, "\t", value def symmetric_truncated_gauss(sigma, clip): assert clip > 0 while True: rv = random.gauss(0, sigma) if abs(rv) < clip: return rv def randomize_next_delay(mean_delay): k = 1 + symmetric_truncated_gauss(0.5, 1) result = datetime.timedelta(seconds=k * mean_delay.total_seconds()) logging.debug("randomizing %s with factor %lf: %s", mean_delay, k, result) return result def fetch_trigger(conn, trigger_id): query = sql.select([model.triggers]).\ where(model.triggers.c.trigger_id == trigger_id) return sql_op(conn, "fetch trigger", query).fetchone() def reset_trigger(conn, trigger_id): id_match = model.triggers.c.trigger_id == trigger_id row = fetch_trigger(conn, trigger_id) now = datetime.datetime.now() if row.min_delay: never_trigger_before = now + randomize_next_delay(row.min_delay) else: never_trigger_before = None next_trigger = now + randomize_next_delay(row.mean_delay) query = model.triggers.update().where(id_match).values( next_trigger = next_trigger, never_trigger_before = never_trigger_before, ) sql_op(conn, "update trigger for reset", query) def fetch_question_trigger_id(conn, user_id, question_id): rv = fetch_question(conn, user_id, question_id, model.survey_questions.c.trigger_id) return rv["trigger_id"] def skip_question(conn, user_id, question_id): now = datetime.datetime.now() with conn.begin() as trans: reset_trigger(conn, fetch_question_trigger_id(conn, user_id, question_id)) def fetch_all_question_keys(conn, user_id): return [("question", q["sq_id"]) for q in get_all_questions(conn, user_id)] def fetch_all_measurement_keys(conn, user_id): # TODO events 
return fetch_all_question_keys(conn, user_id) def log_request(conn, url, referer, user_agent, method, client_ip): now = datetime.datetime.now() query = model.requests.insert().values( timestamp=now, client_ip=client_ip, url=url, referer=referer, user_agent=user_agent, method=method, ) (req_id,) = sql_op(conn, "log request", query).inserted_primary_key return req_id def fetch_event_report_tail(conn, user_id, event_type): query = sql.select([qs2.model.event_record.c.end]).\ where( (qs2.model.event_record.c.evt_id == event_type.evt_id) & (qs2.model.event_record.c.user_id_owner == user_id) ).order_by(qs2.model.event_record.c.end.desc()).limit(1) results = sql_op(conn, "fetch tail of event record", query).fetchall() backdating = datetime.timedelta(days=1) # TODO: make configurable somehow if results: rv = hacky_force_timezone(results[0].end) else: rv = hacky_force_timezone(event_type.timestamp - backdating) return truncate_to_second_resolution(rv) def append_to_event_record(conn, event_type, start, end, state, req_id, comment=None): query = qs2.model.event_record.insert().values( req_id_creator=req_id, user_id_owner=event_type.user_id_owner, evt_id=event_type.evt_id, status=state, start=start, end=end, comment=comment, ) (evr_id,) = sql_op(conn, "append to event record", query).inserted_primary_key return evr_id def try_correct_event_report(conn, user_id, event_type, start, end, state, req_id): # Note: this does not support _splitting_ events for now, but this could # be implemented in the future. Until this is implemented, events (within one # event type) must be reported _in order_. Their absence can be reported out of # order. t = qs2.model.event_record query = sql.select([t.c.evr_id, t.c.status]).\ where( (t.c.user_id_owner == user_id) & (t.c.evt_id == event_type.evt_id) & (t.c.start == start) & (t.c.end == end) ) results = sql_op(conn, "fetch event record for change", query).fetchall() if len(results) == 0: raise OperationFailed("no matching event records found") if len(results) > 1: raise OperationFailed("too many matching event records found") status = results[0].status if status != "unreported": raise OperationFailed("status already reported (as: '{}')".format(status)) evr_id = results[0].evr_id query = t.update().where(t.c.evr_id == evr_id).values(status=state) sql_op(conn, "updating old event record", query) return { "corrected_event_record_id": evr_id } def post_event_report(conn, user_id, event_type, start, end, state, req_id, comment=None): # query to see: # - whether there is an interval between [last_end, start] # - whether start < last_end (reject) logging.info("posting %s for event #%d", state, event_type.evt_id) if end < start: raise ValidationFailed("event ends before it starts") if state not in ("on", "off", "unknown"): raise ValidationFailed("invalid state: {}".format(state)) tail = fetch_event_report_tail(conn, user_id, event_type) logging.info("tail for event #%d reported as: %s", event_type.evt_id, tail) logging.info("start was: %s", start) if start < tail: if comment is not None: raise ValidationFailed("did not expect comment on correction") logging.info("start was before tail, trying to correct") try: return try_correct_event_report(conn, user_id, event_type, start, end, state, req_id) except OperationFailed as e: logging.info("failed to correct, denying retroactive change") message = "retroactive change to event record, invalid update: " + e.message raise OperationFailed(message) rv = {} if tail < start: logging.info("tail was before start, appending extra") 
evr_id_gap = append_to_event_record(conn, event_type, start=tail, end=start, state="unreported", req_id=req_id) rv["missing_report"] = { "start": hacky_force_timezone(tail), "end": hacky_force_timezone(start), "event_report_id": evr_id_gap, } rv["event_report_id"] = append_to_event_record(conn, event_type, start=start, end=end, state=state, req_id=req_id, comment=comment) reset_trigger(conn, event_type.trigger_id) return rv def post_answer(conn, user_id, question_id, value, answer_latency=None, req_id_creator=None): now = datetime.datetime.now() qs2.validation.check("survey_value", value) with conn.begin() as trans: reset_trigger(conn, fetch_question_trigger_id(conn, user_id, question_id)) query = model.survey_answers.insert().values( timestamp=now, user_id_owner=user_id, sq_id=question_id, value=value, req_id_creator=req_id_creator, answer_latency=answer_latency, ) (answer_id,) = sql_op(conn, "create answer", query).inserted_primary_key return answer_id def post_answer_interactive(conn, username, question_id, value, skip_auth=False): qs2.validation.check("survey_value", value) user_id = maybe_auth(conn, username, skip_auth) question = fetch_question(conn, user_id, question_id) confirm("Answer {} to question '{}'? (0 = {}, 1 = {})".format( value, question["question"], question["low_label"], question["high_label"], )) post_answer(conn, user_id, question_id, value) def add_question_interactive(conn, username, skip_auth=False): user_id = maybe_auth(conn, username, skip_auth) data = cli_query_form( ("question", "question", True, str), ("low_label", "lower/left label", True, str), ("high_label", "upper/right label", True, str), ("middle_label", "middle label", False, str), ("delay_s", "mean delay (seconds)", False, int), ) for key, value in data.items(): print key, "\t", value confirm("Add this question?") add_question(conn, user_id=user_id, **data) def format_scale(q): rv = [] for key, value in zip(("low_label", "middle_label", "high_label"), (0, 50, 100)): if key in q: rv.append("{} ({}%)".format(q[key], value)) return " ... ".join(rv) def display_question(q): print "Question:", q["question"] print "Trigger time:", q["next_trigger"] print format_scale(q) def survey_interactive(conn, username, accept_stale=False): user_id = verify_user_interactive(conn, username) while True: now = datetime.datetime.now() q = peek_question(conn, user_id) stale = q["next_trigger"] > now if stale and not accept_stale: break display_question(q) value = qs2.validation.ask("survey_value", decimal.Decimal, ui.UI.raw_input, logging.error) now = datetime.datetime.now() post_answer(conn, user_id, q["sq_id"], value) def add_event_type(conn, user_id, name, use_duration, trigger_spec, req_id_creator): qs2.validation.check("question", name, secret=True) qs2.validation.check("bool", use_duration) qs2.validation.check("trigger_spec", trigger_spec) now = datetime.datetime.now() with conn.begin() as trans: trigger_id = create_trigger(conn, user_id, "event", trigger_spec) query = qs2.model.event_types.insert().values( timestamp=now, req_id_creator=req_id_creator, user_id_owner=user_id, name=name, use_duration=use_duration, trigger_id=trigger_id, ) (evt_id,) = sql_op(conn, "create event type", query).inserted_primary_key logging.info("created event #%d", evt_id) return evt_id
2.125
2
os_net_config/tests/test_cli.py
openstack/os-net-conf
0
12783981
# -*- coding: utf-8 -*- # Copyright 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from io import StringIO import os.path import random import re import sys import yaml import os_net_config from os_net_config import cli from os_net_config import common from os_net_config import sriov_config from os_net_config.tests import base from os_net_config import utils REALPATH = os.path.dirname(os.path.realpath(__file__)) SAMPLE_BASE = os.path.join(REALPATH, '../../', 'etc', 'os-net-config', 'samples') class TestCli(base.TestCase): def setUp(self): super(TestCli, self).setUp() rand = str(int(random.random() * 100000)) sriov_config._SRIOV_CONFIG_FILE = '/tmp/sriov_config_' + rand + '.yaml' common._LOG_FILE = '/tmp/' + rand + 'os_net_config.log' sys.stdout = StringIO() sys.stderr = StringIO() def stub_is_ovs_installed(): return True self.stub_out('os_net_config.utils.is_ovs_installed', stub_is_ovs_installed) def tearDown(self): super(TestCli, self).tearDown() if os.path.isfile(common._LOG_FILE): os.remove(common._LOG_FILE) if os.path.isfile(sriov_config._SRIOV_CONFIG_FILE): os.remove(sriov_config._SRIOV_CONFIG_FILE) def run_cli(self, argstr, exitcodes=(0,)): for s in [sys.stdout, sys.stderr]: s.flush() s.truncate(0) s.seek(0) ret = cli.main(argstr.split()) self.assertIn(ret, exitcodes) sys.stdout.flush() sys.stderr.flush() stdout = sys.stdout.getvalue() stderr = sys.stderr.getvalue() return (stdout, stderr) def stub_get_stored_pci_address(self, ifname, noop): if 'eth0' in ifname: return "0000:00:07.0" if 'eth1' in ifname: return "0000:00:08.0" if 'eth2' in ifname: return "0000:00:09.0" if 'em3' in ifname: return "0000:00:03.0" if 'em1' in ifname: return "0000:00:01.0" def test_bond_noop_output(self): bond_yaml = os.path.join(SAMPLE_BASE, 'bond.yaml') bond_json = os.path.join(SAMPLE_BASE, 'bond.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % bond_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % bond_json) self.assertEqual('', stderr) sanity_devices = ['DEVICE=br-ctlplane', 'DEVICE=em2', 'DEVICE=em1', 'DEVICE=bond1', 'DEVICETYPE=ovs'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) def test_ivs_noop_output(self): ivs_yaml = os.path.join(SAMPLE_BASE, 'ivs.yaml') ivs_json = os.path.join(SAMPLE_BASE, 'ivs.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % ivs_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % ivs_json) self.assertEqual('', stderr) sanity_devices = ['DEVICE=nic2', 'DEVICE=nic3', 'DEVICE=api201', 'DEVICE=storage202', 'DEVICETYPE=ivs'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) def test_bridge_noop_output(self): bridge_yaml = os.path.join(SAMPLE_BASE, 
'bridge_dhcp.yaml') bridge_json = os.path.join(SAMPLE_BASE, 'bridge_dhcp.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=eni --noop ' '--exit-on-validation-errors ' '-c %s' % bridge_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=eni --noop ' '--exit-on-validation-errors ' '-c %s' % bridge_json) self.assertEqual('', stderr) sanity_devices = ['iface br-ctlplane inet dhcp', 'iface em1', 'ovs_type OVSBridge'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) def test_vlan_noop_output(self): vlan_yaml = os.path.join(SAMPLE_BASE, 'bridge_vlan.yaml') vlan_json = os.path.join(SAMPLE_BASE, 'bridge_vlan.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % vlan_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % vlan_json) self.assertEqual('', stderr) sanity_devices = ['DEVICE=br-ctlplane', 'DEVICE=em1', 'DEVICE=vlan16', 'DEVICETYPE=ovs'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) def test_interface_noop_output(self): interface_yaml = os.path.join(SAMPLE_BASE, 'interface.yaml') interface_json = os.path.join(SAMPLE_BASE, 'interface.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % interface_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % interface_json) self.assertEqual('', stderr) sanity_devices = ['DEVICE=em1', 'BOOTPROTO=static', 'IPADDR=192.0.2.1'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) def test_bridge_noop_rootfs(self): for provider in ('ifcfg', 'eni'): bond_yaml = os.path.join(SAMPLE_BASE, 'bridge_dhcp.yaml') stdout_yaml, stderr = self.run_cli('ARG0 --provider=%s --noop ' '--exit-on-validation-errors ' '--root-dir=/rootfs ' '-c %s' % (provider, bond_yaml)) self.assertEqual('', stderr) self.assertIn('File: /rootfs/', stdout_yaml) def test_interface_noop_detailed_exit_codes(self): interface_yaml = os.path.join(SAMPLE_BASE, 'interface.yaml') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s --detailed-exit-codes' % interface_yaml, exitcodes=(2,)) def test_interface_noop_detailed_exit_codes_no_changes(self): interface_yaml = os.path.join(SAMPLE_BASE, 'interface.yaml') class TestImpl(os_net_config.NetConfig): def add_interface(self, interface): pass def apply(self, cleanup=False, activate=True): # this fake implementation returns no changes return {} self.stub_out('os_net_config.impl_ifcfg.IfcfgNetConfig', TestImpl) stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s --detailed-exit-codes' % interface_yaml, exitcodes=(0,)) def test_sriov_noop_output(self): def test_get_vf_devname(device, vfid): return device + '_' + str(vfid) def test_get_pci_address(ifname, noop): return '0000:79:10.2' def test_interface_mac(name): return 'AA:BB:CC:DD:EE:FF' self.stub_out('os_net_config.utils.get_vf_devname', test_get_vf_devname) self.stub_out('os_net_config.utils.get_pci_address', test_get_pci_address) self.stub_out('os_net_config.utils.interface_mac', test_interface_mac) ivs_yaml = os.path.join(SAMPLE_BASE, 'sriov_pf.yaml') ivs_json = os.path.join(SAMPLE_BASE, 'sriov_pf.json') 
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % ivs_yaml) self.assertEqual('', stderr) contents = utils.get_file_data(sriov_config._SRIOV_CONFIG_FILE) sriov_config_yaml = yaml.safe_load(contents) os.remove(sriov_config._SRIOV_CONFIG_FILE) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % ivs_json) self.assertEqual('', stderr) contents = utils.get_file_data(sriov_config._SRIOV_CONFIG_FILE) sriov_config_json = yaml.safe_load(contents) sanity_devices = ['DEVICE=p2p1', 'DEVICE=p2p1_5', 'DEVICE=p2p1_1', 'DEVICE=br-vfs', 'DEVICE=br-bond', 'TYPE=OVSBridge'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) self.assertCountEqual(sriov_config_yaml, sriov_config_json) def test_sriov_vf_with_dpdk_noop_output(self): def test_get_vf_devname(device, vfid): return device + '_' + str(vfid) def test_get_pci_address(ifname, noop): return '0000:79:10.2' self.stub_out('os_net_config.utils.get_vf_devname', test_get_vf_devname) self.stub_out('os_net_config.utils.get_pci_address', test_get_pci_address) ivs_yaml = os.path.join(SAMPLE_BASE, 'sriov_pf_ovs_dpdk.yaml') ivs_json = os.path.join(SAMPLE_BASE, 'sriov_pf_ovs_dpdk.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % ivs_yaml) self.assertEqual('', stderr) contents = utils.get_file_data(sriov_config._SRIOV_CONFIG_FILE) sriov_config_yaml = yaml.safe_load(contents) os.remove(sriov_config._SRIOV_CONFIG_FILE) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % ivs_json) self.assertEqual('', stderr) contents = utils.get_file_data(sriov_config._SRIOV_CONFIG_FILE) sriov_config_json = yaml.safe_load(contents) sanity_devices = ['DEVICE=p2p1', 'DEVICE=p2p1_5', 'DEVICE=br-vfs', 'TYPE=OVSUserBridge', 'DEVICE=dpdk0', 'TYPE=OVSDPDKPort'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) self.assertCountEqual(sriov_config_yaml, sriov_config_json) def test_ovs_dpdk_bond_noop_output(self): ivs_yaml = os.path.join(SAMPLE_BASE, 'ovs_dpdk_bond.yaml') ivs_json = os.path.join(SAMPLE_BASE, 'ovs_dpdk_bond.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % ivs_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % ivs_json) self.assertEqual('', stderr) sanity_devices = ['DEVICE=br-link', 'TYPE=OVSUserBridge', 'DEVICE=dpdkbond0', 'TYPE=OVSDPDKBond'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) def test_nfvswitch_noop_output(self): nfvswitch_yaml = os.path.join(SAMPLE_BASE, 'nfvswitch.yaml') nfvswitch_json = os.path.join(SAMPLE_BASE, 'nfvswitch.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % nfvswitch_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % nfvswitch_json) self.assertEqual('', stderr) sanity_devices = ['DEVICE=nic2', 'DEVICE=nic3', 'DEVICE=api201', 'DEVICE=storage202', 'DEVICETYPE=nfvswitch'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) def test_ovs_dpdk_noop_output(self): ivs_yaml = os.path.join(SAMPLE_BASE, 
'ovs_dpdk.yaml') ivs_json = os.path.join(SAMPLE_BASE, 'ovs_dpdk.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % ivs_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % ivs_json) self.assertEqual('', stderr) sanity_devices = ['DEVICE=br-link', 'TYPE=OVSUserBridge', 'DEVICE=dpdk0', 'TYPE=OVSDPDKPort'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) def test_nic_mapping_report_output(self): mapping_report = os.path.join(SAMPLE_BASE, 'mapping_report.yaml') def dummy_mapped_nics(nic_mapping=None): return nic_mapping self.stub_out('os_net_config.objects.mapped_nics', dummy_mapped_nics) stdout, stderr = self.run_cli('ARG0 --interfaces ' '--exit-on-validation-errors ' '-m %s' % mapping_report) self.assertEqual('', stderr) stdout_list = yaml.safe_load(stdout) self.assertEqual(stdout_list['nic1'], 'em1') self.assertEqual(stdout_list['nic2'], 'em2') self.assertEqual(stdout_list['nic3'], 'em4') self.assertEqual(stdout_list['nic4'], 'em3') def test_nic_mapping_report_with_explicit_interface_name(self): mapping_report = os.path.join(SAMPLE_BASE, 'mapping_report.yaml') def dummy_mapped_nics(nic_mapping=None): return nic_mapping self.stub_out('os_net_config.objects.mapped_nics', dummy_mapped_nics) stdout, stderr = self.run_cli('ARG0 --interfaces em2 em3 ' '--exit-on-validation-errors ' '-m %s' % mapping_report) self.assertEqual('', stderr) stdout_list = yaml.safe_load(stdout) self.assertNotIn('em1', stdout_list.keys()) self.assertNotIn('em1', stdout_list.values()) self.assertEqual(stdout_list['em2'], 'em2') self.assertEqual(stdout_list['em3'], 'em3') self.assertNotIn('em4', stdout_list.keys()) self.assertNotIn('em4', stdout_list.values()) def test_contrail_vrouter_noop_output(self): cvi_yaml = os.path.join(SAMPLE_BASE, 'contrail_vrouter.yaml') cvi_json = os.path.join(SAMPLE_BASE, 'contrail_vrouter.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % cvi_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % cvi_json) self.assertEqual('', stderr) sanity_devices = ['DEVICE=vhost0', 'BIND_INT=em3', 'DEVICETYPE=vhost', 'TYPE=kernel_mode'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) def test_contrail_vrouter_vlan_noop_output(self): cvi_yaml = os.path.join(SAMPLE_BASE, 'contrail_vrouter_vlan.yaml') cvi_json = os.path.join(SAMPLE_BASE, 'contrail_vrouter_vlan.json') stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % cvi_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '-c %s' % cvi_json) self.assertEqual('', stderr) sanity_devices = ['DEVICE=vhost0', 'BIND_INT=vlan100', 'DEVICETYPE=vhost', 'TYPE=kernel_mode'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) self.assertEqual(stdout_yaml, stdout_json) def test_contrail_vrouter_dpdk_noop_output(self): timestamp_rex = re.compile( (r'contrail_vrouter_dpdk\.(yaml|json)|^[\d]{4}-[\d]{2}-[\d]{2} ' r'[\d]{2}:[\d]{2}:[\d]{2}\.[\d]{3} '), flags=re.M ) cvi_yaml = os.path.join(SAMPLE_BASE, 'contrail_vrouter_dpdk.yaml') cvi_json = os.path.join(SAMPLE_BASE, 'contrail_vrouter_dpdk.json') 
self.stub_out('os_net_config.utils.get_stored_pci_address', self.stub_get_stored_pci_address) stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '--debug ' '-c %s' % cvi_yaml) self.assertEqual('', stderr) stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop ' '--exit-on-validation-errors ' '--debug ' '-c %s' % cvi_json) self.assertEqual('', stderr) sanity_devices = ['DEVICE=vhost0', 'BIND_INT=0000:00:03.0', 'DEVICETYPE=vhost', 'TYPE=dpdk'] for dev in sanity_devices: self.assertIn(dev, stdout_yaml) stdout_yaml = timestamp_rex.sub('', stdout_yaml) stdout_json = timestamp_rex.sub('', stdout_json) self.assertEqual(stdout_yaml, stdout_json)
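Outside the test harness, the same noop invocation pattern that run_cli exercises can be driven directly through cli.main; a minimal sketch, with the sample config path as an assumption:

# Minimal sketch mirroring run_cli() above; the first token is just a placeholder
# program name (the tests use 'ARG0') and the sample config path is an assumption.
from os_net_config import cli

args = ("os-net-config --provider=ifcfg --noop "
        "--exit-on-validation-errors "
        "-c etc/os-net-config/samples/interface.yaml")
exit_code = cli.main(args.split())
print("exit code:", exit_code)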
1.84375
2
pdc_app/pdc_core_app/urls.py
PLsergent/pdc_itlink
1
12783982
from django.urls import path, include
from . import views

urlpatterns = [
    path('', views.index, name='index'),
    path('protectedErr/', views.protected_error, name='protected_error'),
    path('accounts/', include('django.contrib.auth.urls')),
    path('auth/', include('social_django.urls', namespace='social')),
    path('user/update/<int:id>', views.UpdateUser.as_view(), name='UpdateUser'),
    path('projets/', views.projets, name='projets'),
    path('projets/add/', views.AjoutProjet.as_view(), name='AjoutProjet'),
    path('projets/update/<int:idProjet>', views.UpdateProjet.as_view(), name='UpdateProjet'),
    path('projets/delete/<int:idProjet>', views.DeleteProjet.as_view(), name='DeleteProjet'),
    path('projets/tache_probable_add/', views.NouvelleTacheProbable.as_view(), name='NouvelleTacheProbable'),
    path('projets/tache_probable_update/<int:idCom>', views.UpdateTacheProbable.as_view(), name='UpdateTacheProbable'),
    path('projets/tache_probable_delete/<int:idCom>', views.DeleteTacheProbable.as_view(), name='DeleteTacheProbable'),
    path('clients/add/', views.AjoutClient.as_view(), name='AjoutClient'),
    path('clients/update/<int:idClient>', views.UpdateClient.as_view(), name='UpdateClient'),
    path('clients/delete/<int:idClient>', views.DeleteClient.as_view(), name='DeleteClient'),
    path('collaborateurs/', views.collaborateurs, name='collaborateurs'),
    path('collaborateurs/add/', views.AjoutCollab.as_view(), name='AjoutCollab'),
    path('collaborateurs/update/<str:pk>', views.UpdateCollab.as_view(), name='UpdateCollab'),
    path('collaborateurs/delete/<str:pk>', views.DeleteCollab.as_view(), name='DeleteCollab'),
    path('collaborateurs/assign/', views.AffectationProjetDateSet.as_view(), name='AffectationProjetDateSet'),
    path('collaborateurs/assign/update/<int:idRP>', views.UpdateAffectationProjetDateSet.as_view(), name='UpdateAffectationProjetDateSet'),
    path('collaborateurs/assign/delete/<int:idRP>', views.DeleteAffectation.as_view(), name='DeleteAffectation'),
    path('commandes/', views.commandes, name='commandes'),
    path('commandes/add/', views.PasserCommande.as_view(), name='PasserCommande'),
    path('commandes/update/<int:idCom>', views.UpdateCommande.as_view(), name='UpdateCommande'),
    path('commandes/fromtask/<int:idCom>', views.PassCommandFromTask.as_view(), name='PassCommandFromTask'),
    path('commandes/delete/<int:idCom>', views.DeleteCommande.as_view(), name='DeleteCommande'),
    path('autres/', views.autres, name='autres'),
    path('autres/assign', views.AffectationAutres.as_view(), name='AffectationAutres'),
    path('autres/assign/update/<int:idRA>', views.UpdateAffectationAutres.as_view(), name='UpdateAffectationAutres'),
    path('autres/assign/delete/<int:idRA>', views.DeleteAffectationAutres.as_view(), name='DeleteAffectationAutres'),
    path('data/', views.data, name='data'),
    path('history/', views.history, name='history'),
    path('history/revert_projet/<str:model>/<int:id>', views.revert_projet, name='revert_projet'),
    path('history/revert_command/<str:model>/<int:id>', views.revert_command, name='revert_command'),
    path('history/revert_collab/<str:model>/<str:id>', views.revert_collab, name='revert_collab'),
    path('history/revert_autres/<str:model>/<int:id>', views.revert_autres, name='revert_autres'),
    path('history/revert_data/<str:model>/<int:id>', views.revert_data, name='revert_data'),
    path('history/revert_data_bis/<str:model>/<int:id>', views.revert_data_bis, name='revert_data_bis'),
    path('history/delete/', views.clean_history, name="clean_history"),
    path('charge_update/<int:id>', views.assigned_charges_update, name="assigned_charges_update"),
]
1.90625
2
chapter5_operations/prediction_monitoring_pattern/src/configurations.py
sudabon/ml-system-in-actions
133
12783983
import os
from logging import getLogger

from src.constants import CONSTANTS, PLATFORM_ENUM

logger = getLogger(__name__)


class PlatformConfigurations:
    platform = os.getenv("PLATFORM", PLATFORM_ENUM.DOCKER.value)
    if not PLATFORM_ENUM.has_value(platform):
        raise ValueError(f"PLATFORM must be one of {[v.value for v in PLATFORM_ENUM.__members__.values()]}")


class DBConfigurations:
    mysql_username = os.getenv("MYSQL_USER")
    mysql_password = os.getenv("MYSQL_PASSWORD")
    mysql_port = int(os.getenv("MYSQL_PORT", 3306))
    mysql_database = os.getenv("MYSQL_DATABASE", "sample_db")
    mysql_server = os.getenv("MYSQL_SERVER")
    sql_alchemy_database_url = (
        f"mysql://{mysql_username}:{mysql_password}@{mysql_server}:{mysql_port}/{mysql_database}?charset=utf8"
    )


class APIConfigurations:
    title = os.getenv("API_TITLE", "ServingPattern")
    description = os.getenv("API_DESCRIPTION", "machine learning system serving patterns")
    version = os.getenv("API_VERSION", "0.1")


class ModelConfigurations:
    model_filepath = os.getenv("MODEL_FILEPATH")
    label_filepath = os.getenv("LABEL_FILEPATH")
    outlier_model_filepath = os.getenv("OUTLIER_MODEL_FILEPATH")
    outlier_lower_threshold = float(os.getenv("OUTLIER_LOWER_THRESHOLD", 0.0))


logger.info(f"{PlatformConfigurations.__name__}: {PlatformConfigurations.__dict__}")
logger.info(f"{APIConfigurations.__name__}: {APIConfigurations.__dict__}")
logger.info(f"{ModelConfigurations.__name__}: {ModelConfigurations.__dict__}")
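Because these classes read the environment at import time, any overrides have to be exported before src.configurations is imported; a small sketch with purely illustrative values:

# Illustrative values only; export the variables before importing the module,
# since the class attributes are evaluated at import time.
import os

os.environ.setdefault("MYSQL_USER", "sample_user")
os.environ.setdefault("MYSQL_PASSWORD", "sample_password")
os.environ.setdefault("MYSQL_SERVER", "127.0.0.1")
os.environ.setdefault("MODEL_FILEPATH", "/models/model.onnx")
os.environ.setdefault("LABEL_FILEPATH", "/models/labels.json")
os.environ.setdefault("OUTLIER_MODEL_FILEPATH", "/models/outlier.onnx")

from src.configurations import DBConfigurations, ModelConfigurations

print(DBConfigurations.sql_alchemy_database_url)
print(ModelConfigurations.outlier_lower_threshold)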
2.46875
2
src/the_impossible/live/migrations/newsletter/migrations/0003_remove_subscriber_last_sent.py
micha31r/The-Impossible
0
12783984
# Generated by Django 2.2.7 on 2020-07-15 07:26

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('newsletter', '0002_auto_20200514_1518'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='subscriber',
            name='last_sent',
        ),
    ]
1.328125
1
reo/migrations/0091_merge_20201228_2115.py
akuam1/REopt_Lite_API
41
12783985
# Generated by Django 2.2.13 on 2020-12-28 21:15

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('reo', '0090_absorptionchillermodel_chiller_cop'),
        ('reo', '0087_merge_20201228_2004'),
    ]

    operations = [
    ]
1.273438
1
code/k_fold.py
federico-code/PedMS-Classification
0
12783986
import numpy as np
from sklearn.model_selection import StratifiedKFold
from sklearn.model_selection import LeaveOneOut
import pandas as pd
import os
import sys

from feature_selection import read_data


def leaveOneOut(df):
    # target and data selection
    y = df.iloc[:, -1]
    X = df.iloc[:, :-1]
    y = y.to_numpy()

    loo = LeaveOneOut()
    loo.get_n_splits(X, y)

    split = {
        "train": [],
        "test": []
    }
    for train_index, test_index in loo.split(X, y):
        print("loo sizes", len(train_index), len(test_index))
        split["train"].append(train_index)
        split["test"].append(test_index)

    print("leave one out ... created ", len(split["train"]))
    return split


def stratifiedKfold(df, k=5):
    # target and data selection
    y = df.iloc[:, -1]
    X = df.iloc[:, :-1]
    y = y.to_numpy()

    skf = StratifiedKFold(k)
    skf.get_n_splits(X, y)

    split = {
        "train": [],
        "test": []
    }
    for train_index, test_index in skf.split(X, y):
        split["train"].append(train_index)
        split["test"].append(test_index)

    print("stratified k fold ... k = ", k)
    return split


def createFolder(df, split_dict, path="./kFold"):
    access_rights = 0o777
    try:
        os.mkdir(path, access_rights)
    except OSError:
        print("Creation of the directory %s failed" % path)
    else:
        print("Successfully created the directory %s" % path)

    train = split_dict["train"]
    test = split_dict["test"]
    for i in range(len(train)):
        df.iloc[train[i]].to_csv(path + "/train_" + str(i) + ".txt", sep="\t")
        df.iloc[test[i]].to_csv(path + "/test_" + str(i) + ".txt", sep="\t")


def main():
    data = read_data(sys.argv[1])
    if sys.argv[2] == "stratified":
        k = int(sys.argv[3])
        split = stratifiedKfold(data, k)
        # the output path is optional; guard against it being absent to avoid an IndexError
        if len(sys.argv) > 4 and sys.argv[4]:
            createFolder(data, split, path=sys.argv[4])
        else:
            createFolder(data, split)
    elif sys.argv[2] == "loo":
        split = leaveOneOut(data)
        createFolder(data, split, path="./loo")


# python k_fold.py .\data\new_data.txt [stratified | loo] {k}
if __name__ == "__main__":
    main()
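Besides the CLI shown in the trailing comment, the helpers can be called directly; a hedged sketch with a synthetic DataFrame standing in for the tab-separated file normally loaded via read_data:

# Hedged sketch: synthetic data instead of read_data(); the last column is the target.
import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
demo = pd.DataFrame(rng.normal(size=(20, 4)), columns=["f1", "f2", "f3", "f4"])
demo["label"] = [0, 1] * 10  # balanced classes so every stratified fold is valid

split = stratifiedKfold(demo, k=5)
createFolder(demo, split, path="./kFold_demo")  # writes train_<i>.txt / test_<i>.txt per fold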
2.671875
3
ribbon/handlers.py
silverfix/django-ribbon
1
12783987
<filename>ribbon/handlers.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, division, absolute_import

import json

import stripe
from django.conf import settings

from . import models

stripe.api_key = settings.STRIPE_SECRET_KEY


def _json_dump_response(response):
    return json.dumps(dict(response), skipkeys=True, indent=4)


def new_customer__new_card(user, token):
    response_customer = stripe.Customer.create(
        source=token,
        description="serverable User ID: {}".format(user.pk),
        email=user.email
    )
    stripe_customer = models.StripeCustomer.objects.create(
        user=user,
        stripe_id=response_customer.stripe_id,
        stripe_response=_json_dump_response(response_customer)
    )

    response_card = response_customer.sources.data[0]
    stripe_card = models.StripeCard.objects.create(
        stripe_id=response_card.id,
        stripe_customer=stripe_customer,
        stripe_response=_json_dump_response(response_card),
        last4=response_card.last4,
        exp_year=response_card.exp_year,
        exp_month=response_card.exp_month,
        brand=response_card.brand,
        country=response_card.country,
        funding=response_card.funding
    )
    return stripe_card


def old_customer__new_card(customer, token):
    response_customer = stripe.Customer.retrieve(customer.stripe_id)
    response_card = response_customer.sources.create(source=token)
    stripe_card = models.StripeCard.objects.create(
        stripe_id=response_card.id,
        stripe_customer=customer,
        stripe_response=_json_dump_response(response_card),
        last4=response_card.last4,
        exp_year=response_card.exp_year,
        exp_month=response_card.exp_month,
        brand=response_card.brand,
        country=response_card.country,
        funding=response_card.funding
    )
    return stripe_card


def charge(card, amount):
    response_charge = stripe.Charge.create(
        amount=amount,
        currency="usd",
        source=card.stripe_id,
        customer=card.stripe_customer
    )
    stripe_charge = models.StripeCharge.objects.create(
        stripe_id=response_charge.stripe_id,
        stripe_customer=card.stripe_customer,
        stripe_card=card,
        stripe_response=_json_dump_response(response_charge),
        amount=response_charge.amount,
        currency=response_charge.currency,
    )
    return stripe_charge
1.960938
2
tests/test_parse.py
assasinitachi123/extra-boolean
0
12783988
<reponame>assasinitachi123/extra-boolean
from extra_boolean import parse


def test_parse():
    assert parse("1") == True
    assert parse("truthy") == True
    assert parse("Not Off") == True
    assert parse("Not Inactive") == True
    assert parse("cold") == False
    assert parse("inactive") == False
    assert parse("Negative Yes") == False
    assert parse("Negative Aye") == False
2.875
3
interprocedural_analyses/taint/test/integration/static_methods.py
skylerberg/pyre-check
1
12783989
# flake8: noqa


class StaticClass:
    @staticmethod
    def sink(oops):
        __test_sink(oops)


def test(source):
    return StaticClass.sink(source)


def run_test(source):
    test(__test_source())
1.96875
2
net/tune_torch.py
marbleton/FPGA_MNIST
7
12783990
<reponame>marbleton/FPGA_MNIST from __future__ import print_function, division import numpy as np import torch import torch.quantization import torch.nn as nn import torch.optim from util import read_np_torch, evaluate_network from train_torch import evaluate, prepare_datasets, LEARNING_RATE, evaluate_labels, load_torch, \ save_torch_model_weights from train_torch import LinearRelu, Flatten from debug import LayerActivations # Import the own made network from EggNet import NeuralNetwork import EggNet.NeuralNetwork.Ext.NeuralNetworkExtension as nnext class FixedConvLayer(torch.nn.Module): def __init__(self, kernel, bias): super(FixedConvLayer, self).__init__() self.kernel = kernel self.bias = bias # self.requires_grad(False) def forward(self, x): with torch.no_grad(): x_ = np.moveaxis(x.numpy(), 1, 3) y_np = nnext.conv2d(x_, self.kernel, stride=1) + self.bias y_ = np.moveaxis(y_np, 3, 1) return torch.from_numpy(y_) # return nnext.conv2d_3x3(x, self.kernel) + self.bias def main(): net = load_torch(filepath='torch/LeNet.pth') weights = read_np_torch(ordering='BHWC', target_dtype=np.float32) save_torch_model_weights(net) # qnet = load_torch(filepath='torch/QLeNet.pth') testloader, trainloader = prepare_datasets() criterion = nn.CrossEntropyLoss() optimizer = torch.optim.Adam(net.parameters(), lr=LEARNING_RATE) net.eval() (top1, top5) = evaluate(net, criterion, data_loader=testloader) print("top1 performance: ", top1) # Read the weights in keras convention weights = read_np_torch(ordering='BHWC', target_dtype=np.float32) weights_torch = read_np_torch(ordering='BCHW', target_dtype=np.float32) # Training in torch happens with np.float32 #qweights = perform_fake_quant(weight_dict=weights, target_bits=5, frac_bits=3, target_dtype=np.float32) #qweights_torch = perform_fake_quant(weight_dict=weights_torch, target_bits=8, frac_bits=4, target_dtype=np.float32) # Compare with our net mnist = NeuralNetwork.Reader.MNIST(folder_path='/tmp/mnist/') test_images = mnist.test_images() test_labels = mnist.test_labels() # our_net = init_network_from_weights(qweights, from_torch=True) batch_size = 50 accuracy = evaluate_network(batch_size, our_net, test_images, test_labels) print("Accuracy: ", accuracy) net.eval() # Reshape images #img_bach_torch = np.reshape(img_batch, newshape=(-1, 1, 28, 28)).astype(np.float32) #_imshow(img_bach_torch, mode='torch') #lbl_torch = net(torch.from_numpy(img_bach_torch)) lbl_torch = lbl_torch.topk(1)[1].numpy().flatten() # ToDo: Maybe simplify this expression a bit # To check the output of the fully connected layers against our net LayerActivations(net, layer_num=0, validate_func=LayerActivations) LayerActivations(net, layer_num=4, validate_func=None) LayerActivations(net, layer_num=7, validate_func=None) LayerActivations(net, layer_num=9, validate_func=None) lbls = evaluate_labels(net, criterion, data_loader=testloader) (top1, top5) = evaluate(net, criterion, data_loader=testloader) fixed_conv_model = nn.Sequential( FixedConvLayer(kernel=weights['cn1.k'], bias=weights['cn1.b']), nn.MaxPool2d(kernel_size=2, stride=2), FixedConvLayer(kernel=weights['cn2.k'], bias=weights['cn2.b']), nn.MaxPool2d(kernel_size=2, stride=2), Flatten(), # LinearRelu(in_features=qweights['fc1.w'].shape[1], out_features=qweights['fc1.w'].shape[0]), LinearRelu.init_with_weights(w=weights_torch['fc1.w'], b=weights_torch['fc1.b']), nn.Dropout(p=0.25), # LinearRelu(in_features=qweights['fc2.w'].shape[1], out_features=10), LinearRelu.init_with_weights(w=weights_torch['fc2.w'], b=weights_torch['fc2.b']), 
nn.Softmax(dim=1) ) #train_network(fixed_conv_model, 1, criterion=criterion, optimizer=optimizer, trainloader=trainloader) #fixed_conv_model.eval() #(top1, top5) = evaluate(fixed_conv_model, criterion, data_loader=testloader) #print(top1) #(top1, top5) = evaluate(net, criterion, testloader) if __name__ == '__main__': main()
2.421875
2
src/202_simple_classification.py
l-chenyao/TensorFlow-tutorials-by-Yao
0
12783991
""" Dependencies: tensorflow: 1.2.0 matplotlib numpy """ import tensorflow as tf import matplotlib.pyplot as plt import numpy as np tf.set_random_seed(1) np.random.seed(1) #fake data n_data = np.ones((100,2)) x0 = np.random.normal(2*n_data, 1) #class0 x shape = (100, 2)) y0 = np.zeros(100) #class0 y shape = (100, 1)) x1 = np.random.normal(-2*n_data, 1) # class1 x shape=(100, 2) y1 = np.ones(100) # class1 y shape=(100, 1) x = np.vstack((x0, x1)) #shape (200, 2)) + some noise y = np.hstack((y0,y1)) #shape (200, ) #plot data plt.scatter(x[:, 0], x[:, 1], c=y, s=100, lw=0, cmap='RdYlGn') plt.show() tf_x = tf.placeholder(tf.float32, x.shape) #input x tf_y = tf.placeholder(tf.int32, y.shape) #neural network layers l1 = tf.layers.dense(tf_x, 10, tf.nn.relu) output = tf.layers.dense(l1, 2) loss = tf.losses.sparse_softmax_cross_entropy(labels = tf_y, logits =output) #compute cost accuracy = tf.metrics.accuracy(labels = tf.squeeze(tf_y), predictions = tf.argmax(output, axis=1), )[1] optimizer = tf.train.GradientDescentOptimizer(learning_rate= 0.05) train_op = optimizer.minimize(loss) sess = tf.Session() init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer()) sess.run(init_op) plt.ion() for step in range(100): #train and net output _, acc, pred = sess.run([train_op, accuracy, output], {tf_x: x, tf_y:y }) if step %2 == 0: #plot and show learning process plt.cla() plt.scatter(x[:, 0], x[:, 1], c = pred.argmax(1), s = 100, lw = 0, cmap = 'RdYlGn') plt.text(1.5, -4, 'Accuracy = %.2f'% acc, fontdict = {'size':20, 'color':'red'}) plt.pause(0.1) plt.ioff() plt.show()
2.84375
3
main_job.py
vsantiago113/Cisco-FMC-Selenium-Boilerplate
0
12783992
from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions from selenium.common.exceptions import NoSuchElementException, TimeoutException, ElementNotVisibleException, \ ElementNotInteractableException import time import os import sys ''' This code was made to work on FMC version 6.2.3.13 and 6.3.0 and it has not been tested on any other version. This code might not work on older or newer versions. ''' # Add to this list supported FMC versions that your code support or ones you tested. supported_versions = ['6.2.3.13', '6.3.0'] def fmc_login(manager: str, username: str, password: str, version: str) -> webdriver: ''' This function navigates to the login page, check if the page is asking to confirm the unsecure ssl and click on it if is there, then login, click on the button to login if there is a session already logged in an and make the browser full screen. :param manager: The Address of the Manager/FMC. :param username: The Manager/FMC login username. :param password: The <PASSWORD>/<PASSWORD> login password. :param version: The version of FMC you are using. :return: webdriver ''' if version not in supported_versions: print('This version is not supported or this code have not been tested in this version.') sys.exit(1) # This capabilities are used to disabled all notifications and prompts when selenium starts. capabilities = { 'browserName': 'chrome', 'chromeOptions': { 'useAutomationExtension': False, 'forceDevToolsScreenshot': True, 'args': ['--start-maximized', '--disable-infobars', '--disable-extensions'] } } # Make sure you download the correct driver according to your web browser and browser version. driver = webdriver.Chrome('bins/chromedriver', desired_capabilities=capabilities) driver.implicitly_wait(1) # This like waits one second before executing anything on the DOM. if manager.startswith('http'): driver.get('{}/login.cgi?'.format(manager)) else: driver.get('https://{}/login.cgi?'.format(manager)) time.sleep(3) # Used to acknowledge the unsecure ssl certificate if it prompts for it. try: advanced_button = driver.find_element(By.XPATH, '/html/body/div/div[2]/button[3]') except (NoSuchElementException, ElementNotVisibleException, ElementNotInteractableException): pass else: advanced_button.click() time.sleep(2) unsafe = driver.find_element(By.XPATH, '/html/body/div/div[3]/p[2]/a') unsafe.click() WebDriverWait(driver, 120).until( expected_conditions.presence_of_element_located( (By.ID, 'username')) ) # Waits until it finds the username form field and timeout in 120 seconds. login_form_username = driver.find_element_by_id('username') login_form_username.send_keys(username) login_form_password = driver.find_element_by_id('password') login_form_password.send_keys(password) driver.maximize_window() login_form_password.submit() # Use to accept the notification indicating that there is already a session open for this account. time.sleep(3) try: proceed = driver.find_element(By.XPATH, '//*[@id="confirm_dialog"]/div[2]/input[1]') except (NoSuchElementException, ElementNotVisibleException, ElementNotInteractableException): pass else: proceed.click() # return the webdriver as driver return driver def logout(driver: webdriver, manager: str) -> None: ''' This function log you out and gracefully quits everything. :param driver: The web browser driver. :param manager: The Address of the Manager/FMC. 
:return: None ''' time.sleep(5) if manager.startswith('http'): driver.get('{}/login.cgi?logout=1'.format(manager)) else: driver.get('https://{}/login.cgi?logout=1'.format(manager)) time.sleep(5) driver.quit() # Gracefully quits everything. def disabled_notifications(driver: webdriver, version) -> None: ''' This function disables the notifications on the FMC to prevent the notification popups from crashing the selenium. :param driver: The web browser driver. :param version: The version of FMC you are using. :return: None ''' if version == '6.2.3.13': tasks_icon = '/html/body/div[13]/div[1]/ul/li[12]/div/div[3]' gear_icon = '/html/body/div[13]/div[1]/ul/li[12]/div/div[4]/div[4]' notifications_icon = '/html/body/div[13]/div[1]/ul/li[12]/div/div[5]/ul/li/div/div/img' enabled_image = 'YgAAADuklEQVR42tWV7U9TZxiHnW7TLX' disabled_image = 'YgAAAC8UlEQVR42tWVXUuaYRjHKxmxsY' elif version == '6.3.0': tasks_icon = '/html/body/div[7]/div[2]/div/div[2]/div/ul/li[8]/div/div[3]' gear_icon = '/html/body/div[7]/div[2]/div/div[2]/div/ul/li[8]/div/div[4]/div[4]' notifications_icon = '/html/body/div[7]/div[2]/div/div[2]/div/ul/li[8]/div/div[5]/ul/li/div/div/img' enabled_image = 'YgAAADuklEQVR42tWV7U9TZxiHnW7TLX' disabled_image = 'YgAAAC8UlEQVR42tWVXUuaYRjHKxmxsY' else: tasks_icon = '' gear_icon = '' notifications_icon = '' enabled_image = '' disabled_image = '' time.sleep(2) WebDriverWait(driver, 120).until( expected_conditions.presence_of_element_located( (By.XPATH, tasks_icon)) ) # Waits until it finds the tasks icon on the upper right corner and timeout in 120 seconds. tasks_element = driver.find_element(By.XPATH, tasks_icon) tasks_element.click() gear_element = driver.find_element(By.XPATH, gear_icon) gear_element.click() notifications_button = driver.find_element(By.XPATH, notifications_icon) notifications_button_img = notifications_button.get_attribute('src')[64:96] # This are the enabled and disabled images for notifications. # In earlier versions or newer versions of the Cisco Management Center this icons might or may not be different. if notifications_button_img == enabled_image: print('Disabling notifications!') print(notifications_button_img) notifications_button.click() elif notifications_button_img == disabled_image: print('Button is already disabled!') print(notifications_button_img) tasks_element.click() time.sleep(2) # Your custom code goes in here def my_function(driver: webdriver, manager: str, version: str, *args, **kwargs) -> None: ''' Start your automation code on here. :param driver: The web browser driver. :param manager: The Address of the Manager/FMC. :param version: The version of FMC you are using. :return: None ''' if manager.startswith('http'): driver.get('{}/platinum/ApplianceInformation.cgi'.format(manager)) else: driver.get('https://{}/platinum/ApplianceInformation.cgi'.format(manager)) # The address of the manager you want to login to MANAGER = '127.0.0.1' # The version of FMC you are using VERSION = '6.3.0' # Login on the web interface DRIVER = fmc_login(MANAGER, os.environ.get('USERNAME'), os.environ.get('PASSWORD'), VERSION) # Disables the notificatiosn globally disabled_notifications(DRIVER, VERSION) # Run your custom function my_function(DRIVER, MANAGER, VERSION) # Logout of the web interface and quit everything gracefully logout(DRIVER, MANAGER)
2.65625
3
harvester_odt/jobcontrol_jobs.py
opendatatrentino/opendata-harvester
1
12783993
""" Functions to be used as JobControl jobs """ from datetime import datetime import logging import os from jobcontrol.globals import execution_context from harvester.utils import (get_storage_direct, jobcontrol_integration, report_progress) logger = logging.getLogger('harvester_odt.pat_statistica') def _get_uniqid(): return "{0:%y%m%d-%H%M%S}-{1}".format(datetime.now(), os.getpid()) def _prepare_storage_url(url): return url.format(id=_get_uniqid()) def get_storage_from_arg(arg): """ Get a storage instance from an argument to a function. This is needed for functions that may be called via an external tool that doesn't allow passing object instances directly. """ from harvester.ext.storage.base import BaseStorage if isinstance(arg, BaseStorage): return arg if isinstance(arg, basestring): return get_storage_direct( _prepare_storage_url(arg), options={}) return get_storage_direct( _prepare_storage_url(arg['url']), options=arg.get('conf', None)) def crawl_statistica(storage): """Run crawler for statistica""" import harvester_odt.pat_statistica.crawler storage = get_storage_from_arg(storage) with jobcontrol_integration(): harvester_odt.pat_statistica.crawler.crawl_statistica(storage) return storage def crawl_statistica_subpro(storage): """Run crawler for statistica - subprovinciale""" import harvester_odt.pat_statistica.crawler storage = get_storage_from_arg(storage) with jobcontrol_integration(): harvester_odt.pat_statistica.crawler.crawl_statistica_subpro(storage) return storage def crawl_geocatalogo(storage): """Run crawler for GeoCatalogo""" from harvester_odt.pat_geocatalogo.crawler import Geocatalogo crawler = Geocatalogo('', {'with_resources': False}) storage = get_storage_from_arg(storage) with jobcontrol_integration(): crawler.fetch_data(storage) return storage def crawl_comunweb(storage, url): """Run crawler for comunweb :param storage: Output storage :param url: base URL of the ComunWeb website """ from harvester_odt.comunweb.crawler import ComunWebCrawler crawler = ComunWebCrawler(url) storage = get_storage_from_arg(storage) with jobcontrol_integration(): crawler.fetch_data(storage) return storage def convert_statistica_to_ckan(input_storage, storage): """Convert data from pat_statistica to Ckan""" from harvester_odt.pat_statistica.converter \ import convert_statistica_to_ckan input_storage = get_storage_from_arg(input_storage) storage = get_storage_from_arg(storage) with jobcontrol_integration(): convert_statistica_to_ckan(input_storage, storage) return storage def convert_statistica_subpro_to_ckan(input_storage, storage): """Convert data from pat_statistica_subpro to Ckan""" from harvester_odt.pat_statistica.converter \ import convert_statistica_subpro_to_ckan input_storage = get_storage_from_arg(input_storage) storage = get_storage_from_arg(storage) with jobcontrol_integration(): convert_statistica_subpro_to_ckan(input_storage, storage) return storage def convert_geocatalogo_to_ckan(input_storage, storage): """Convert data from pat_geocatalogo to Ckan""" from harvester_odt.pat_geocatalogo.converter \ import GeoCatalogoToCkan input_storage = get_storage_from_arg(input_storage) storage = get_storage_from_arg(storage) converter = GeoCatalogoToCkan('', {}) with jobcontrol_integration(): converter.convert(input_storage, storage) return storage def debugging_job(storage): """ Job to be used for debugging purposes. 
""" storage = get_storage_from_arg(storage) with jobcontrol_integration(): report_progress(None, 0, 1) job = execution_context.current_job logger.debug('Running job: {0!r}'.format(job)) deps = list(job.get_deps()) logger.debug('Found {0} dependencies'.format(len(deps))) for dep in deps: build = dep.get_latest_successful_build() if build is None: logger.debug('Dependency {0!r} has no builds' .format(dep)) else: logger.debug('Dependency {0!r} latest build returned {1!r}' .format(dep, build['retval'])) with jobcontrol_integration(): report_progress(None, 1, 1) return storage
2.796875
3
contrib/remote/remote.py
fffkongress/info-beamer
0
12783994
import sys

try:
    import pygame
    from pygame.locals import *
except ImportError:
    print "=========================="
    print "You have to install pygame"
    print "=========================="
    raise

try:
    from OSC import OSCClient, OSCMessage  # provided by pyOSC
except ImportError:
    print "========================="
    print "You have to install pyOSC"
    print "========================="
    raise

PORT = 4444

if len(sys.argv) != 5:
    print "usage: remote <addr> <path> <width> <height>"
    sys.exit(1)

addr, path, width, height = sys.argv[1:5]
width, height = int(width), int(height)

client = OSCClient()
client.connect((addr, PORT))

pygame.init()
screen = pygame.display.set_mode((width, height))
pygame.display.set_caption('Info Beamer Remote Control')

font = pygame.font.Font(None, 16)
text = font.render('Sending to info-beamer @ %s:%d' % (addr, PORT), True, (255, 255, 255))

screen.fill((255, 0, 0))
screen.blit(text, (
    (width - text.get_width()) / 2,
    (height - text.get_height()) / 2
))
pygame.display.flip()

while 1:
    event = pygame.event.wait()
    if event.type == KEYUP:
        msg = OSCMessage(path + "keyup")
        msg.append(event.key)
        client.send(msg)
    elif event.type == KEYDOWN:
        if event.key == K_ESCAPE:
            break
        msg = OSCMessage(path + "keydown")
        msg.append(event.key)
        client.send(msg)
    elif event.type == MOUSEBUTTONDOWN:
        msg = OSCMessage(path + "mousedown")
        msg.append(event.button)
        msg.append(event.pos[0])
        msg.append(event.pos[1])
        client.send(msg)
    elif event.type == MOUSEBUTTONUP:
        msg = OSCMessage(path + "mouseup")
        msg.append(event.button)
        msg.append(event.pos[0])
        msg.append(event.pos[1])
        client.send(msg)
    elif event.type == MOUSEMOTION:
        msg = OSCMessage(path + "mousemotion")
        msg.append(event.pos[0])
        msg.append(event.pos[1])
        client.send(msg)
2.765625
3
classes_clean.py
herttale/School-district-optimization
1
12783995
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Fri Sep 7 10:51:21 2018 @author: hertta """ from shapely.ops import cascaded_union from copy import deepcopy import random from shapely.geometry import LineString class SchoolDistr: """ The class representing the school districts """ def __init__(self, school_id, blocks, td_matrix): # class attribute 1: the school id number self.school_id = school_id # class attribute 2: the blocks belonging to the district (as a dict, # with keys corresponding to the td_matrix keys). self.blocks = blocks # class attribute 3: distance matrix (as a dict, with keys # corresponding to the blocks keys). self.td_matrix = td_matrix # class attribute 3: the geometry of the district (shapely polygon) self.geometry = None # class attribute 4: the maximum allowed distance from block to the # district's school self.max_distance = None # class attribute 5: the amount of 7-year-olds living inside the # district self.students = None # class attribute 6: the maximum amount of 7-year-olds that the # district can host self.student_limit = None # class attribute 7: the current value of the optimization parameter self.optimization_value = None # function call: initiate district attributes self.initiate_distr_attrs() # Method for initializing attributes def initiate_distr_attrs(self): self.geometry = self.calculate_geometry() self.max_distance = self.calculate_max_distance() self.students = self.calculate_student_base() self.student_limit = self.students*1.20 self.optimization_value = self.calculate_optimization_value() # Method for updating attributes def update_distr(self): self.geometry = self.calculate_geometry() self.students = self.calculate_student_base() self.optimization_value = self.calculate_optimization_value() # Method for calculating the district's geometry as cascaded union of the # block geometries def calculate_geometry(self): geom_list = [] for key, block in self.blocks.items(): geom_list.append(block.geometry) return cascaded_union(geom_list) # Method for calculating the district's maximum distance constraint. The # travel time data must not include infinite distance values. 
def calculate_max_distance(self): maxt = 0 for key, block in self.blocks.items(): ttime = self.td_matrix[key]['walk_d'] if ttime > maxt: maxt = ttime return maxt * 1.20 # Method for calculating the current value of the optimization parameter def calculate_optimization_value(self): majority_pop = 0 minority_pop = 0 for key, block in self.blocks.items(): majority_pop += block.lang_majority minority_pop += block.lang_other return minority_pop/(minority_pop + majority_pop) # Method for calculating the current amount of 7-year-olds living # inside the district def calculate_student_base(self): student_sum = 0 for key, block in self.blocks.items(): student_sum += block.student_base return student_sum # Method for calculating the district's neighbourhood: which blocks # the district shares a line segment with def touches_which(self, blocks_dict): neighbors = [] for key, block in blocks_dict.items(): if type(self.geometry.intersection(block.geometry)) == LineString: if key not in self.blocks: neighbors.append(block) return neighbors # Method for calculating whether a block is too far for adoption # Returns True if the block is too far def is_too_far(self, block): dist = self.td_matrix[block.block_id]['walk_d'] return dist > self.max_distance # Method for adopting a selected block def add_block(self, block): if block == None: return else: block.school_id = self.school_id self.blocks[block.block_id] = block # Method for removing an adopted block def remove_block(self, block): if block == None: return else: del self.blocks[block.block_id] # A method for testing if adopting a block would break another district's # contiguity. Returns True if contiguity would break. def break_contiguity(self, block): blocks_copy = deepcopy(self.blocks) geom_list = [] for key, item in blocks_copy.items(): geom_list.append(item.geometry) geom1 = cascaded_union(geom_list) del blocks_copy[block.block_id] geom_list = [] for key, item in blocks_copy.items(): geom_list.append(item.geometry) geom2 = cascaded_union(geom_list) return type(geom1) != type(geom2) # A method for selecting the best block in neighbourhood def select_best_block(self, blockset, districts, global_mean, global_st_dev): majority_pop = 0 minority_pop= 0 for key, value in self.blocks.items(): majority_pop += value.lang_majority minority_pop += value.lang_other best_block = None for block in blockset: # test for rule 2 if block.contains_school == False: # test for rule 3 if (block.student_base + self.students) <= self.student_limit: # test for rule 4 if self.is_too_far(block) == False: current_district = districts[block.school_id] # test for rule 5 if current_district.break_contiguity(block) == False: # calculate specs for the block's current district current_district_majority_pop = 0 current_district_minority_pop= 0 for key, value in current_district.blocks.items(): current_district_majority_pop += \ value.lang_majority current_district_minority_pop += \ value.lang_other current_d_new_value = ((current_district_minority_pop - block.lang_other)/ (current_district_minority_pop - block.lang_other + current_district_majority_pop - block.lang_majority)) current_d_current_value = ((current_district_minority_pop)/ (current_district_minority_pop + current_district_majority_pop)) # test the adoption outcome in relation to current state if best_block == None: own_new_value1 = ((minority_pop + block.lang_other)/ (minority_pop + block.lang_other + majority_pop + block.lang_majority)) # test for the rule 6 if (abs(current_d_new_value - global_mean) <= 
abs(current_d_current_value - global_mean) or abs((current_d_current_value - global_mean) - (self.optimization_value - global_mean)) > abs((current_d_new_value - global_mean) - (own_new_value1 - global_mean))): if (abs(own_new_value1 - global_mean) < abs(self.optimization_value - global_mean)): best_block = block # test the adoption outcome in relation to the current best_block else: own_new_value2 = ((minority_pop + block.lang_other)/ (minority_pop + block.lang_other + majority_pop + block.lang_majority)) current_best = ((minority_pop + best_block.lang_other)/ (minority_pop + best_block.lang_other + majority_pop + best_block.lang_majority)) # test for the rule 6 if (abs(current_d_new_value - global_mean) <= abs(current_d_current_value - global_mean) or abs((current_d_current_value - global_mean) - (self.optimization_value - global_mean)) > abs((current_d_new_value - global_mean) - (own_new_value1 - global_mean))): if (abs(own_new_value2 - global_mean) < abs(current_best - global_mean)): best_block = block # return the best block return best_block # A method for selecting a random block in neighbourhood def select_random_block(self, blockset, districts): blocklist = [] for block in blockset: # test for rule 2 if block.contains_school == False: # test for rule 3 if (block.student_base + self.students) <= self.student_limit: # test for rule 4 if self.is_too_far(block) == False: current_district = districts[block.school_id] # test for rule 5 if current_district.break_contiguity(block) == False: blocklist.append(block) if len(blocklist) > 0: # generate a random number for selecting a block randomindx = random.randint(0,len(blocklist)-1) # return a random block according to the random number generated return blocklist[randomindx] class Block: """ The class representing the residential blocks """ def __init__(self, geometry, block_id, lang_majority, lang_other, student_base, school_id, contains_school): # class attribute 1: the geometry of the block (shapely polygon) self.geometry = geometry # class attribute 2: block id self.block_id = block_id # class attribute 3: the amount of population with Finnish or Swedish as # their mother tongue self.lang_majority = lang_majority # class attribute 4: the amount of population with other languages than Finnish # or Swedish as their mother tongue self.lang_other = lang_other # class attribute 5: the amount of 7-year-olds living in the block self.student_base = student_base # class attribute 6: the id of the school district the block currently # belongs to self.school_id = school_id # class attribute 7: True if the block contains a school, otherwise False self.contains_school = contains_school
2.90625
3
results/sherpa_studies/start_sherpa_frontend.py
yimengmin/wiki-cs-dataset
30
12783996
<reponame>yimengmin/wiki-cs-dataset
import sherpa
import sys

sherpa.core.Study.load_dashboard(sys.argv[1])

while True:
    pass
1.101563
1
py2030/app.py
markkorput/py2030
0
12783997
#!/usr/bin/env python
import time
from optparse import OptionParser

from .component_manager import ComponentManager

if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option('-p', '--profile', dest='profile', default=None)
    # parser.add_option('-f', '--file', dest='file', default=None)
    parser.add_option('-v', '--verbose', dest='verbose', action="store_true", default=False)
    parser.add_option('-y', '--yml', '--yaml', '--config-file', dest='config_file', default=None)

    opts, args = parser.parse_args()

    if opts.profile == None:
        import socket
        opts.profile = socket.gethostname().replace('.', '_')
        del socket

    options = {
        'verbose': opts.verbose,
        'profile': opts.profile,
        'config_file': opts.config_file
    }

    while True:
        cm = ComponentManager(options)
        cm.setup()

        try:
            while cm.running:
                cm.update()
        except KeyboardInterrupt:
            print('KeyboardInterrupt. Quitting.')

        cm.destroy()

        if not cm.restart:
            print(cm.shutdown_message)
            break

        print('restarting...')
        time.sleep(1.0)
2.328125
2
galaxy/api/v2/views/collection.py
ironfroggy/galaxy
0
12783998
# (c) 2012-2019, Ansible by Red Hat # # This file is part of Ansible Galaxy # # Ansible Galaxy is free software: you can redistribute it and/or modify # it under the terms of the Apache License as published by # the Apache Software Foundation, either version 2 of the License, or # (at your option) any later version. # # Ansible Galaxy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Apache License for more details. # # You should have received a copy of the Apache License # along with Galaxy. If not, see <http://www.apache.org/licenses/>. from django.conf import settings from rest_framework import views from rest_framework import status as status_codes import rest_framework.exceptions as drf_exc from rest_framework.permissions import IsAuthenticated from pulpcore.app.serializers import ArtifactSerializer from pulpcore.app.response import OperationPostponedResponse from pulpcore.tasking.tasks import enqueue_with_reservation from pulpcore.app import models as pulp_models from galaxy.api.v2.serializers import collection as serializers from galaxy.main import models from galaxy.pulp import tasks __all__ = [ 'UploadCollectionView' ] class CollectionExistsError(drf_exc.APIException): status_code = status_codes.HTTP_409_CONFLICT default_detail = 'Collection already exists.' default_code = 'collection_exists' class UploadCollectionView(views.APIView): permission_classes = (IsAuthenticated, ) def post(self, request, *args, **kwargs): """Upload an Ansible Collection.""" serializer = serializers.UploadCollectionSerializer( data=request.data, context={'request': request}) serializer.is_valid(raise_exception=True) data = serializer.validated_data # TODO(cutwater): Merge Artifact and UploadCollectionSerializers # TODO(cutwater): Extract namespace and name from `METADATA.json` # and validate that collection name matches filename. namespace = self._validate_namespace(request.user, data) artifact_data = {'file': request.data['file']} if serializer.data['sha256'] is not None: artifact_data['sha256'] = data['sha256'] repository = pulp_models.Repository.objects.get( name=settings.GALAXY_PULP_REPOSITORY) artifact = self._save_artifact(artifact_data) import_task = models.ImportTask.objects.create( owner=request.user, state=models.ImportTask.STATE_PENDING, ) async_result = enqueue_with_reservation( tasks.import_collection, [], kwargs={ 'artifact_pk': artifact.pk, 'repository_pk': repository.pk, 'namespace_pk': namespace.pk, 'task_id': import_task.id, }) return OperationPostponedResponse(async_result, request) def _validate_namespace(self, user, data): """Validate that collection namespace exists and user owns it.""" ns_name = data['filename'].namespace try: ns = models.Namespace.objects.get(name=ns_name) except models.Namespace.DoesNotExist: raise drf_exc.ValidationError( 'Namespace {0} does not exist'.format(ns_name)) if not ns.owners.filter(id=user.id).count(): raise drf_exc.PermissionDenied( 'The namespace listed on your filename must match one of ' 'the namespaces you have access to.' ) return ns def _save_artifact(self, data): artifact_serializer = ArtifactSerializer(data=data) try: artifact_serializer.is_valid(raise_exception=True) except drf_exc.ValidationError as e: error_codes = e.get_codes() if 'unique' in error_codes.get('non_field_errors', []): raise CollectionExistsError() raise return artifact_serializer.save()
1.867188
2
gtfs_grading_app/forms.py
wesleyi23/gtfs_grading_application
6
12783999
<gh_stars>1-10 from django import forms from gtfs_grading import settings from gtfs_grading_app.models import review_category, review_widget, consistency_widget, results_capture_widget, \ gtfs_field, consistency_widget_visual_example, consistency_widget_link, score, data_selector, result, result_image, \ result_reference from gtfs_grading_app.gtfs_spec.import_gtfs_spec import get_gtfs_table_tuple, get_field_type, \ get_gtfs_field_tuple_from_table, get_all_gtfs_field_tuple from gtfs_grading_app.classes.classes import DataSelector # type: ignore class GtfsZipForm(forms.Form): file = forms.FileField() class AddReviewCategory(forms.Form): review_table = forms.ChoiceField(label='Table', widget=forms.Select(attrs={'class': 'form-select form-select-sm'})) gtfs_field = forms.ChoiceField(label='Field', widget=forms.Select(attrs={'class': 'form-select form-select-sm'})) def __init__(self, *args, **kwargs): CHOICES_TABLE = get_gtfs_table_tuple() CHOICES_FIELD = get_all_gtfs_field_tuple() print(CHOICES_FIELD) super().__init__(*args, **kwargs) self.fields['review_table'].choices = CHOICES_TABLE self.fields['gtfs_field'].choices = CHOICES_FIELD def save(self): data = self.cleaned_data gtfs_type = get_field_type(data['gtfs_field'], data['review_table']) obj, created = gtfs_field.objects.get_or_create(name=data['gtfs_field'], table=data['review_table'], type=gtfs_type) my_review_widget = review_widget.objects.create() my_consistency_widget = consistency_widget.objects.create() my_results_capture_widget = results_capture_widget.objects.create() my_review_category = review_category() my_review_category.gtfs_field = obj my_review_category.review_widget = my_review_widget my_review_category.consistency_widget = my_consistency_widget my_review_category.results_capture_widget = my_results_capture_widget my_review_category = my_review_category.save() return my_review_category class AddReviewWidget(forms.ModelForm): class Meta: model = review_widget exclude = ['related_field_same_table', 'related_field_other_table'] # class AddRelatedFieldSameTable(forms.ModelForm): # class Meta: # EXAMPLE_FIELD_CHOICES = [ # ('blue', 'Blue'), # ('green', 'Green'), # ('black', 'Black'), # ] # model = gtfs_field # exclude = ['table', 'type'] # widgets = {'name': forms.SelectMultiple(choices=EXAMPLE_FIELD_CHOICES,)} class AddConsistencyWidget(forms.ModelForm): class Meta: model = consistency_widget exclude = ['other_text'] class AddResultsCaptureWidget(forms.ModelForm): class Meta: model = results_capture_widget exclude = [''] widgets = {'has_score_reason': forms.Select(attrs={'class': 'form-select form-select-sm custom-select-sm'}), 'has_score_image': forms.Select(attrs={'class': 'form-select form-select-sm custom-select-sm'}), 'has_reference_link': forms.Select(attrs={'class': 'form-select form-select-sm custom-select-sm'}), 'has_reference_date': forms.Select(attrs={'class': 'form-select form-select-sm custom-select-sm'})} class AddConsistencyWidgetVisualExample(forms.ModelForm): class Meta: model = consistency_widget_visual_example exclude = [''] widgets = {'consistency_widget': forms.HiddenInput()} class AddConsistencyWidgetLink(forms.ModelForm): class Meta: model = consistency_widget_link exclude = [''] widgets = {'consistency_widget': forms.HiddenInput()} class AddConsistencyWidgetOtherText(forms.ModelForm): class Meta: model = consistency_widget fields = ['other_text'] class AddReviewWidgetRelatedFieldSameTable(forms.Form): def __init__(self, *args, **kwargs): my_gtfs_table_name = kwargs.pop("my_gtfs_table_name") 
super(AddReviewWidgetRelatedFieldSameTable, self).__init__(*args, **kwargs) self.gtfs_table_name = my_gtfs_table_name CHOICES = get_gtfs_field_tuple_from_table(my_gtfs_table_name) self.fields['field_name'] = forms.ChoiceField(choices=CHOICES, widget=forms.Select(attrs={'class': "form-select form-select-sm"})) self.fields['review_widget_id'] = forms.IntegerField(widget=forms.HiddenInput()) def save(self): field_type = get_field_type(self.cleaned_data['field_name'], self.gtfs_table_name) my_gtfs_field, created = gtfs_field.objects.get_or_create(name=self.cleaned_data['field_name'], table=self.gtfs_table_name, type=field_type) # if created: # my_gtfs_field.save() my_review_widget = review_widget.objects.get(id=self.cleaned_data['review_widget_id']) my_review_widget.related_field_same_table.add(my_gtfs_field) class AddResultCaptureScore(forms.ModelForm): class Meta: model = score exclude = [''] widgets = {'results_capture_widget': forms.HiddenInput(), 'score': forms.NumberInput(attrs={'class': 'form-control form-control-sm'}), 'help_text': forms.Textarea(attrs={'class': 'form-control', 'rows': 2})} class ChooseDataSelector(forms.Form): # see java script in admin_details.html when making changes def __init__(self, *args, **kwargs): self.my_review_catagory = kwargs.pop("my_review_category") CHOICES, related_fields = DataSelector.get_valid_choices_and_related_fields() super(ChooseDataSelector, self).__init__(*args, **kwargs) self.fields['name'] = forms.ChoiceField(choices=CHOICES, label=False, widget=forms.Select( attrs={'class': 'form-select form-select-sm'})) self.fields['number_to_review'] = forms.IntegerField(widget=forms.NumberInput(attrs={'class': 'form-control form-control-sm'})) self.related_fields = related_fields def save(self): if self.cleaned_data['name'] == "log10(n) + 2": self.cleaned_data['number_to_review'] = None my_data_selector, created = data_selector.objects.get_or_create(name=self.cleaned_data['name'], number_to_review=self.cleaned_data['number_to_review']) self.my_review_catagory.data_selector = my_data_selector self.my_review_catagory.save() class NewReviewForm(forms.Form): def __init__(self, *args, **kwargs): agency_options_choices = kwargs.pop('agency_options') mode_options_choices = kwargs.pop('mode_options') super(NewReviewForm, self).__init__(*args, **kwargs) self.fields['agency'] = forms.ChoiceField(choices=agency_options_choices, label='Agency', widget=forms.Select( attrs={'class': 'form-select form-select-sm'})) self.fields['mode'] = forms.ChoiceField(choices=mode_options_choices, label='Mode', widget=forms.Select( attrs={'class': 'form-select form-select-sm'})) class ResultForm(forms.Form): def __init__(self, *args, **kwargs): my_results_capture_widget = kwargs.pop('results_capture_widget') super(ResultForm, self).__init__(*args, **kwargs) self.fields['result_id'] = forms.IntegerField(required=True, widget=forms.HiddenInput()) self.fields['review_category_id'] = forms.IntegerField(required=True, widget=forms.HiddenInput()) self.fields['score_id'] = forms.IntegerField(required=True, label='', widget=forms.NumberInput(attrs={'class': 'score-id-form-field'})) if my_results_capture_widget.has_score_image in ['Optional', 'Required']: self.fields['image'] = forms.ImageField() if my_results_capture_widget.has_reference_link in ['Optional', 'Required']: self.fields['reference_name'] = forms.CharField() self.fields['reference_url'] = forms.URLField(widget=forms.URLInput) if my_results_capture_widget.has_reference_date in ['Optional', 'Required']: 
self.fields['published_reference_date'] = forms.DateField(widget=forms.DateInput(attrs={'class': 'datepicker'})) if my_results_capture_widget.has_score_reason in ['Optional', 'Required']: self.fields['score_reason'] = forms.CharField(widget=forms.Textarea) if my_results_capture_widget.has_score_image == 'Optional': self.fields['image'].required = False if my_results_capture_widget.has_reference_link == 'Optional': self.fields['reference_name'].required = False self.fields['reference_url'].required = False if my_results_capture_widget.has_reference_date == 'Optional': self.fields['published_reference_date'].required = False if my_results_capture_widget.has_score_reason == 'Optional': self.fields['score_reason'].required = False def __save__(self): my_review_category = review_category.objects.get(id=self.cleaned_data['review_category_id']) my_results_capture_widget = results_capture_widget.objects.get(id=my_review_category.results_capture_widget.id) my_result = result.objects.get(id=self.cleaned_data['result_id']) my_result.score_id = self.cleaned_data['score_id'] if my_results_capture_widget.has_score_reason in ['Optional', 'Required']: my_result.score_reason = self.cleaned_data['score_reason'] if my_results_capture_widget.has_score_image in ['Optional', 'Required']: if self.cleaned_data['image']: image, created = result_image.objects.get_or_create(result_id=self.cleaned_data['result_id']) image.image = self.cleaned_data['image'] image.save() my_result.save() if my_results_capture_widget.has_reference_link in ['Optional', 'Required']: reference, created = result_reference.objects.get_or_create(result_id=self.cleaned_data['result_id']) reference.reference_name = self.cleaned_data['reference_name'] reference.url = self.cleaned_data['reference_url'] reference.save() if my_results_capture_widget.has_reference_date in ['Optional', 'Required']: reference.published_reference_date = self.cleaned_data['published_reference_date'] reference.save()
2.109375
2
torchsupport/experimental/apps/fewshot.py
bobelly/torchsupport
18
12784000
from flexx import flx from flexx import event import os from tornado.web import StaticFileHandler class ScaleImageWidget(flx.Widget): """ Display an image from a url. The ``node`` of this widget is an `<img> <https://developer.mozilla.org/docs/Web/HTML/Element/img>`_ wrapped in a `<div> <https://developer.mozilla.org/docs/Web/HTML/Element/div>`_ (the ``outernode``) to handle sizing. """ DEFAULT_MIN_SIZE = 16, 16 _sequence = 0 source = event.StringProp('', settable=True, doc=""" The source of the image, This can be anything that an HTML img element supports. """) stretch = event.BoolProp(False, settable=True, doc=""" Whether the image should stretch to fill all available space, or maintain its aspect ratio (default). """) def _create_dom(self): global window outer = window.document.createElement('div') inner = window.document.createElement('img') outer.appendChild(inner) return outer, inner @event.reaction def __resize_image(self): size = self.size if self.stretch: self.node.style.maxWidth = None self.node.style.maxHeight = None self.node.style.width = size[0] + 'px' self.node.style.height = size[1] + 'px' else: self.node.style.backgroundColor = None self.node.style.marginLeft = "5%" self.node.style.marginTop = "5%" self.node.style.maxWidth = "90%" self.node.style.maxWidth = "auto" self.node.style.width = "90%" self.node.style.height = "auto" @event.reaction def __source_changed(self): self.node.src = self.source class ClickableImage(flx.Widget): def init(self, source): self.src = source self.img = ScaleImageWidget(source = source, flex=1) self.img.node.addEventListener("mouseover", lambda e: self._show_clickable_in()) self.img.node.addEventListener("mouseout", lambda e: self._show_clickable_out()) def _show_clickable_in(self): size = self.img.size[0] p20 = size // 20 self.img.node.style.boxShadow = "0px 0px "+ p20 + "px 2px black" def _show_clickable_out(self): self.img.node.style.boxShadow = None @flx.action def set_source(self, source): self.src = source if self.src == None: self.img.node.style.visibility = "hidden" else: self.img.node.style.visibility = "visible" self.img.set_source(source) class ImageGrid(flx.Widget): def init(self, width=4, height=4, path=lambda x, y: "/images/starting_image.png", handler=lambda o, x, y: print(x, y)): self.width = width self.height = height self.path = path self.handler = handler self.imageGrid = [[None for idy in range(height)] for idx in range(width)] with flx.HFix(): for idx in range(width): with flx.VFix(flex=1): for idy in range(height): self.imageGrid[idx][idy] = ClickableImage(path(idx, idy), flex=1) a, b = idx, idy self.imageGrid[idx][idy].node.addEventListener("click", self._on_click_handler(a, b)) def _on_click_handler(self, idx, idy): return lambda e: self.handler(self, idx, idy) def path_provider(x, y): if (x + y) % 2 == 0: return "/images/starting_image.png" else: return "/images/cytosol_image.png" class FewShot(flx.Widget): def init(self): self.selectedImages = [] with flx.TabLayout() as self.tabs: with flx.HFix(title="selection", flex=1) as self.selector_view: with flx.VFix() as self.images: flx.Label(text="Images", flex=(1, 1)) self.imageGrid = ImageGrid(4, 4, path_provider, lambda o, idx, idy: self.image_click_handler(o, idx, idy), flex=(1, 9)) self.images.node.style.backgroundColor = "#88888888" with flx.VFix() as self.selected: flx.Label(text="Selected", flex=(1, 1)) self.selectedGrid = ImageGrid(4, 4, self.selected_provider, lambda o, idx, idy: self.selected_click_handler(o, idx, idy), flex=(1, 9)) with flx.HFix(title="results", 
flex=1) as self.result_view: self.resultGrid = ImageGrid(8, 4, path_provider, flex=(1, 1)) @flx.action def image_click_handler(self, o, idx, idy): source = o.imageGrid[idx][idy].src if (source, idx, idy) not in self.selectedImages: self.selectedImages.append((source, idx, idy)) length = len(self.selectedImages) new_position = ( (length - 1) % 4, (length - 1) // 4 ) self.selectedGrid.imageGrid[new_position[0]][new_position[1]].set_source(source) @flx.action def selected_click_handler(self, o, idx, idy): position = idy * 4 + idx if position < len(self.selectedImages): self.selectedImages.pop(position) self.selectedGrid.imageGrid[idx][idy].set_source(None) for pos, elem in enumerate(self.selectedImages): source = elem[0] new_position = ( pos % 4, pos // 4 ) self.selectedGrid.imageGrid[new_position[0]][new_position[1]].set_source(source) for pos in range(len(self.selectedImages), 16): new_position = ( pos % 4, pos // 4 ) self.selectedGrid.imageGrid[new_position[0]][new_position[1]].set_source(None) def selected_provider(self, idx, idy): return lambda x, y: None tornado_app = flx.create_server().app dirname = os.path.expanduser('~/Documents/knoplab/yeastimages_presentation/') tornado_app.add_handlers(r".*", [ (r"/images/(.*)", StaticFileHandler, {"path": dirname}), ]) app = flx.App(FewShot) app.launch('browser') flx.run()
3.046875
3
kernel/components/featurecalculation/vertfeaturecalculation/base_feature_calculation.py
rinceyuan/WeFe
39
12784001
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2021 Tianmian Tech. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import functools import json from google.protobuf import json_format from common.python.common.consts import ModelType from common.python.utils import log_utils from kernel.components.binning.vertfeaturebinning.vert_binning_promoter import VertFeatureBinningPromoter from kernel.components.binning.vertfeaturebinning.vert_binning_provider import VertFeatureBinningProvider from kernel.components.featurecalculation.base import filter_factory from kernel.components.featurecalculation.base.calculation_properties import CalculationProperties, \ CompletedCalculationResults from kernel.components.featurecalculation.param import FeatureCalculationParam from kernel.model_base import ModelBase from kernel.protobuf.generated import feature_calculation_param_pb2, feature_calculation_meta_pb2 from kernel.transfer.variables.transfer_class.vert_feature_calculation_transfer_variable import \ VertFeatureCalculationTransferVariable from kernel.utils import abnormal_detection from kernel.utils import consts from kernel.utils.data_util import get_header from kernel.utils.io_check import assert_io_num_rows_equal LOGGER = log_utils.get_logger() MODEL_PARAM_NAME = 'FeatureCalculationParam' MODEL_META_NAME = 'FeatureCalculationMeta' MODEL_NAME = 'VertFeatureCalculation' class BaseVertFeatureCalculation(ModelBase): def __init__(self): super(BaseVertFeatureCalculation, self).__init__() self.transfer_variable = VertFeatureCalculationTransferVariable() self.curt_calculate_properties = CalculationProperties() self.completed_calculation_result = CompletedCalculationResults() self.schema = None self.header = None self.party_name = 'Base' # Possible previous model self.binning_model = None self.static_obj = None self.model_param = FeatureCalculationParam() self.meta_dicts = {} def _init_model(self, params): self.model_param = params # self.cols_index = params.calculate_cols self.filter_methods = params.filter_methods # self.local_only = params.local_only def _init_calculate_params(self, data_instances): if self.schema is None: self.schema = data_instances.schema if self.header is not None: return self.schema = data_instances.schema header = get_header(data_instances) self.header = header self.curt_calculate_properties.set_header(header) self.curt_calculate_properties.set_last_left_col_indexes([x for x in 
range(len(header))]) if self.model_param.calculate_col_indexes == -1: self.curt_calculate_properties.set_calculate_all_cols() else: self.curt_calculate_properties.add_calculate_col_indexes(self.model_param.calculate_col_indexes) self.curt_calculate_properties.add_calculate_col_names(self.model_param.calculate_names) self.completed_calculation_result.set_header(header) self.completed_calculation_result.set_calculate_col_names(self.curt_calculate_properties.calculate_col_names) self.completed_calculation_result.set_all_left_col_indexes(self.curt_calculate_properties.all_left_col_indexes) def _get_meta(self): self.meta_dicts['filter_methods'] = self.filter_methods self.meta_dicts['cols'] = self.completed_calculation_result.get_calculate_col_names() self.meta_dicts['need_run'] = self.need_run meta_protobuf_obj = feature_calculation_meta_pb2.FeatureCalculationMeta(**self.meta_dicts) return meta_protobuf_obj def _get_param(self): LOGGER.debug("curt_calculate_properties.left_col_name: {}, completed_calculation_result: {}".format( self.curt_calculate_properties.left_col_names, self.completed_calculation_result.all_left_col_names )) LOGGER.debug("Length of left cols: {}".format(len(self.completed_calculation_result.all_left_col_names))) # left_cols = {x: True for x in self.curt_calculate_properties.left_col_names} left_cols = {x: True for x in self.completed_calculation_result.all_left_col_names} final_left_cols = feature_calculation_param_pb2.LeftCols( original_cols=self.completed_calculation_result.get_calculate_col_names(), left_cols=left_cols ) result_obj = feature_calculation_param_pb2.FeatureCalculationParam( results=self.completed_calculation_result.filter_results, col_names=self.completed_calculation_result.get_sorted_col_names(), ) result_obj_list = [] result_obj_dic = {} result = json_format.MessageToJson(result_obj) result_obj_dic["role"] = self.role result_obj_dic["member_id"] = self.member_id result_obj_dic["results"] = json.loads(result)["results"] LOGGER.debug("json_result: {}".format(result_obj_dic)) result_obj_list.append(result_obj_dic) if self.role == consts.PROVIDER: print(VertFeatureCalculationTransferVariable().provider_calculate_results.remote(result_obj_dic, role=consts.PROMOTER, idx=0)) elif self.role == consts.PROMOTER: provider_result_obj_dics = VertFeatureCalculationTransferVariable().provider_calculate_results.get(idx=-1) for provider_result_obj in provider_result_obj_dics: result_obj_list.append(provider_result_obj) calculate_results_list = [] for result_obj in result_obj_list: role = result_obj["role"] member_id = str(result_obj["member_id"]) new_results = [] results = result_obj["results"] for result in results: filter_name = result["filterName"] feature_values = result["featureValues"] feature_values = dict(sorted(feature_values.items(), key=lambda e: e[1], reverse=True)) cols = [] values = [] for key in feature_values: cols.append(key) values.append(feature_values[key]) new_result = feature_calculation_param_pb2.FeatureCalculationValueResultParam( filter_name=filter_name, cols=cols, values=values, ) new_results.append(new_result) new_result_obj = feature_calculation_param_pb2.FeatureCalculationResultParam( role=role, member_id=member_id, results=new_results ) calculate_results_list.append(new_result_obj) results = feature_calculation_param_pb2.FeatureCalculationResultsParam( calculate_results=calculate_results_list ) return results def save_data(self): return self.data_output def export_model(self): LOGGER.debug("Model output is : {}".format(self.model_output)) if 
self.model_output is not None: LOGGER.debug("model output is already exist, return directly") return self.model_output meta_obj = self._get_meta() param_obj = self._get_param() result = { MODEL_META_NAME: meta_obj, MODEL_PARAM_NAME: param_obj } self.model_output = result return result def load_model(self, model_dict): if ModelType.TRAIN_MODEL in model_dict.get("model", {}): # self._parse_need_run(model_dict, MODEL_META_NAME) LOGGER.debug("Feature calculation need run: {}".format(self.need_run)) if not self.need_run: return model_param = list(model_dict.get('model').values())[0].get(MODEL_PARAM_NAME) model_meta = list(model_dict.get('model').values())[0].get(MODEL_META_NAME) self.model_output = { MODEL_META_NAME: model_meta, MODEL_PARAM_NAME: model_param } header = list(model_param.header) # self.schema = {'header': header} self.header = header self.curt_calculate_properties.set_header(header) self.completed_calculation_result.set_header(header) self.curt_calculate_properties.set_last_left_col_indexes([x for x in range(len(header))]) self.curt_calculate_properties.add_calculate_col_names(header) final_left_cols_names = dict(model_param.final_left_cols.left_cols) LOGGER.debug("final_left_cols_names: {}".format(final_left_cols_names)) for col_name, _ in final_left_cols_names.items(): self.curt_calculate_properties.add_left_col_name(col_name) self.completed_calculation_result.add_filter_results(filter_name='conclusion', calculate_properties=self.curt_calculate_properties) self.update_curt_calculate_param() LOGGER.debug("After load model, completed_calculation_result.all_left_col_indexes: {}".format( self.completed_calculation_result.all_left_col_indexes)) if ModelType.BINNING_MODEL in model_dict.get("model", {}): LOGGER.debug("Has binning_model, model_dict: {}".format(model_dict)) if self.role == consts.PROMOTER: self.binning_model = VertFeatureBinningPromoter() else: self.binning_model = VertFeatureBinningProvider() # binning = model_dict['model'][ModelType.BINNING_MODEL] # Model_Param = binning[0]['Model_Param'] # newProviderResults = [] # if 'providerResults' in Model_Param.keys(): # providerResults = Model_Param['providerResults'] # for providerResult in providerResults: # binningResult = providerResult['binningResult'] # if binningResult: # newProviderResults.append(providerResults) # Model_Param['providerResults'] = newProviderResults # binning[0]['Model_Param'] = Model_Param new_model_dict = {'model': model_dict['model'][ModelType.BINNING_MODEL]} LOGGER.debug(f'model={new_model_dict}') self.binning_model.load_model(new_model_dict) @staticmethod def calculate_cols(instance, left_col_idx): instance.features = instance.features[left_col_idx] return instance def _transfer_data(self, data_instances): before_one_data = data_instances.first() f = functools.partial(self.calculate_cols, left_col_idx=self.completed_calculation_result.all_left_col_indexes) new_data = data_instances.mapValues(f) LOGGER.debug("When transfering, all left_col_names: {}".format( self.completed_calculation_result.all_left_col_names )) new_data = self.set_schema(new_data, self.completed_calculation_result.all_left_col_names) one_data = new_data.first()[1] LOGGER.debug( "In feature calculation transform, Before transform: {}, length: {} After transform: {}, length: {}".format( before_one_data[1].features, len(before_one_data[1].features), one_data.features, len(one_data.features))) return new_data def _abnormal_detection(self, data_instances): """ Make sure input data_instances is valid. 
""" abnormal_detection.empty_table_detection(data_instances) abnormal_detection.empty_feature_detection(data_instances) def set_schema(self, data_instance, header=None): if header is None: self.schema["header"] = self.curt_calculate_properties.header else: self.schema["header"] = header data_instance.schema = self.schema return data_instance def update_curt_calculate_param(self): new_calculate_properties = CalculationProperties() new_calculate_properties.set_header(self.curt_calculate_properties.header) new_calculate_properties.set_last_left_col_indexes(self.curt_calculate_properties.all_left_col_indexes) new_calculate_properties.add_calculate_col_names(self.curt_calculate_properties.left_col_names) LOGGER.debug("In update_curt_calculate_param, header: {}, cols_map: {}," "last_left_col_indexes: {}, calculate_col_names: {}".format( new_calculate_properties.header, new_calculate_properties.col_name_maps, new_calculate_properties.last_left_col_indexes, new_calculate_properties.calculate_col_names )) self.curt_calculate_properties = new_calculate_properties def _filter(self, data_instances, method, suffix): this_filter = filter_factory.get_filter(filter_name=method, model_param=self.model_param, role=self.role) this_filter.set_calculation_properties(self.curt_calculate_properties) this_filter.set_statics_obj(self.static_obj) this_filter.set_binning_obj(self.binning_model) this_filter.set_transfer_variable(self.transfer_variable) self.curt_calculate_properties = this_filter.fit(data_instances, suffix).calculation_properties provider_calculate_properties = getattr(this_filter, 'provider_calculation_properties', None) LOGGER.debug("method: {}, provider_calculate_properties: {}".format( method, provider_calculate_properties)) self.completed_calculation_result.add_filter_results(filter_name=method, calculate_properties=self.curt_calculate_properties, provider_calculate_properties=provider_calculate_properties) LOGGER.debug("method: {}, calculation_cols: {}, left_cols: {}".format( method, self.curt_calculate_properties.calculate_col_names, self.curt_calculate_properties.left_col_names)) self.update_curt_calculate_param() LOGGER.debug("After updated, method: {}, calculation_cols: {}, left_cols: {}".format( method, self.curt_calculate_properties.calculate_col_names, self.curt_calculate_properties.left_col_names)) self.meta_dicts = this_filter.get_meta_obj(self.meta_dicts) def fit(self, data_instances): LOGGER.info("Start Vert Calculation Fit and transform.") self._abnormal_detection(data_instances) self._init_calculate_params(data_instances) if len(self.curt_calculate_properties.calculate_col_indexes) == 0: LOGGER.warning("None of columns has been set to calculat") else: for filter_idx, method in enumerate(self.filter_methods): self._filter(data_instances, method, suffix=str(filter_idx)) new_data = self._transfer_data(data_instances) LOGGER.info("Finish Vert Calculation Fit and transform.") return new_data @assert_io_num_rows_equal def transform(self, data_instances): self._abnormal_detection(data_instances) self._init_calculate_params(data_instances) new_data = self._transfer_data(data_instances) return new_data
1.429688
1
apps/resizer/migrations/0003_auto_20200807_0453.py
atseplyaev/django_image_resizer
0
12784002
# Generated by Django 3.1 on 2020-08-07 04:53

from django.db import migrations, models


def set_images_names(apps, schema_editor):
    UploadImage = apps.get_model('resizer', 'UploadImage')
    for image in UploadImage.objects.all():
        image_name = image.original_image.name.split('/')[-1]
        image.image_name = image_name
        image.save()


class Migration(migrations.Migration):

    dependencies = [
        ('resizer', '0002_auto_20200807_0444'),
    ]

    operations = [
        migrations.AddField(
            model_name='uploadimage',
            name='image_name',
            field=models.CharField(default='', max_length=128),
        ),
        migrations.RunPython(set_images_names),
    ]
2.015625
2
migrations/versions/fa4f694e986a_created_comments.py
Amukozoberit/Pitches
1
12784003
"""Created comments Revision ID: fa4f694e986a Revises: <KEY> Create Date: 2021-08-16 21:48:43.079233 """ # revision identifiers, used by Alembic. revision = 'fa4f694e986a' down_revision = '<KEY>' from alembic import op import sqlalchemy as sa def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('pitches', sa.Column('time', sa.DateTime(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('pitches', 'time') # ### end Alembic commands ###
1.257813
1
statsmodels/multivariate/api.py
nikhase/statsmodels
34
12784004
<reponame>nikhase/statsmodels
from .pca import PCA
from .manova import MANOVA
from .factor import Factor, FactorResults
from .cancorr import CanCorr
from . import factor_rotation
0.910156
1
users/views.py
mr-star-k/Feedly
1
12784005
<reponame>mr-star-k/Feedly<filename>users/views.py from django.http import HttpResponse,JsonResponse from django.shortcuts import render, redirect,reverse from django.contrib.auth import login, authenticate,logout from django.contrib.auth.decorators import login_required from django.utils.decorators import method_decorator from .forms import * from django.contrib.sites.shortcuts import get_current_site from django.utils.encoding import force_bytes, force_text from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode from django.template.loader import render_to_string from .tokens import account_activation_token from django.contrib.auth.models import User from django.contrib import messages from django.core.mail import send_mail from feedly.settings.base import EMAIL_HOST_USER from django.views import View from django.views.generic import DetailView from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger from .models import * from django.db.models import Q import json import urllib from django.conf import settings from .search_engine import Search class LandingView(View): def get(self, request, *args, **kwargs): if request.user.is_authenticated: return redirect('home') else: return render(request,'landing.html',{'form':Login_Form()}) class HomeView(View): """its the homepage of the site where posts are displayed """ @method_decorator(login_required) def get(self, request, *args, **kwargs): user = request.user is_voted = Vote.objects.filter(voter=user,status = True) post_voted_list = list() form = CommentForm() for votes in is_voted: post_voted =Post.objects.get(vote=votes) post_voted_list.append(post_voted) context={ # 'user':request.user, # 'searchform':SearchForm, 'post_voted_list': post_voted_list, 'object_list': Post.objects.order_by('-post_on'), 'com_form': form, 'comments':Comment.objects.all().order_by('comment_on') } return render(request, 'home.html', context) class SortedView(View): """ this view helps in sorting the displayed posts according to the choice made.""" @method_decorator(login_required) def get(self, request,rec, *args, **kwargs): if rec =='3': queryset=Post.objects.order_by('-post_on') if rec =='2': queryset = Post.objects.order_by('?') if rec =='1': queryset = Post.objects.order_by('-result') context={ 'object_list': queryset, } return render(request, 'home.html', context) class SignUpView(View): """It is used to fill and validate and create a new user whenever a new signup form is filled""" form = SignupForm() def post(self, request, *args, **kwargs): form = SignupForm(request.POST) if form.is_valid(): ''' Begin reCAPTCHA validation ''' recaptcha_response = request.POST.get('g-recaptcha-response') url = 'https://www.google.com/recaptcha/api/siteverify' values = { 'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY, 'response': recaptcha_response } data = urllib.parse.urlencode(values).encode() req = urllib.request.Request(url, data=data) response = urllib.request.urlopen(req) result = json.loads(response.read().decode()) """ end of reCAPTCHA validation""" if result['success']: user = form.save(commit=False) user.is_active = False user.save() current_site = get_current_site(request) subject = 'Activate Your Feedly Account' message = render_to_string('acc_active_email.html', { 'user': user, 'domain': current_site.domain, 'uid': urlsafe_base64_encode(force_bytes(user.pk)).decode, 'token': account_activation_token.make_token(user), }) from_mail = EMAIL_HOST_USER to_mail = [user.email] send_mail(subject, message, from_mail, to_mail, 
fail_silently=False) messages.success(request, 'Please!Confirm your email to complete registration.') return redirect('signup') else: messages.error(request, 'Invalid reCAPTCHA. Please try again.') return redirect('signup') else: return render(request, 'signup.html', {'form': form}) def get(self,request, *args, **kwargs): if request.user.is_authenticated: return redirect('home') else: form = SignupForm() return render(request, 'signup.html', {'form': form}) #account activation function class Activate(View): def get(self, request, uidb64, token): try: uid = force_text(urlsafe_base64_decode(uidb64)) user = User.objects.get(pk=uid) except(TypeError, ValueError, OverflowError, User.DoesNotExist): user = None if user is not None and account_activation_token.check_token(user, token): user.is_active = True user.save() login(request, user, backend='django.contrib.auth.backends.ModelBackend') # messages.success(request, 'thank you! for email verification') return redirect('edit_profile',user.id) else: return HttpResponse("invalid linkh") class EditProfileView(View): @method_decorator(login_required) def post(self, request,user_id ,*args, **kwargs): form = Edit_Profile_Form(request.POST, request.FILES, instance=request.user.myprofile) if form.is_valid(): form.save() messages.success(request, 'Profile updated successfully!!') return redirect('edit_profile', user_id) else: return HttpResponse("hello") @method_decorator(login_required) def get(self, request, *args, **kwargs): form = Edit_Profile_Form(instance=request.user.myprofile) return render(request, 'edit_profile.html', {'form': form}) class LoginView(View): """This view logins the user and raises validation error in case of wrong credentials """ def post(self, request,*args, **kwargs): username = request.POST['username'] password = request.POST['password'] user = authenticate(username=username, password=password) if user is not None: if user.is_active: login(request, user) return redirect('home') else: messages.error('please! 
verify your Email first') return redirect('landing') else: messages.error(request, 'username or password not correct') return redirect('landing') def get(self, request, *args, **kwagrs): if request.user.is_authenticated: return redirect('home') else: form = Login_Form() return render(request, 'landing.html', {'form': form}) class LogoutView(View): """this view simply logs out user and thereby deleting the session.""" def get(self, request,*args, **kwargs): logout(request) return redirect('landing') class ProfileView(View): """profile view is used to view and edit profile of yours""" @method_decorator(login_required) def get(self, request, user_id,*args, **kwargs): user = User.objects.get(pk=user_id) posts = Post.objects.filter(post_by=user).order_by('-post_on') userlogin = self.request.user is_voted = Vote.objects.filter(voter=userlogin, status=True) post_voted_list = list() form = CommentForm() for votes in is_voted: post_voted = Post.objects.get(vote=votes) post_voted_list.append(post_voted) context={ 'user': user, 'posts': posts, 'comments':Comment.objects.all().order_by('-comment_on'), 'post_voted_list': post_voted_list, 'com_form': form, } return render(request, 'profile.html', context) class DeleteAccount(View): """this deletes the account of the user""" @method_decorator(login_required) def get(self, request, *args, **kwargs): return render(request, 'delete_acc.html') def post(self, request, *args, **kwargs): choice = request.POST['des'] if choice == 'accept': user = request.user user.delete() logout(request) return redirect('landing') if choice == 'reject': current_user = request.user return redirect('profile', current_user.id) class CreatePostView(View): """this generates the new post for the user(ex : image,text,video)""" @method_decorator(login_required) def get(self, request,user_id,ch, *args, **kwagrs): if ch == 'image': form = Create_Imgpost_Form(request.POST or None, request.FILES or None) if ch == 'text': form = Create_Textpost_Form(request.POST or None) if ch == 'video': form = Create_Videopost_Form(request.POST or None, request.FILES or None) return render(request, 'createpost.html',{'form':form}) @method_decorator(login_required) def post(self,request,user_id,ch,*args,**kwrgs): if ch == 'image': form = Create_Imgpost_Form(request.POST or None, request.FILES or None) if ch == 'text': form = Create_Textpost_Form(request.POST or None) if ch == 'video': form = Create_Videopost_Form(request.POST or None, request.FILES or None) f = form.save(commit=False) f.post_by = self.request.user if form.is_valid(): form.save() return redirect('home') else: return render(request, 'createpost.html', {'form': form,}) class VoteView(View): """its helps invoting up and down to a post works similar to like and dislike""" @method_decorator(login_required) def get(self,request,*args,**kwargs): post = request.GET['postid'] print(post) user = self.request.user item = Post.objects.get(pk=post) prev_votes = Vote.objects.filter(Q(voter=user)& Q(post_id = post)) has_voted = (prev_votes.count()>0) if not has_voted: Vote.objects.create(voter=user, post_id=post, status=True) item.result = item.result +1 voted = True item.save() else: item.result = item.result - 1 voted = False item.save() prev_votes[0].delete() result = Vote.objects.filter(post_id = post).count() print(result) print(voted) data={ 'result': result, 'voted': voted, } return JsonResponse(data) class CommentView(View): """handles the comment on the posts""" @method_decorator(login_required) def post(self,request,postid,*args,**kwargs): form 
= CommentForm(request.POST or None) print ("in view comment") print (postid) f = form.save(commit=False) f.comment_by = self.request.user f.post_id = postid if form.is_valid(): form.save() # comm = Comment.objects.all().order_by('comment_on') # comments = list(comm.values()) data = { 'comment': f.content, 'comment_by': f.comment_by.username, 'comment_on': f.comment_on.strftime("%b. %d, %Y,%I:%M %p"), } return JsonResponse(data) else: return HttpResponse("form Invalid") class SearchView(View): """helps in searching friends and groups """ @method_decorator(login_required) def get(self,request,*args,**kwargs): search = request.GET.get('search') if search not in ('',' '): search_words = search.strip().split(' ') quer = MyProfile.objects query = Search(search_words,quer,0,user=request.user) friend = FriendList.objects.get(user=request.user) requests = FriendRequest.objects.get(user=request.user) sentreq = FriendRequestSent.objects.get(user=request.user) friendlist = list(friend.friends.all()) requestlist = list(requests.friend_request.all()) sentrequestlist = list(sentreq.request_sent.all()) if query: context={ 'results': query, 'friendlist': friendlist, 'requestlist': requestlist, 'sentrequest':sentrequestlist, } return render(request,'search_result.html',context) else: context = { 'results': query, 'friendlist': friendlist, 'requestlist': requestlist, 'sentrequest': sentrequestlist, } return render(request,'search_result.html',context) else: return redirect(request.META['HTTP_REFERER']) class SendCancelRequestView(View): """it is used to send and cancel requests """ @method_decorator(login_required) def get(self,request,user_id,*args,**kwargs): # user = user_id try: req= FriendRequestSent.objects.get(user=self.request.user,request_sent=user_id) except FriendRequestSent.DoesNotExist: req = None if req is None: sender = FriendRequestSent.objects.get(user=self.request.user) sender.request_sent.add(user_id) receiver = FriendRequest.objects.get(user=user_id) receiver.friend_request.add(sender.user) data={ 'status':'sent', } return JsonResponse(data) else: req.request_sent.remove(user_id) receiver = FriendRequest.objects.get(user=user_id) receiver.friend_request.remove(req.user) data={ 'status':'cancel' } return JsonResponse(data) class FriendRequestView(View): """this is used to view friend requests recieved""" @method_decorator(login_required) def get(self,request,*args,**kwargs): friend_request_list = FriendRequest.objects.get(user=self.request.user) request_list = list(friend_request_list.friend_request.all()) context = { 'friend_request_list':request_list, } return render(request,'friend_request.html',context) class AcceptDeclineRequestView(View): @method_decorator(login_required) def get(self,request,*args,**kwargs): sender = self.kwargs['user_id'] status = self.kwargs['status'] if status == 'approve': req = FriendRequest.objects.get(user=self.request.user) req.friend_request.remove(sender) sent = FriendRequestSent.objects.get(user=sender) sent.request_sent.remove(self.request.user) friendlist = FriendList.objects.get(user=self.request.user) friendlist.friends.add(sender) senderfrndlist = FriendList.objects.get(user=sender) senderfrndlist.friends.add(self.request.user) data={ 'status':'approved', 'user': sender, } return JsonResponse(data) elif status == 'decline': req = FriendRequest.objects.get(user=self.request.user) req.friend_request.remove(sender) sent = FriendRequestSent.objects.get(user=sender) sent.request_sent.remove(self.request.user) data = { 'status': 'declined', 'user': sender } 
return JsonResponse(data) class SentRequestView(View): """View for sending request to any of the user""" @method_decorator(login_required) def get(self,request,*args,**kwargs): sender = FriendRequestSent.objects.get(user=self.request.user) sent_request = sender.request_sent.all() context={ 'sent_request':sent_request, } return render(request,'sent_request.html',context) class FriendListView(View): @method_decorator(login_required) def get(self, request): user = self.request.user your_list = FriendList.objects.get(user=user) friend_list = your_list.friends.all() context = { 'friend_list': friend_list, } return render(request, 'friendlist.html', context)
2.328125
2
tests/test_account_key.py
nichandy/flow-py-sdk
21
12784006
from unittest import TestCase

from flow_py_sdk import AccountKey, SignAlgo, HashAlgo
from flow_py_sdk.proto.flow.entities import AccountKey as ProtoAccountKey


class TestAccountKey(TestCase):
    def test_rlp(self):
        expected_rlp_hex = "f847b840c51c02aa382d8d382a121178de8ac97eb6a562a1008660669ab6a220c96fce76e1d392b0c156380ae713b0aa18ad9cff7b85bcc44a9eb43fcddb467f456f0ec803038203e8"
        key = AccountKey(
            public_key=bytes.fromhex(
                "<KEY>"
            ),
            sign_algo=SignAlgo.ECDSA_secp256k1,
            hash_algo=HashAlgo.SHA3_256,
            weight=AccountKey.weight_threshold,
        )

        rlp = key.rlp()
        self.assertEqual(expected_rlp_hex, rlp.hex())

    def test_hex(self):
        expected_rlp_hex = "f847b840c51c02aa382d8d382a121178de8ac97eb6a562a1008660669ab6a220c96fce76e1d392b0c156380ae713b0aa18ad9cff7b85bcc44a9eb43fcddb467f456f0ec803038203e8"
        key = AccountKey(
            public_key=bytes.fromhex(
                "<KEY>"
            ),
            sign_algo=SignAlgo.ECDSA_secp256k1,
            hash_algo=HashAlgo.SHA3_256,
            weight=AccountKey.weight_threshold,
        )

        rlp_hex = key.hex()
        self.assertEqual(expected_rlp_hex, rlp_hex)

    def test_from_proto(self):
        proto_account_key = ProtoAccountKey()
        proto_account_key.sign_algo = 2
        proto_account_key.hash_algo = 1

        AccountKey.from_proto(proto_account_key)
2.6875
3
archieve/2021/day07b.py
jabra98/aoc
0
12784007
import sys; datafilepath = sys.argv[1]

pos = [int(x) for x in open(datafilepath).read().split(',')]

avg = sum(pos) / len(pos)
# should ideally check floor(mean)-1 and floor(mean)+1
s_low = 0
s_high = 0

print(avg)

for i in pos:
    dif = abs(int(avg) - i)
    s_low += dif * (dif + 1) / 2

for i in pos:
    dif = abs(int(avg + 1) - i)
    s_high += dif * (dif + 1) / 2

print(int(min(s_low, s_high)))
3.234375
3
PyRamen/PyRamen.py
nomadic-me/python-homework
0
12784008
# -*- coding: UTF-8 -*-
"""PyRamen Homework Starter."""

# @TODO: Import libraries
import csv
from pathlib import Path

# @TODO: Set file paths for menu_data.csv and sales_data.csv
menu_filepath = Path('')
sales_filepath = Path('')

# @TODO: Initialize list objects to hold our menu and sales data
menu = []
sales = []

# @TODO: Read in the menu data into the menu list

# @TODO: Read in the sales data into the sales list

# @TODO: Initialize dict object to hold our key-value pairs of items and metrics
report = {}

# Initialize a row counter variable
row_count = 0

# @TODO: Loop over every row in the sales list object
# Line_Item_ID,Date,Credit_Card_Number,Quantity,Menu_Item

# @TODO: Initialize sales data variables

# @TODO:
# If the item value not in the report, add it as a new entry with initialized metrics
# Naming convention allows the keys to be ordered in logical fashion, count, revenue, cost, profit
3.015625
3
macro/render/page/search.py
cloudmattcloud/Macro-Explain-o-Matic
2
12784009
'''
Collection of shared tools for appengine page rendering.
'''

# My modules
from macro.render.defs import *
from macro.render.util import render_template
from macro.data.appengine.savedmacro import SavedMacroOps


# Generate a search results page.
def generate_search_page(path, terms, page, sort, page_size=DEF_SEARCH_RESULTS):
    ''' Generate a search results page. '''
    error = None

    # Make sure page is a number, failing on bad input
    prev_page = None
    if not page:
        page = 1
    else:
        page = int(page)
        prev_page = page - 1

    # Do the search
    # TODO: Add column sort
    results = []
    is_next_page = False
    if (len(terms) < SINGLE_TAG_MAX_LENGTH):
        (results, is_next_page) = SavedMacroOps.search(terms, page=page, num=page_size, sort=sort)
    else:
        error = "Query term too long."
        terms = terms[:SINGLE_TAG_MAX_LENGTH] + "..."

    # If the number of results is less than that of page_size,
    # then there is no next page.
    next_page = None
    if is_next_page:
        next_page = page + 1

    # If there are no results, add an error.
    if not error and len(results) == 0:
        error = "No results found."

    # TODO: Hook up template controls to sort results.
    # TODO: Hook up template controls to page forward/back.

    # Return generated search page.
    return render_template('base.template',
                           {'query' : terms,
                            'content': render_template('search.template',
                                                        {'search_error' : error,
                                                         'curr_version' : "%s.%s.%s" % (MAJOR_VERSION, MINOR_VERSION, PATCH_VERSION),
                                                         'query' : terms,
                                                         'q_esc' : FORM_QUERY_ESC,
                                                         'results' : results,
                                                         'sort' : sort,
                                                         'page_var' : FORM_SEARCH_PAGE,
                                                         # Only give a prev page if we're over page 1.
                                                         'prev_page' : prev_page,
                                                         'page' : page,
                                                         'next_page' : next_page,
                                                         }, path)},
                           path)
2.109375
2
beams/ApogeeCam.py
Yaccoub/Beams
11
12784010
import numpy as N
import win32com.client

# generate and import apogee ActiveX module
apogee_module = win32com.client.gencache.EnsureModule(
    '{A2882C73-7CFB-11D4-9155-0060676644C1}', 0, 1, 0)
if apogee_module is None:
    raise ImportError  # prevent plugin from being imported
from win32com.client import constants as Constants

from traits.api import Str, Int, Enum, Float, Bool
from traitsui.api import View, Item

from Camera import Camera, CameraError


class ApogeeCam(Camera):
    '''Apogee Alta or Ascent camera'''

    plugin_info = {
        'name': 'Apogee',
        'description': 'Apogee Alta or Ascent camera',
        'author': '<NAME>',
        'copyright year': '2011',
    }

    camera_num2 = Int(0)
    camera_model = Str()
    driver_version = Str()
    interface = Enum('usb', 'net')
    expose_time = Float(0.05)
    open_shutter = Bool(True)

    view = View(
        Item('interface'),
        Item('camera_number'),
        Item('camera_num2'),
        Item('expose_time'),
        Item('open_shutter'))

    def __init__(self, **traits):
        super(ApogeeCam, self).__init__(camera_number=0, **traits)
        self._cam = win32com.client.Dispatch('Apogee.Camera2')
        self._interface_constants = {
            'usb': Constants.Apn_Interface_USB,
            'net': Constants.Apn_Interface_NET}
        self._reverse_constants = dict((v, k)
            for k, v in self._interface_constants.iteritems())
        self._buffer = None

    def open(self):
        self._cam.Init(self._interface_constants[self.interface],
            self.camera_number, self.camera_num2, 0)
        self._buffer = N.zeros(self.roi[-1:-3:-1], dtype=N.uint16)

    def close(self):
        self._cam.Close()

    def query_frame(self, expose_time=None, open_shutter=None):
        """
        Start an exposure and wait for it to finish.

        Pass @expose_time or @open_shutter to override the camera object's
        default parameters.
        """
        if expose_time is None:
            expose_time = self.expose_time
        if open_shutter is None:
            open_shutter = self.open_shutter
        try:
            self._cam.Expose(expose_time, open_shutter)
            while self._cam.ImagingStatus != Constants.Apn_Status_ImageReady:
                pass
            self._cam.GetImage(self._buffer.ctypes.data)
        finally:
            if self._cam.ImagingStatus < 0:
                self.reset()
        self.frame = N.copy(self._buffer)

    def choose_camera(self):
        discover = win32com.client.Dispatch('Apogee.CamDiscover')
        discover.DlgCheckUsb = True
        discover.ShowDialog(True)
        if not discover.ValidSelection:
            raise ValueError('No camera selected')
        self.interface = self._reverse_constants[discover.SelectedInterface]
        self.camera_number = discover.SelectedCamIdOne
        self.camera_num2 = discover.SelectedCamIdTwo

    def reset(self):
        self._cam.ResetState()
        # if error status persists, raise an exception
        if self._cam.ImagingStatus < 0:
            raise CameraError('Error not cleared by reset', self.camera_number)

    def _resolution_default(self):
        return self._cam.ImagingColumns, self._cam.ImagingRows

    def _camera_model_default(self):
        return self._cam.CameraModel

    def _driver_version_default(self):
        return self._cam.DriverVersion

    def _id_string_default(self):
        return 'Apogee {} Driver version: {}'.format(
            self.camera_model, self.driver_version)

    def _roi_default(self):
        return (self._cam.RoiStartX, self._cam.RoiStartY,
                self._cam.RoiPixelsH, self._cam.RoiPixelsV)

    def _roi_changed(self, value):
        x, y, w, h = value
        self._cam.RoiStartX = x
        self._cam.RoiStartY = y
        self._cam.RoiPixelsH = w
        self._cam.RoiPixelsV = h
        self._buffer = N.zeros((h, w), dtype=N.uint16)
2.15625
2
config.py
alexbredo/site-packages
0
12784011
<reponame>alexbredo/site-packages
# Copyright (c) 2014 <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above
#    copyright notice, this list of conditions and the following
#    disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
#    copyright notice, this list of conditions and the following
#    disclaimer in the documentation and/or other materials
#    provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

import os
import sys
import ConfigParser
from bredo.logger import *


class Config(object):
    def __init__(self, fileprefix):
        self.log = Logger(fileprefix)
        try:
            self.config = ConfigParser.RawConfigParser()
            filepath = os.path.join(os.path.dirname(sys.argv[0]), fileprefix + '.cfg')
            self.config.read(filepath)
        except Exception:
            self.log.error('Configfile not found. File requested: ' + filepath)

    def getSetting(self, category, item):
        try:
            return self.config.get(category, item)
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            self.log.error('Configfile not found or invalid settings. You requested ' + category + '::' + item + '.')
1.390625
1
classifier/upload_and_download_files.py
Creearc/Codenrock-New-Year-ML-Battle
1
12784012
<reponame>Creearc/Codenrock-New-Year-ML-Battle import os from flask import Flask, request, redirect, url_for from werkzeug.utils import secure_filename os.environ['CUDA_VISIBLE_DEVICES'] = '-1' import tensorflow as tf assert float(tf.__version__[:3]) >= 2.3 import tensorflow.lite as tflite import os import numpy as np import cv2 from PIL import Image def load_labels(label_path): r"""Returns a list of labels""" with open(label_path, 'r') as f: return [line.strip() for line in f.readlines()] def load_model(model_path): r"""Load TFLite model, returns a Interpreter instance.""" interpreter = tflite.Interpreter(model_path=model_path) interpreter.allocate_tensors() return interpreter def process_image(interpreter, image, input_index, k=3): r"""Process an image, Return top K result in a list of 2-Tuple(confidence_score, label)""" input_data = np.expand_dims(image, axis=0) # expand to 4-dim # Process interpreter.set_tensor(input_index, input_data) interpreter.invoke() # Get outputs output_details = interpreter.get_output_details() output_data = interpreter.get_tensor(output_details[0]['index']) output_data = np.squeeze(output_data) # Get top K result top_k = output_data.argsort()[-k:][::-1] # Top_k index result = [] ind, mx = 0, 0 for i in top_k: score = float(output_data[i] / 255.0) result.append((i, score)) if score > mx: mx = score ind = i print('Result: {}'.format(labels[ind])) return result def display_result(top_result, frame, labels): r"""Display top K result in top right corner""" font = cv2.FONT_HERSHEY_SIMPLEX size = 0.6 color = (0, 0, 0) # Blue color thickness = 4 h, w = frame.shape[:2] k = 640 / w frame = cv2.resize(frame, (int(w * k), int(h * k))) for idx, (i, score) in enumerate(top_result): # print('{} - {:0.4f}'.format(label, score)) x = 12 y = 24 * idx + 24 cv2.putText(frame, '{} - {:0.4f}'.format(labels[i], score), (x, y), font, size, color, thickness) cv2.putText(frame, '{} - {:0.4f}'.format(labels[i], score), (x, y), font, size, (255, 255, 255), thickness-2) return frame def detect_image(input_img_path, output_img_path): global interpreter, labels, input_index, height, width img = cv2.imread(input_img_path, cv2.IMREAD_COLOR) img_n = Image.fromarray(cv2.cvtColor(img, cv2.COLOR_BGR2RGB)) img_n = img_n.resize((width, height)) top_result = process_image(interpreter, img_n, input_index) img = display_result(top_result, img.copy(), labels) cv2.imwrite(output_img_path, img) ALLOWED_EXTENSIONS = set(['jpg', 'png']) path = 'files' # path for files to save app = Flask(__name__) app.config['UPLOAD_FOLDER'] = path def allowed_file(filename): return '.' 
in filename and \ filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS @app.route('/', methods=['GET', 'POST']) def upload_file(): if request.method == 'POST': file = request.files['file'] if file and allowed_file(file.filename): filename = secure_filename(file.filename) saved_file = os.path.join(app.config['UPLOAD_FOLDER'], filename) file.save(saved_file) r = filename.split('.') print(r) image_path = os.path.join(app.config['UPLOAD_FOLDER'], '{}_n.{}'.format(r[0], r[1])) detect_image(saved_file, image_path) out = image_path.split('/')[-1] return redirect(url_for('uploaded_file', filename=out)) return ''' <!doctype html> <title>upload_and_download_files</title> <h1>Загрузите файл</h1> <h3>Принимаются изображения в форматах jpg или png</h3> <form action="" method=post enctype=multipart/form-data> <p><input type=file name=file> <input type=submit value=Загрузить> </form> ''' from flask import send_from_directory @app.route('/uploads/<filename>') def uploaded_file(filename): return send_from_directory(app.config['UPLOAD_FOLDER'], filename) if __name__ == "__main__": interpreter = load_model('1_q.tflite') labels = load_labels('frost_labels.txt') input_details = interpreter.get_input_details() input_shape = input_details[0]['shape'] height = input_shape[1] width = input_shape[2] input_index = input_details[0]['index'] print(input_shape) app.run(host='0.0.0.0', port=8000, debug=not True)
2.28125
2
python_basics/Module & packages/basic/mymodule.py
alok8765/basic_python_practicse
0
12784013
def my_func():
    print("Hey i am in mymodule.py")
1.53125
2
code/solutions/solution_05.py
DahlitzFlorian/python-basic-training
3
12784014
# This is a possible solution for exercise_05.py


def fibonacci(number):
    i = 1

    if number == 0:
        fib = []
    elif number == 1:
        fib = [1]
    elif number == 2:
        fib = [1, 1]
    elif number > 2:
        fib = [1, 1]
        while i < (number - 1):
            fib.append(fib[i] + fib[i - 1])
            i += 1

    return fib


number = int(input("How many numbers should I generate for you: "))
print(fibonacci(number))
4.1875
4
app/test/airport/test_airport_parser.py
egenerat/gae-django
3
12784015
# -*- coding: utf-8 -*-
from app.airport.airports_parsers import get_country, get_money, get_kerosene_supply, get_kerosene_capacity, \
    get_engines_supply, get_planes_capacity, get_airport_name
from app.common.http_methods_unittests import get_request
from app.common.target_urls import MY_AIRPORT

import unittest


class TestAirportParser(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        cls.__html_page = get_request(MY_AIRPORT)

    def test_country(self):
        country = get_country(self.__html_page)
        self.assertEqual(u'Égypte', country)

    def test_money(self):
        country = get_money(self.__html_page)
        self.assertEqual(2444908, country)

    def test_kerosene_supply(self):
        country = get_kerosene_supply(self.__html_page)
        self.assertEqual(2009391, country)

    def test_kerosene_capacity(self):
        country = get_kerosene_capacity(self.__html_page)
        self.assertEqual(2500000, country)

    def test_engines_supply(self):
        engines_supply = get_engines_supply(self.__html_page)
        self.assertEqual(1000, engines_supply['5'])
        self.assertEqual(2, engines_supply['6'])

    def test_planes_capacity(self):
        country = get_planes_capacity(self.__html_page)
        self.assertEqual(9, country)

    def test_airport_name(self):
        country = get_airport_name(self.__html_page)
        self.assertEqual(u'Roissy aéroport', country)


if __name__ == '__main__':
    unittest.main()
2.734375
3
setup.py
yuankaihuo/MedLabel
10
12784016
<gh_stars>1-10 from __future__ import print_function import distutils.spawn import os.path from setuptools import find_packages from setuptools import setup import shlex import subprocess import sys PY3 = sys.version_info[0] == 3 PY2 = sys.version_info[0] == 2 assert PY3 or PY2 here = os.path.abspath(os.path.dirname(__file__)) version_file = os.path.join(here, 'EasierPath', '_version.py') if PY3: import importlib version = importlib.machinery.SourceFileLoader( '_version', version_file ).load_module().__version__ else: assert PY2 import imp version = imp.load_source('_version', version_file).__version__ del here install_requires = [ 'matplotlib', 'numpy', 'Pillow>=2.8.0', 'PyYAML', 'qtpy', 'termcolor', ] # Find python binding for qt with priority: # PyQt5 -> PySide2 -> PyQt4, # and PyQt5 is automatically installed on Python3. QT_BINDING = None try: import PyQt5 # NOQA QT_BINDING = 'pyqt5' except ImportError: pass if QT_BINDING is None: try: import PySide2 # NOQA QT_BINDING = 'pyside2' except ImportError: pass if QT_BINDING is None: try: import PyQt4 # NOQA QT_BINDING = 'pyqt4' except ImportError: if PY2: print( 'Please install PyQt5, PySide2 or PyQt4 for Python2.\n' 'Note that PyQt5 can be installed via pip for Python3.', file=sys.stderr, ) sys.exit(1) assert PY3 # PyQt5 can be installed via pip for Python3 install_requires.append('PyQt5') QT_BINDING = 'pyqt5' del QT_BINDING if sys.argv[1] == 'release': if not distutils.spawn.find_executable('twine'): print( 'Please install twine:\n\n\tpip install twine\n', file=sys.stderr, ) sys.exit(1) commands = [ 'python tests/docs_tests/man_tests/test_labelme_1.py', 'git tag v{:s}'.format(version), 'git push origin master --tag', 'python setup.py sdist', 'twine upload dist/labelme-{:s}.tar.gz'.format(version), ] for cmd in commands: subprocess.check_call(shlex.split(cmd)) sys.exit(0) def get_long_description(): with open('README.md') as f: long_description = f.read() try: import github2pypi return github2pypi.replace_url( slug='yuankaihuo/EasierPath', content=long_description ) except Exception: return long_description setup( name='EasierPath_v1.0.1', version=version, packages=find_packages(), description='Renal Pathology Annotation with Python', long_description=get_long_description(), long_description_content_type='text/markdown', author='<NAME>', author_email='<EMAIL>', url='https://github.com/yuankaihuo/EasierPath', install_requires=install_requires, license='GPLv3', keywords='Image Annotation, Pathology', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], package_data={'EasierPath_v1.0.1': ['icons/*', 'config/*.yaml']}, entry_points={ 'console_scripts': [ 'labelme=labelme.main:main', 'labelme_draw_json=labelme.cli.draw_json:main', 'labelme_draw_label_png=labelme.cli.draw_label_png:main', 'labelme_json_to_dataset=labelme.cli.json_to_dataset:main', 'labelme_on_docker=labelme.cli.on_docker:main', ], }, data_files=[('share/man/man1', ['docs/man/labelme.1'])], )
1.882813
2
deps_report/models/runtime_informations.py
MeilleursAgents/deps-report
0
12784017
<filename>deps_report/models/runtime_informations.py
from dataclasses import dataclass
from datetime import date


@dataclass
class RuntimeInformations:
    name: str
    current_version: str
    latest_version: str
    current_version_is_outdated: bool
    current_version_eol_date: date
    current_version_is_eol_soon: bool
1.703125
2
contrib/experimental/great_expectations_experimental/expectations/expect_column_values_to_be_valid_iana_timezone.py
arunnthevapalan/great_expectations
1
12784018
<filename>contrib/experimental/great_expectations_experimental/expectations/expect_column_values_to_be_valid_iana_timezone.py<gh_stars>1-10 import json from typing import Optional import pytz from great_expectations.core.expectation_configuration import ExpectationConfiguration from great_expectations.exceptions import InvalidExpectationConfigurationError from great_expectations.execution_engine import ( PandasExecutionEngine, SparkDFExecutionEngine, SqlAlchemyExecutionEngine, ) from great_expectations.expectations.expectation import ColumnMapExpectation from great_expectations.expectations.metrics import ( ColumnMapMetricProvider, column_condition_partial, ) from great_expectations.expectations.metrics.import_manager import F, sparktypes def is_valid_timezone(timezone: str) -> bool: try: pytz.timezone(timezone) return True except pytz.UnknownTimeZoneError: return False # This class defines a Metric to support your Expectation. # For most ColumnMapExpectations, the main business logic for calculation will live in this class. class ColumnValuesIanaTimezone(ColumnMapMetricProvider): # This is the id string that will be used to reference your metric. condition_metric_name = "column_values.iana_timezone" # This method implements the core logic for the PandasExecutionEngine @column_condition_partial(engine=PandasExecutionEngine) def _pandas(cls, column, **kwargs): return column.apply(lambda x: is_valid_timezone(x)) # This method defines the business logic for evaluating your metric when using a SqlAlchemyExecutionEngine # @column_condition_partial(engine=SqlAlchemyExecutionEngine) # def _sqlalchemy(cls, column, _dialect, **kwargs): # raise NotImplementedError # This method defines the business logic for evaluating your metric when using a SparkDFExecutionEngine @column_condition_partial(engine=SparkDFExecutionEngine) def _spark(cls, column, **kwargs): tz_udf = F.udf(is_valid_timezone, sparktypes.BooleanType()) return tz_udf(column) # This class defines the Expectation itself class ExpectColumnValuesToBeValidIanaTimezone(ColumnMapExpectation): """Expect values in this column to be valid IANA timezone strings. A full list of valid timezones can be viewed by `pytz.all_timezones`. See https://www.iana.org/time-zones for more information. """ # These examples will be shown in the public gallery. # They will also be executed as unit tests for your Expectation. examples = [ { "data": { "valid_timezones": [ "UTC", "America/New_York", "Australia/Melbourne", "US/Hawaii", "Africa/Sao_Tome", ], "invalid_timezones": [ "America/Calgary", "Europe/Nice", "New York", "Central", "+08:00", ], }, "tests": [ { "title": "positive_test_with_timezones", "exact_match_out": False, "include_in_gallery": True, "in": {"column": "valid_timezones"}, "out": {"success": True}, }, { "title": "negative_test_with_timezones", "exact_match_out": False, "include_in_gallery": True, "in": {"column": "invalid_timezones"}, "out": {"success": False}, }, ], } ] # This is the id string of the Metric used by this Expectation. # For most Expectations, it will be the same as the `condition_metric_name` defined in your Metric class above. 
map_metric = "column_values.iana_timezone" # This is a list of parameter names that can affect whether the Expectation evaluates to True or False success_keys = ("mostly",) # This dictionary contains default values for any parameters that should have default values default_kwarg_values = {} def validate_configuration(self, configuration: Optional[ExpectationConfiguration]): """ Validates that a configuration has been set, and sets a configuration if it has yet to be set. Ensures that necessary configuration arguments have been provided for the validation of the expectation. Args: configuration (OPTIONAL[ExpectationConfiguration]): \ An optional Expectation Configuration entry that will be used to configure the expectation Returns: True if the configuration has been validated successfully. Otherwise, raises an exception """ super().validate_configuration(configuration) if configuration is None: configuration = self.configuration # # Check other things in configuration.kwargs and raise Exceptions if needed # try: # assert ( # ... # ), "message" # assert ( # ... # ), "message" # except AssertionError as e: # raise InvalidExpectationConfigurationError(str(e)) return True # This object contains metadata for display in the public Gallery library_metadata = { "tags": ["type-entities", "hackathon", "timezone"], "contributors": [ "@lucasasmith", ], "requirements": ["pytz"], } if __name__ == "__main__": ExpectColumnValuesToBeValidIanaTimezone().print_diagnostic_checklist()
2.296875
2
drf_ujson/parsers.py
pasevin/drf_ujson2
0
12784019
<filename>drf_ujson/parsers.py
from typing import Any, Mapping, Optional, Type

from django.conf import settings
from rest_framework.exceptions import ParseError
from rest_framework.parsers import BaseParser
from rest_framework.renderers import JSONRenderer, BaseRenderer
import ujson

__all__ = ["UJSONParser"]


class UJSONParser(BaseParser):
    """
    Parses JSON-serialized data by ujson parser.
    """

    media_type: str = "application/json"
    renderer_class: Type[BaseRenderer] = JSONRenderer

    # Set to enable usage of higher precision (strtod) function when decoding
    # string to double values. Default is to use fast but less precise builtin
    # functionality.
    precise_float: bool = False

    def parse(
        self,
        stream,
        media_type: str = None,
        parser_context: Optional[Mapping[str, Any]] = None,
    ) -> dict:
        """
        Parses the incoming bytestream as JSON and returns the resulting data.
        """
        parser_context = parser_context or {}
        encoding = parser_context.get("encoding", settings.DEFAULT_CHARSET)

        try:
            data = stream.read().decode(encoding)
            return ujson.loads(data, precise_float=self.precise_float)
        except ValueError as exc:
            raise ParseError("JSON parse error - %s" % str(exc))
2.390625
2
src/pyota/core.py
msicilia/pyota
0
12784020
<filename>src/pyota/core.py<gh_stars>0 """ Utilities dealing with decoding IOTA messages. Reference specs: https://github.com/iotaledger/protocol-rfcs/blob/master/text/0017-tangle-message/0017-tangle-message.md https://github.com/luca-moser/protocol-rfcs/blob/signed-tx-payload/text/0000-transaction-payload/0000-transaction-payload.md https://github.com/iotaledger/protocol-rfcs/blob/master/text/0019-milestone-payload/0019-milestone-payload.md """ from enum import Enum from datetime import datetime class PayloadType(Enum): TXN = 0 # Transaction MILESTONE = 1 # Milestone IDX = 2 # Index # Some helper functions. def get_next_uint8(data: bytes) -> int: return int.from_bytes(data[0:1], "little", signed=False), data[1:] def get_next_uint16(data: bytes) -> int: return int.from_bytes(data[0:2], "little", signed=False), data[2:] def get_next_uint32(data: bytes) -> int: return int.from_bytes(data[0:4], "little", signed=False), data[4:] def get_next_uint64(data: bytes) -> int: return int.from_bytes(data[0:8], "little", signed=False), data[8:] def get_next_bytes(data: bytes, amount: int) -> bytes: return data[:amount], data[amount:] def get_parents_list(data:bytes, parents_count: int): parents = data[:parents_count*32] parents = [data[i : i+32] for i in range(0, len(parents), 32)] return parents, data[parents_count*32:] def get_utxos(data:bytes, number: int): lst_utxos = [] for _ in range(number): input_type, data = get_next_uint8(data) txn_id, data = get_next_bytes(data, 32) txn_index, data = get_next_uint16(data) lst_utxos.append(UTXORef(txn_id, txn_index, input_type)) return lst_utxos, data[(1 + 32 + 2)*number:] def get_outputs(data:bytes, number: int): lst_out = [] for _ in range(number): out_type, data = get_next_uint8(data) addr_type, data = get_next_uint8(data) addr, data = get_next_bytes(data, 32) amount, data = get_next_uint64(data) print(amount) lst_out.append(TxnOutput(out_type, addr_type, addr, amount)) return lst_out, data[(1 + 1 + 32 + 8)*number:] def payload_type(payload: bytes) -> PayloadType: '''Returns the payload type from the IOTA message.''' type_code, _ = get_next_uint32(payload) return PayloadType(type_code) class UTXORef(): '''References an unspent transaction output, referenced as inputs in TxnMessages. ''' def __init__(self, txn_id, txn_index, input_type=0): self.input_type = input_type self.txn_id = txn_id self.txn_idx = txn_index def __repr__(self): return f"UTXORef[{self.txn_id.hex()}:{self.txn_idx}]" class TxnOutput(): ''' ''' def __init__(self, output_type, addr_type, addr, amount) -> None: self.output_type = output_type self.addr_type = addr_type self.addr = addr self.amount = amount def __repr__(self): return f"TxnOutput({self.addr.hex()}:{self.amount})" class IOTAMessage(): '''In IOTA 2 the tangle contains messages, which then contain the transactions or other structures that are processed by the IOTA protocol. Each message directly approves other messages, which are known as parents. NOTE: The nonce is omitted. ''' def __init__(self, messageid: str, networkid: str, parents): # The Message ID is the BLAKE2b-256 hash of the entire serialized message. self.id = messageid # This field denotes whether the message was meant for mainnet, testnet, or a private net. # It also marks what protocol rules apply to the message. Usually, it will be set to the # first 8 bytes of the BLAKE2b-256 hash of the concatenation of the network type and the # protocol version string. self.networkid = networkid # Parents are other message ids. 
self.parents = parents def __repr__(self): return f"{type(self).__name__}({self.id, self.networkid})" class IOTAIndexMessage(IOTAMessage): '''Allows the addition of an index to the encapsulating message, as well as some arbitrary data. ''' def __init__(self, messageid: str, networkid: str, parents, index: bytes, data: bytes): super().__init__(messageid, networkid, parents) self.index = index self.data = data def looks_like_spam(self) -> bool: '''Guess if the index message was spam. Note that it is common in IOTA that nodes send spam messages to increase the security of the tangle. ''' return "spam" in self._data.lower() def index_utf8(self) -> str: ''' Decode as UTF-8 replacing errors. ''' return self.index.decode("utf-8", errors="replace") def data_utf8(self) -> str: ''' Decode as UTF-8 replacing errors. ''' return self.data.decode("utf-8", errors="replace") class IOTATxnMessage(IOTAMessage): '''The current IOTA protocol uses transactions (which are vertices in the Tangle), where each transaction defines either an input or output. A grouping of those input/output transaction vertices make up a bundle which transfers the given values as an atomic unit (the entire bundle is applied or none of it). The input transactions define the funds to consume and create the deposits onto the output transactions target addresses. ''' def __init__(self, messageid, networkid, parents, txn_type, inputs, outputs, payload): super().__init__(messageid, networkid, parents) self.txn_type = txn_type self.inputs = inputs self.outputs = outputs self.payload = payload class IOTAMilestoneMessage(IOTAMessage): '''In IOTA, nodes use the milestones issued by the Coordinator to reach a consensus on which transactions are confirmed. ''' def __init__(self, messageid, networkid, parents, index_number, timestamp, milestone_parents): super().__init__(messageid, networkid, parents) self.index_number = index_number self.timestamp = timestamp self.milestone_parents = parents def get_timestamp(self): return datetime.utcfromtimestamp(self.timestamp) def decode_payload(payload: bytes): ''' Note that Index field must be at least 1 byte and not longer than 64 bytes for the payload to be valid. ''' t = payload_type(payload) _, payload = get_next_uint32(payload) # remove the payload type. if t == PayloadType.IDX: index_length, payload = get_next_uint16(payload) index, data = get_next_bytes(payload, index_length) return index, data elif t == PayloadType.MILESTONE: index_number, payload = get_next_uint32(payload) timestamp, payload = get_next_uint64(payload) parents_count, payload = get_next_uint8(payload) mlsparents, payload = get_parents_list(payload, parents_count) # TODO: Decode other info, these fields not yet included: inclusion_merkle_root, payload = get_next_bytes(payload, 32) next_pow_score, payload = get_next_uint32(payload) next_pow_score_mlst_idx, payload = get_next_uint32(payload) return index_number, timestamp, mlsparents elif t == PayloadType.TXN: transaction_type, payload = get_next_uint8(payload) # Always zero? 
inputs_count, payload = get_next_uint16(payload) utxolst, payload = get_utxos(payload, inputs_count) outputs_count, payload = get_next_uint16(payload) outlst, payload = get_outputs(payload, outputs_count) payload_length, payload = get_next_uint32(payload) txn_payload, _ = get_next_bytes(payload, payload_length) return transaction_type, utxolst, outlst, txn_payload else: return NotImplemented def decode_message(messageid : str, message : str, metadata: str) -> IOTAMessage: '''Decodes a IOTA message as extracted from the IOTA database. ''' message = bytes.fromhex(message[2:]) # skip the 0x from the str networkid, message = get_next_uint64(message) parents_count, message = get_next_uint8(message) parents, message = get_parents_list(message, parents_count) payload_len = int.from_bytes(message[:4], "little", signed=False) payload = message[4:-8] # remove the trailing nonce assert(len(payload) == payload_len, "Payload length incorrectly parsed.") if payload_type(payload) == PayloadType.IDX: index, data = decode_payload(payload) return IOTAIndexMessage(messageid, networkid, parents, index, data) elif payload_type(payload) == PayloadType.MILESTONE: index_no, ts, mlsparents = decode_payload(payload) return IOTAMilestoneMessage(messageid, networkid, parents, index_no, ts, mlsparents) elif payload_type(payload) == PayloadType.TXN: txn_type, utxolst, outlst, txn_payload = decode_payload(payload) return IOTATxnMessage(messageid, networkid, parents, txn_type, utxolst, outlst, txn_payload) else: return NotImplemented
2.421875
2
Program skrevet i forelesninger/Video 26 studenteksempel if settning oppgave.py
MegaMilkshakes/DAT_120_all_kode
0
12784021
<filename>Program skrevet i forelesninger/Video 26 studenteksempel if settning oppgave.py<gh_stars>0
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 1 21:54:51 2021

@author: Gerry
"""

# ax^2 + bx + c = 0
# how do you write exponents?
import math

a = int(input("Enter a number for a:"))
b = int(input("Enter a number for b:"))
c = int(input("Enter a number for c:"))

verditest = b**2 - 4*a*c  # must be 0 or higher

if verditest > 0:
    losning1 = (-b + math.sqrt(verditest)) / (2*a)
    losning2 = (-b - math.sqrt(verditest)) / (2*a)
    print(f"The equation has two solutions: {losning1} and {losning2}")
elif verditest == 0:
    losning = (-b)/(2*a)
    print(f"The equation has one solution: {losning}")
else:
    print("The equation has no solutions")
3.5
4
twitterapiv2/model/recent/public_metrics.py
Preocts/twwordmap
1
12784022
<gh_stars>1-10
from typing import Any
from typing import Dict


class PublicMetrics:
    retweet_count: int
    reply_count: int
    like_count: int
    quote_count: int

    @classmethod
    def build_obj(cls, obj: Dict[str, Any]) -> "PublicMetrics":
        """Build object"""
        new = cls()
        new.retweet_count = obj.get("retweet_count", 0)
        new.reply_count = obj.get("reply_count", 0)
        new.like_count = obj.get("like_count", 0)
        new.quote_count = obj.get("quote_count", 0)
        return new
2.75
3
utils/forms.py
DevotedExuDevelopersTeam/ClanManager
1
12784023
<reponame>DevotedExuDevelopersTeam/ClanManager from asyncio import TimeoutError, sleep from os import remove import disnake from disnake import ( ButtonStyle, Embed, File, Message, MessageInteraction, ModalInteraction, PermissionOverwrite, TextInputStyle, ) from disnake.ui import Button, Modal, TextInput, View from utils.bot import Bot from utils.constants import FAQ_CHANNEL_ID, OFFICER_ROLE_ID from utils.image_processor import calculate_ssim from utils.utils import extract_regex class ClanApplicationForm(Modal): def __init__(self, bot: Bot): self.bot = bot components = [ TextInput( label="Your ID", custom_id="ID", placeholder="12345678", max_length=15 ), TextInput( label="Your clan rank", custom_id="Clan Rank", placeholder="Warrior", max_length=30, ), TextInput( label="Your average valor points", custom_id="Average Valor", placeholder="4000", max_length=15, ), TextInput( label="Your country or timezone", custom_id="Country | Timezone", placeholder="USA / EST", max_length=15, ), TextInput( label="Can you grind on war start?", custom_id="War Starter", placeholder="yes / no", max_length=10, ), ] super().__init__(title="Clan Application", components=components) async def callback(self, inter: ModalInteraction): data = inter.text_values try: await self.bot.set_pg_id(inter.id, int(data["ID"])) except Exception as e: print(e) channel = await inter.guild.create_text_channel( name=f"apl-{inter.author}", category=inter.channel.category, overwrites={ inter.author: PermissionOverwrite( read_messages=True, attach_files=True ), inter.guild.default_role: PermissionOverwrite(read_messages=False), }, ) await inter.send( f"Thanks for submitting the form, now please go to {channel.mention} and attach the required profile screenshot!", ephemeral=True, ) await channel.send( "Please attach your profile screenshot as shown in example. If you are not in any clan, head to friends page and search yourself.", file=File("res/profile_example.png"), ) self.bot.pending_applicants.append(inter.author.id) while 1: try: m: Message = await self.bot.wait_for( "message", check=lambda mg: len(mg.attachments) != 0, timeout=300 ) file_path = f".tmp/{inter.author.id}.png" confirm_mg = await channel.send( "Please wait, submitting your application..." ) await m.attachments[0].save(file_path) await m.delete() if calculate_ssim(file_path) < 0.4: await channel.send( f"Sorry, but this is the wrong screenshot. \ Please look at the example and try again. If you face the same issue, check out <#{FAQ_CHANNEL_ID}>." ) continue embed = Embed( color=0xFFFF00, title="New Application", description=f"```REGEX DATA\nID::{inter.author.id}```", ) for k, v in data.items(): embed.add_field(k, v, inline=False) view = View() view.stop() view.add_item(Button(style=ButtonStyle.green, label="Accept")) view.add_item(Button(style=ButtonStyle.red, label="Deny")) await self.bot.pending_applications.send( f"<@&{OFFICER_ROLE_ID}> new application by {inter.author.mention}", embed=embed, file=File(file_path), view=view, ) remove(file_path) await confirm_mg.delete() await channel.send( f"{inter.author.mention}, successfully sent your application! \ Now please wait for officers' decision." ) await sleep(5) await channel.delete() return except TimeoutError: await channel.send( f"{inter.author.mention}, your response has timed out. \ The channel is closing in 10 seconds. Please try again later." 
) await sleep(10) await channel.delete() self.bot.pending_applicants.remove(inter.author.id) return except Exception as e: self.bot.pending_applicants.remove(inter.author.id) await channel.send( f"{inter.author.mention}, unknown error occured, we will be investigating it in the nearest time. \ Please be patient and attempt to try again." ) await self.bot.admin.send( f"{self.bot.owner.mention}, unknown error occurred\n```py\n{e}```" ) raise e class ReasonForm(Modal): def __init__(self, bot: Bot, inter: MessageInteraction, id: int): self.bot = bot self.inter = inter self.member = self.bot.get_member(id) components = [ TextInput( label="Reason", placeholder="Provide the reason of denial", style=TextInputStyle.long, max_length=300, custom_id="reason", ) ] super().__init__(title="Application Denial", components=components) async def callback(self, inter: ModalInteraction): await inter.response.defer(ephemeral=True) try: await self.member.send( f"Your clan application was denied: `{inter.text_values['reason']}`" ) except disnake.HTTPException: pass embed = self.inter.message.embeds[0].add_field( "DENIED", f"By: {inter.author}\nReason: {inter.text_values['reason']}" ) embed.color = 0xFF0000 embed.title = "Denied Application" await self.bot.denied_applications.send( f"Application by <@{extract_regex(embed.description, 'id')}> was denied.", embed=embed, file=await self.inter.message.attachments[0].to_file(), ) await self.inter.message.delete() await self.inter.send( f"This application was marked as denied.\nReason: {inter.text_values['reason']}", ephemeral=True, )
1.875
2
user.py
Gordon003/COMP3900
0
12784024
from flask import Flask, render_template, request, session, redirect, url_for, Blueprint from function import data_function, user_function, sort, accom_function, object_function, checker, complaint from werkzeug.utils import secure_filename import os from os.path import join user = Blueprint('user', __name__) # LOGOUT PAGE @user.route('/logout') def logout(): session.clear() return redirect(url_for('index')) # LOGIN PAGE @user.route('/login', methods = ["GET", "POST"]) def login(): # POST Method if request.method == "POST": username = request.form['username'] password = request.form['password'] # Check validity if username == '' or password == '' or not checker.check_Correct_User(username, password): return render_template('home/login.html', error = "No username or password entered") else: return redirect(url_for("index")) # GET Method return render_template('home/login.html') # NEW USER PAGE @user.route('/join', methods = ["GET", "POST"]) def join(): # POST Method if request.method == "POST": # Get user input username = request.form['username'] password = request.form['password'] password2 = request.form['<PASSWORD>'] phone = request.form['phone'] address = request.form['address'] email = request.form['email'] fname = '' if 'file' in request.files: file = request.files['file'] fname = file.filename combine_list = [username, phone, address, email] error = '' # Check unique name if not checker.check_unique_name(username): error = "Username either already existed or not inputed" # Check password matched if not checker.check_valid_password(password) or password != <PASSWORD>: error = "Password are either not matched to each other or not inputed" # Phone if not checker.check_valid_phone(phone): error = 'Phone number not inputed or not at right format.' # Address if not checker.check_valid_address(address): error = "Address not inputed or doesn't existed." # Email if not checker.check_valid_email(email): error = 'Email not inputed or not at right format.' 
# Icon if fname == '': error = 'Icon not inputed' # If there are error if error != '': return render_template('home/new_user.html', error = error, previousInput = combine_list) # Save image filename = secure_filename(file.filename) fname = username + '.png' path = 'static/images/user/' file.save(os.path.join(path, fname)) # Add user to database user_function.add_User(username, password, phone, address, email) return redirect(url_for('index')) # GET Method return render_template('home/new_user.html') # PROFILE PAGE @user.route('/<username>') def profile(username): otherUser = user_function.get_User(username) print(otherUser) if session and username == session['username']: return render_template('user/profile.html', user = user_function.group_User_Info(), username = username) # No such user elif not otherUser: return render_template('user/profile.html', user = user_function.group_User_Info(), username = username, error = 1) # Access other user profile else: return render_template('user/profile.html', user = user_function.group_User_Info(), otherUser = otherUser, username = username) # EDIT USER INFO @user.route('/<username>/edit', methods = ["GET", "POST"]) def editUser(username): error = '' # POST Method if request.method =="POST": editInfo = request.form['submit'] # Update username if editInfo == 'name': username = request.form['username'] if (username==session['username']) or (username==''): error = 'name' else: oldUsername = session['username'] if user_function.update_Username(username): fname = oldUsername + '.png' newfname = username + '.png' path = 'static/images/user/' filenames = os.listdir(path) for filename in filenames: if (filename == fname): os.rename(os.path.join(path,filename), os.path.join(path,newfname)) return redirect(url_for('user.editUser', username = session['username'])) # Update password elif editInfo == 'password': pass1 = request.form['password1'] pass2 = request.form['password2'] if (pass1=='') or (pass2=='') or (pass1!=pass2) or (pass1==session['password'] or not checker.check_valid_password(pass1)): error = 'password' else: user_function.update_Password(request.form['password1'], request.form['password2']) return redirect(url_for('user.editUser', username = session['username'])) # Update phone # elif editInfo == 'phone': phone = request.form['phone'] if phone == '' or not checker.check_valid_phone(phone): error = 'phone' else: user_function.update_Phone(phone) return redirect(url_for('user.editUser', username = session['username'])) # Update Address elif editInfo == 'address': address = request.form['address'] address = address.lower() if address=='' or not checker.check_valid_address(address): error = 'address' else: user_function.update_Address(address) return redirect(url_for('user.editUser', username = session['username'])) # Update email elif editInfo == 'email': email = request.form['email'] if email=='' or not checker.check_valid_email(email): error = 'email' else: user_function.update_Email(email) return redirect(url_for('user.editUser', username = session['username'])) # GET Method return render_template('user/edit_user.html', user = user_function.group_User_Info(), username = username, error = error) # ADD TO USER BALANCE @user.route('/<username>/recharge', methods = ["GET", "POST"]) def rechargeMoney(username): error = '' # POST Method if request.method =="POST": money = request.form['money'] print(money.isdigit() ) if money.isdigit(): if int(money) > 0 and int(money) <= 800: user_function.add_money(int(money)) else: error = 'money' else: error = 
'money' return render_template('user/recharge_money.html', user = user_function.group_User_Info(), username = username, error = error) # VIEW USER ACCMODATION @user.route('/<username>/myAccomodation') def myAccomodation(username): myAccomodationList = accom_function.find_MyAccomodation(username) return render_template('user/myAccom.html', user = user_function.group_User_Info(), username = username, accomodationList = myAccomodationList) # VIEW USER ACCOMODATION BOOKING @user.route('/<username>/check_Accom/<accID>') def myAccomodationBooking(username, accID): accomodation = accom_function.get_Accomodation(accID) confirmedBooking = accom_function.find_otherPeopleConfirmedBooking2(accID) return render_template('accomodation/accomodation_booking_check.html', user = user_function.group_User_Info(), username = username, accomodation = accomodation , confirmedBooking = confirmedBooking) # VIEW USER OBJECT @user.route('/<username>/myObject') def myObject(username): myObjectList = object_function.find_MyObject(username) return render_template('user/myObject.html', user = user_function.group_User_Info(), username = username, objectList = myObjectList) # VIEW USER ACCOMODATION NOTIFICATION @user.route('/<username>/my_Accomodation_Notification') def myAccomNotification(username): waitBooking = accom_function.find_myWaitBooking(session['accID']) otherPeopleBooking = accom_function.find_otherPeopleBooking(session['accID']) rejectedBooking = accom_function.find_myRejectedBooking() return render_template('user/user_accom_notification.html', user = user_function.group_User_Info(), username = username, waitBooking = waitBooking, otherPeopleBooking=otherPeopleBooking, rejectedBooking = rejectedBooking) # VIEW USER ACCOMODATION NOTIFICATION @user.route('/<username>/my_Booking') def myConfirmedBooking(username): yourBooking = accom_function.find_myConfirmedBooking(session['accID']) return render_template('user/user_booking.html', user = user_function.group_User_Info(), username = username, yourBooking = yourBooking) # VIEW USER OBJECT NOTIFICATION @user.route('/<username>/my_Object_Notification') def myObjectNotification(username): object_list = object_function.get_Bid_Object(1) mySoldObject = object_function.find_MySoldObject(username) print(object_list) return render_template('user/user_object_notification.html', user = user_function.group_User_Info(), username = username, object_list = object_list, mySoldObject = mySoldObject) # VIEW USER ACCOMODATION NOTIFICATION @user.route('/<username>/my_Bought_Object') def myBoughtObject(username): yourBooking = accom_function.find_myConfirmedBooking(username) return render_template('user/user_object_bought.html', user = user_function.group_User_Info(), username = username, yourBooking = yourBooking) # REPORT USER @user.route('/<username>/report', methods = ["GET", "POST"]) def user_report(username): user = user_function.get_User(username) if request.method == 'POST': complaint_info = request.form['complaint'] if complaint_info != "": complaint.send_Complaint(user_function.get_UserID_name(username), complaint_info, "user") return render_template('user/user_report_confirm_notif.html', user = user_function.group_User_Info()) return render_template('user/user_report.html', user = user_function.group_User_Info(), reportedUser = user) # USER ACCEPT BOOKING @user.route('/<username>/<bookingID>/accept') def acceptBooking(username, bookingID): accom_function.accept_Booking(bookingID) return redirect(url_for('user.myAccomNotification', username = session['username'])) # USER 
REJECT BOOKING @user.route('/<username>/<bookingID>/reject') def rejectBooking(username, bookingID): accom_function.reject_Booking(bookingID) return redirect(url_for('user.myAccomNotification', username = session['username'])) # CANCEL @user.route('/<username>/<bookingID>/cancel') def cancel_Booking(username, bookingID): accom_function.reject_Booking(bookingID) return redirect(url_for('user.myConfirmedBooking', username = session['username'])) # REMOVE @user.route('/<username>/<bookingID>/remove') def remove_Booking(username, bookingID): accom_function.remove_Booking(bookingID) return redirect(url_for('user.myAccomNotification', username = session['username'])) # USER ADD NEW ACCOMODATION @user.route('/<username>/add_New_Accom', methods = ["GET", "POST"]) def addAccomodation(username): # POST Method if request.method == "POST": address = request.form['address'] bedroom = request.form['bedroom'] washroom = request.form['washroom'] price = request.form['price'] houseType = request.form['type'] description = request.form['description'] combine_list = [address, bedroom, washroom, price, houseType, description] # Validity error = "" if address == "" or not checker.check_valid_address(address.lower()): error = "address" elif str(price) == "" or not checker.check_valid_price(price): error = "price" elif description == "": error = "description" elif 'mainImage' not in request.files or 'sideImage1' not in request.files or 'sideImage2' not in request.files or 'sideImage3' not in request.files: error = "image" elif not user_function.remove_money(10): error = "money" if error != "": return render_template('accomodation/new_accomodation.html', user = user_function.group_User_Info(), username = username, previousInput = combine_list, error = error) # Save image mainImage = request.files['mainImage'] mainImageName = mainImage.filename sideImage1 = request.files['sideImage1'] sideImage1Name = sideImage1.filename sideImage2 = request.files['sideImage2'] sideImage2Name = sideImage2.filename sideImage3 = request.files['sideImage3'] sideImage3Name = sideImage3.filename # Add accomodation into database accID = accom_function.add_Accomodation(username, address.lower(), bedroom, washroom, description, price, houseType) # Add Image path = 'static/images/accomodation/' + str(accID) os.mkdir(path) mainFile = secure_filename(mainImage.filename) mainImage.save(os.path.join(path,'main.png')) side1File = secure_filename(sideImage1.filename) sideImage1.save(os.path.join(path,'side1.png')) side2File = secure_filename(sideImage2.filename) sideImage2.save(os.path.join(path,'side2.png')) side3File = secure_filename(sideImage3.filename) sideImage3.save(os.path.join(path,'side3.png')) return redirect(url_for('user.myAccomodation', username = username)) return render_template('accomodation/new_accomodation.html', user = user_function.group_User_Info(), username = username, error = "") # EDIT ACCOMODATION @user.route('/<username>/edit_Accom/<accID>', methods = ["GET", "POST"]) def edit_Accomodation(username, accID): error = '' # POST Method if request.method =="POST": editInfo = request.form['submit'] error = '' # Update address if editInfo == 'address': address = request.form['address'] if address == '' or not checker.check_valid_address(address): error = 'address' else: accom_function.update_Acc_Address(accID, address) # Update password elif editInfo == 'bedroom': bedroom = request.form['bedroom'] accom_function.update_Acc_Bedroom(accID, bedroom) # Update washroom elif editInfo == 'washroom': washroom = 
request.form['washroom'] accom_function.update_Acc_Washroom(accID, washroom) # Update price elif editInfo == 'price': price = request.form['price'] if checker.check_valid_price(price): accom_function.update_Acc_Price(accID, price) else: error = 'price' # Update description elif editInfo == 'description': description = request.form['description'] if description == "": error = 'description' else: accom_function.update_Acc_Description(accID, description) if error != "": accomodation = accom_function.get_Accomodation(accID) return render_template('accomodation/edit_accom.html', user = user_function.group_User_Info(), accomodation = accomodation, error = error) # GET Method accomodation = accom_function.get_Accomodation(accID) return render_template('accomodation/edit_accom.html', user = user_function.group_User_Info(), accomodation = accomodation) # USER REMOVE ACCOMODATION @user.route('/<username>/delete_Accom/<accID>') def deleteAccomodation(username,accID): accom_function.remove_Accomodation(accID) user_function.add_money(10) return redirect(url_for('user.myAccomodation', username = username)) # USER ADD NEW OBJECT @user.route('/<username>/add_New_Object', methods = ["GET", "POST"]) def addObject(username): if request.method == "POST": address = request.form['address'] minimumPrice = request.form['minimumPrice'] buyNowPrice = request.form['buyNowPrice'] objectType = request.form['type'] description = request.form['description'] combine_list = [address, minimumPrice, buyNowPrice, objectType, description] # Validity error = "" if address == "" or not checker.check_valid_address(address): error = "address" elif str(minimumPrice) == "" or not checker.check_valid_price(minimumPrice): error = "minimumPrice" elif str(buyNowPrice) == "" or not checker.check_valid_price(buyNowPrice) or buyNowPrice < minimumPrice: error = "buyNowPrice" elif description == "": error = "description" elif 'mainImage' not in request.files or 'sideImage1' not in request.files: error = "image" elif not user_function.remove_money(5): error = "money" if error != "": object_list = data_function.get_Object() return render_template('object/new_object.html', user = user_function.group_User_Info(), username = username, previousInput = combine_list, error = error, object_list = object_list) # Save image mainImage = request.files['mainImage'] mainImageName = mainImage.filename sideImage1 = request.files['sideImage1'] sideImage1Name = sideImage1.filename # Add accomodation into database objectID = object_function.add_Object(username, address.lower(), description, minimumPrice, buyNowPrice, objectType.lower()) # Add static image path = 'static/images/object/' + str(objectID) os.mkdir(path) mainFile = secure_filename(mainImage.filename) mainImage.save(os.path.join(path,'main.png')) side1File = secure_filename(sideImage1.filename) sideImage1.save(os.path.join(path,'side1.png')) return redirect(url_for('user.myObject', username = username)) object_list = data_function.get_Object() return render_template('object/new_object.html', user = user_function.group_User_Info(), username = username, object_list = object_list) # EDIT OBJECT @user.route('/<username>/edit_Object/<objectID>', methods = ["GET", "POST"]) def edit_Object(username, objectID): error = '' # POST Method if request.method =="POST": editInfo = request.form['submit'] # Update address if editInfo == 'address': address = request.form['address'] if address =='' or not checker.check_valid_address(address): error = 'address' else: object_function.update_Object_Address(objectID, 
address) return redirect(url_for('user.edit_Object', username = session['username'], objectID = objectID)) # Update description elif editInfo == 'description': description = request.form['description'] if description == '': error = 'description' else: object_function.update_Object_Description(objectID, description) return redirect(url_for('user.edit_Object', username = session['username'], objectID = objectID)) # GET Method myObject = object_function.get_Object(objectID)[0] return render_template('object/edit_Object.html', user = user_function.group_User_Info(), myObject = myObject, error = error) # USER REMOVE ACCOMODATION @user.route('/<username>/delete_Object/<objectID>') def deleteObject(username,objectID): object_function.remove_Object(objectID) user_function.add_money(5) return redirect(url_for('user.myObject', username = username))
2.8125
3
pqr_to_delphi.py
jbardhan/molman
0
12784025
<filename>pqr_to_delphi.py<gh_stars>0
#!/usr/bin/env python
import os, sys, subprocess, argparse, re, logging, errno
import mymm

parser = argparse.ArgumentParser(description = "This program takes a .pqr file (MEAD format only for now, meaning no chain field!) and writes a CRG and PDB file from it.",
                                 prog = sys.argv[0])
parser.add_argument('--pqr', metavar = 'lysozyme.pqr')
parser.add_argument('--output', metavar = '<base for output filename>')
args = parser.parse_args(sys.argv[1:])

system = mymm.Molecule()
system.read_pqr(args.pqr)
system.write_pdb2(args.output + ".pdb")
system.write_crg(args.output + ".crg")
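A minimal sketch of how this script might be invoked; the input file name is only the hypothetical one suggested by the argparse metavar, not a file shipped with the repository:

    python pqr_to_delphi.py --pqr lysozyme.pqr --output lysozyme
    # expected to write lysozyme.pdb and lysozyme.crg next to the input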
2.40625
2
gabi.py
csanadH/poker-player-ezmoni
0
12784026
import json


class Gabi:
    def myFunc(self, game, hand, cards):
        print("GABI")
        try:
            if (hand[0]["rank"] == hand[1]["rank"]):
                print("pair, returning 800")
                return 800
            elif (hand[0]["rank"] in "89TJQKA" and hand[1]["rank"] in "89TJQKA"):
                print("high card, returning 600")
                return 600
            else:
                # use the `game` argument; the original referenced an undefined
                # `game_state`, which always raised NameError on this branch
                return game["current_buy_in"] - game["players"][game["in_action"]]["bet"]
        except Exception as ex:
            print("bad gabi")
            print(ex)
        finally:
            print("GABI END")

    def calcBet(self, game):
        return 500

    def fulsh(self, hand, cards):
        # count suits; True when four or more cards share a suit
        allcards = hand + cards
        hearts = 0
        spades = 0
        clubs = 0
        diamonds = 0
        for card in allcards:
            if (card["suit"] == "hearts"):
                hearts += 1
            elif (card["suit"] == "spades"):
                spades += 1
            elif (card["suit"] == "clubs"):
                clubs += 1
            elif (card["suit"] == "diamonds"):
                diamonds += 1
        # `or`, not bitwise `|`: `hearts >= 4 | spades >= 4` parses as a chained
        # comparison against (4 | spades) and gives the wrong answer
        if (hearts >= 4 or spades >= 4 or clubs >= 4 or diamonds >= 4):
            return True
        return False

    def straight(self, hand, cards):
        allcards = hand + cards
        result = []
        for card in allcards:
            if (card["rank"] == "J"):
                card["rank"] = 11
            elif (card["rank"] == "Q"):
                card["rank"] = 12
            elif (card["rank"] == "K"):
                card["rank"] = 13
            elif (card["rank"] == "A"):
                card["rank"] = 14
            else:
                card["rank"] = int(card["rank"])
            result.append(card["rank"])
        result.sort()
        j = 0
        for i in range(0, len(result) - 1):
            if (result[i] + 1 == result[i + 1]):
                j += 1
        if (j > 3):
            return True
        else:
            return False


if __name__ == '__main__':
    json_data = open("sample.json").read()
    data = json.loads(json_data)
    asd = data["current_buy_in"] - data["players"][data["in_action"]]["bet"]
    print(Gabi().fulsh([{'rank': '3', 'suit': 'hearts'}, {'rank': 'K', 'suit': 'hearts'},
                        {'rank': '3', 'suit': 'hearts'}, {'rank': 'K', 'suit': 'hearts'},
                        {'rank': '3', 'suit': 'hearts'}, {'rank': 'K', 'suit': 'spades'}], []))
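A quick interactive check of the operator-precedence pitfall fixed in fulsh() above (plain Python, independent of the bot):

    >>> hearts, spades = 5, 3
    >>> hearts >= 4 | spades >= 4   # parsed as hearts >= (4 | spades) >= 4, i.e. 5 >= 7 >= 4
    False
    >>> hearts >= 4 or spades >= 4
    True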
3.421875
3
apps/photos/urls.py
commtrack/commtrack-old-to-del
1
12784027
<gh_stars>1-10
from django.conf.urls.defaults import *
import settings

image_path = settings.RAPIDSMS_APPS['photos']['image_path']

urlpatterns = patterns('',
    (r'^photos/?$', 'photos.views.recent'),
    (r'^photo/(?P<photo_id>\d+)/?$', 'photos.views.show'),
    (r'^%s/(?P<path>.*)$' % image_path, 'django.views.static.serve',
        {"document_root": '%s/' % image_path}),
    (r'^photo/comments/', include('django.contrib.comments.urls')),
    # (r'^photos/populate/?$', 'photos.views.populate'), # remove this once testing is done
    (r'^photos/import/?$', 'photos.views.import_photos'), # and this too
)
1.710938
2
train.py
vanAmsterdam/CauseEffectPairs
0
12784028
from torch.tensor import Tensor
from tqdm import tqdm
# from torch.nn import NeuralNet
# from torch.nn.loss import Loss, MSE
# from torch.optim import Optimizer, SGD
# from data import DataIterator, BatchIterator


def train(model, num_epochs, x, target, loss_fn, optimizer, gradient_clip=None):
    for i in tqdm(range(num_epochs)):
        pred = model(x)
        loss = loss_fn(pred, target)
        optimizer.zero_grad()
        loss.backward(retain_graph=True)
        if gradient_clip is not None:
            for param in model.parameters():
                param.grad.clamp_(*gradient_clip)
        optimizer.step()
        # loss.zero_grad()

# def train(net: 1,
#           inputs: Tensor,
#           targets: Tensor,
#           num_epochs: int = 5000,
#           iterator: DataIterator = BatchIterator(),
#           loss: 1,
#           optimizer: Optimizer = SGD()) -> None:
#     for epoch in range(num_epochs):
#         epoch_loss = 0.0
#         for batch in iterator(inputs, targets):
#             predicted = net.forward(batch.inputs)
#             epoch_loss += loss.loss(predicted, batch.targets)
#             grad = loss.grad(predicted, batch.targets)
#             net.backward(grad)
#             optimizer.step(net)
#         print(epoch, epoch_loss)
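A minimal sketch of how this train() helper might be driven; the model, data, and hyperparameters below are illustrative assumptions, not values taken from this repository:

    import torch
    from torch import nn

    x = torch.randn(64, 3)        # toy inputs
    target = torch.randn(64, 1)   # toy regression targets
    model = nn.Linear(3, 1)
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)
    train(model, num_epochs=100, x=x, target=target,
          loss_fn=nn.MSELoss(), optimizer=optimizer, gradient_clip=(-1.0, 1.0))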
2.59375
3
tests/messages_data/mime_emails/raw_email7.py
unqx/imap_tools
344
12784029
<gh_stars>100-1000
import datetime

from imap_tools import EmailAddress

DATA = dict(
    subject='testing',
    from_='<EMAIL>',
    to=('<EMAIL>',),
    cc=(),
    bcc=(),
    reply_to=(),
    date=datetime.datetime(2005, 6, 6, 22, 21, 22,
                           tzinfo=datetime.timezone(datetime.timedelta(0, 7200))),
    date_str='Mon, 6 Jun 2005 22:21:22 +0200',
    text='This is the first part.\r\n',
    html='',
    headers={'mime-version': ('1.0 (Apple Message framework v730)',),
             'content-type': ('multipart/mixed; boundary=Apple-Mail-13-196941151',),
             'message-id': ('<9169D984-4E0B-45EF-<EMAIL>-<EMAIL>>',),
             'from': ('<EMAIL>',),
             'subject': ('testing',),
             'date': ('Mon, 6 Jun 2005 22:21:22 +0200',),
             'to': ('<EMAIL>',)},
    attachments=[
        dict(
            filename='test.rb',
            content_id='',
            content_disposition='attachment',
            content_type='text/x-ruby-script',
            payload=b'puts "testing, testing"\r\n',
        ),
        dict(
            filename='test.pdf',
            content_id='',
            content_disposition='inline',
            content_type='application/pdf',
            payload=b'blah blah blah',
        ),
        dict(
            filename='smime.p7s',
            content_id='',
            content_disposition='attachment',
            content_type='application/pkcs7-signature',
            payload=b"\x8d\xa9\xa2\xb1*\x86H\x86\xf7\r\x01\x07\x02\xa0\x800\x88\xda\x9a+1\x0b0\t\x06\x05+\x0e\x03\x02\x1a\x05\x000\x80\x06\t*\x86J6\xa6\x8a\xc1\x07\x01\x00\x00\xa0\x82\x05J0\x82\x05F0\x82\x04.\x8d\xa9\xa2\xb1\x02\x02\x04?\xbe\xbaD0\r\x06\t*\x88\xda\x9a+\r\x01\x01\x05\x05\x00011\x0b0\t\x06\x03U\x04\x06\x13\x02F6\xa6\x8a\xc0\n\x06\x03U\x04\n\x13\x03TDC1\x140\x12\x06\x8d\xa9\xa2\xb3\x13\x0bTDC OCES CH\xda\x9a+\r040229115901Z\x17\r06026\xa6\x8a\xc22901Z0\x81\x801\x0b0\t\x06\x03U\x04\x8d\xa9\xa2\xb0K1)0'\x06\x03U\x04\n\x13 H\xda\x9a+. organisatorisk tin6\xa6\x8a\xc4nin",
        ),
    ],
    from_values=EmailAddress('', '<EMAIL>', '<EMAIL>'),
    to_values=(EmailAddress('', '<EMAIL>', '<EMAIL>'),),
    cc_values=(),
    bcc_values=(),
    reply_to_values=(),
)
2.03125
2
angular_scaffold/management/commands/helpers/_generate_debugger.py
juanfe/django-angular-scaffold
4
12784030
<reponame>juanfe/django-angular-scaffold<filename>angular_scaffold/management/commands/helpers/_generate_debugger.py
import os
import hashlib


def generate_debugger(directory, password):
    m = hashlib.md5()
    m.update(password)
    password_hash = m.hexdigest()
    logger_file = os.path.join(directory, 'assets', 'app', 'config', 'logger.js')
    print "Creating: " + logger_file
    with open(logger_file, 'w') as f:
        f.write("""app.config(["$logProvider", function ($logProvider) {
    "use strict";
    //Enables debug when ?debug=1&password=*password*
    var password = <PASSWORD>",
        querystring = (window.location.search ? window.location.search.substring(1) : window.location.hash.indexOf('?') !== -1 ? window.location.hash.split('?')[1] : ""),
        params = {};
    angular.forEach(querystring.split('&'), function (pair) {
        params[pair.split('=')[0]] = pair.split('=')[1];
    });
    $logProvider.debugEnabled(false);
    if (params.hasOwnProperty('debug') && params.hasOwnProperty('password')) {
        if (params.debug && md5(params.password) === password) {
            $logProvider.debugEnabled(true);
            console.info("Logging Enabled");
            console.log = function () {};
        }
    }
}]);
""" % password_hash)

    _debugger_docs = """#Logging

The logs of this project are using angular's `$logProvider` service. By default they are disabled, however, you can enable them in the browser by passing the proper credentials.

##Enabling Logging
To enable logging in the browser, pass the following query string on the end of your URL:

```
?debug=1&password=%s
```

##Using Logging
Now that logging can be enabled and disabled for debugging, make sure you provide good logging via `$log.debug(message)`. This functionally replaces `console.log()`.
""" % password

    if not os.path.exists(os.path.join(directory, 'docs')):
        os.makedirs(os.path.join(directory, 'docs'))

    with open(os.path.join(directory, 'docs', 'logging.md'), 'w') as f:
        f.write(_debugger_docs)
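A minimal sketch of how this helper might be called from a scaffold command; the directory and password below are illustrative assumptions, not defaults of the package:

    # Python 2, matching the module above
    generate_debugger('/path/to/project', 's3cret')
    # writes assets/app/config/logger.js and docs/logging.md under that directory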
2.25
2
src/konfetti/exceptions.py
samarcan/konfetti
23
12784031
class KonfettiError(Exception):
    """Common error for all errors in `konfetti`."""


class MissingError(AttributeError, KonfettiError):
    """Config option is missing in the given settings module."""
    # Should be inherited from AttributeError because tools like Celery rely
    # on this behavior


class SettingsNotSpecified(KonfettiError):
    """Environment variable, that points to a setting module is not set."""


class SettingsNotLoadable(KonfettiError):
    """Settings module is not found or can't be imported."""


class VaultBackendMissing(KonfettiError):
    """A secret variable is accessed, but vault backend is not configured."""


class SecretKeyMissing(MissingError):
    """Path exists in Vault, but doesn't contain specified value."""


class ForbiddenOverrideError(KonfettiError):
    """An attempt to override configuration with a key that doesn't exist in the configuration."""


class InvalidSecretOverrideError(KonfettiError):
    """Environment variable for secret override contains invalid or non-JSON data."""
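A tiny illustration of the dual inheritance noted in the comment above; this follows directly from the class definitions:

    # MissingError can be caught either as a konfetti error or as a plain
    # attribute-access failure, which is what getattr-based tooling expects.
    assert issubclass(MissingError, KonfettiError)
    assert issubclass(MissingError, AttributeError)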
2.515625
3
setup.py
abrammer/windspharm
56
12784032
"""Build and install the windspharm package.""" # Copyright (c) 2012-2018 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import os.path from setuptools import setup import versioneer packages = ['windspharm', 'windspharm.examples', 'windspharm.tests'] package_data = { 'windspharm.examples': ['example_data/*'], 'windspharm.tests': ['data/regular/*.npy', 'data/gaussian/*.npy']} with open(os.path.join(os.path.dirname(__file__), 'README.md'), 'r') as f: long_description = f.read() setup(name='windspharm', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='vector wind analysis in spherical coordinates', author='<NAME>', author_email='<EMAIL>', url='http://ajdawson.github.com/windspharm/', long_description=long_description, long_description_content_type='text/markdown', packages=packages, package_data=package_data, install_requires=['numpy', 'pyspharm >= 1.0.8'],)
1.796875
2
tests/test_cli/test_predict.py
jspaaks/vak
1
12784033
"""tests for vak.cli.predict module""" import pytest import vak.cli.predict import vak.config import vak.constants import vak.paths from . import cli_asserts from ..test_core.test_predict import predict_output_matches_expected @pytest.mark.parametrize( "audio_format, spect_format, annot_format", [ ("cbin", None, "notmat"), ("wav", None, "birdsong-recognition-dataset"), ], ) def test_predict( audio_format, spect_format, annot_format, specific_config, tmp_path, model, device ): output_dir = tmp_path.joinpath( f"test_predict_{audio_format}_{spect_format}_{annot_format}" ) output_dir.mkdir() options_to_change = [ {"section": "PREDICT", "option": "output_dir", "value": str(output_dir)}, {"section": "PREDICT", "option": "device", "value": device}, ] toml_path = specific_config( config_type="predict", model=model, audio_format=audio_format, annot_format=annot_format, options_to_change=options_to_change, ) vak.cli.predict.predict(toml_path) cfg = vak.config.parse.from_toml_path(toml_path) assert predict_output_matches_expected(output_dir, cfg.predict.annot_csv_filename) assert cli_asserts.log_file_created(command="predict", output_path=output_dir)
2.125
2
ch02/question_3.py
dhrey112/IntroToPython_Deitel
0
12784034
<filename>ch02/question_3.py<gh_stars>0
"""3. 2.3 (Fill in the missing code) Replace *** in the following code with a statement that
will print a message like 'Congratulations! Your grade of 91 earns you an A in this course'.
Your statement should print the value stored in the variable grade:"""

grade = int(input('Enter your grade: '))

if grade >= 90:
    print('Congratulations! Your grade of', grade, 'earns you an A in this course')
3.96875
4
47 - Distinct primes factors/distinct.py
jamtot/PyProjectEuler
0
12784035
<filename>47 - Distinct primes factors/distinct.py
def primegen():
    primes = [2,3]
    yield 2
    yield 3
    num = 3
    while True:
        prime = True
        num += 2
        for p in primes:
            if num%p == 0:
                prime = False
                break
        if prime:
            primes.append(num)
            yield num

def primecompgen(composites = True):
    primes = [2,3]
    comps = []
    if not composites:
        yield 2
        yield 3
    num = 3
    while True:
        prime = True
        if composites:
            num += 1
        else:
            num += 2
        for p in primes:
            if num%p == 0:
                prime = False
                break
        if prime:
            primes.append(num)
            if not composites:
                yield num
        else:
            comps.append(num)
            if composites:
                yield num, primes

def factor(n, primes):
    factors = []
    if n < 1:
        return factors  # the original dropped the `return`, so small n fell through into the loop
    while True:
        for p in primes:
            if p*p > n:
                if n == 1:
                    return factors
                else:
                    return factors + [(n, 1)]
            if n%p == 0:
                times = 1
                n = n/p
                while n%p == 0:
                    n = n/p
                    times += 1
                factors.append((p, times))

def findconsec(n):
    pcg = primecompgen()
    consecutive = 0
    prevcomp = 0
    while consecutive < n:
        comp, primes = pcg.next()
        if comp != prevcomp+1:
            consecutive = 0
        if len(factor(comp, primes)) == n:
            consecutive += 1
        else:
            consecutive = 0
        prevcomp = comp
    return (comp-n)+1

if __name__ == "__main__":
    assert findconsec(2) == 14
    assert findconsec(3) == 644
    print findconsec(4)  # 134043
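A worked check of the two asserts above (straightforward arithmetic, not code from the repository):

    # 14 = 2 * 7 and 15 = 3 * 5 are the first two consecutive integers with
    # two distinct prime factors each, hence findconsec(2) == 14.
    # 644 = 2**2 * 7 * 23, 645 = 3 * 5 * 43, 646 = 2 * 17 * 19 are the first
    # three consecutive integers with three distinct prime factors each,
    # hence findconsec(3) == 644.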
3.578125
4
player/management/commands/gendata.py
jacebrowning/virtualboombox
4
12784036
<filename>player/management/commands/gendata.py
from datetime import timedelta
from contextlib import suppress

from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from django.db.utils import IntegrityError
from django.utils import timezone

from player.models import Account, Song
from social.models import Reaction


class Command(BaseCommand):

    help = "Generate data for manual testing"

    def handle(self, *args, **kwargs):  # pylint: disable=unused-argument
        with suppress(IntegrityError):
            User.objects.create_superuser('admin', 'admin@localhost', 'password')

        me, _ = Account.objects.get_or_create(username='justus87')
        for index in range(5):
            song, _ = Song.objects.get_or_create(
                artist=f"Artist {index}",
                title=f"Title {index}",
                account=me,
            )
            Reaction.objects.get_or_create(
                song=song,
                comment=f"Artist {index} rocks!",
            )

        a, _ = Account.objects.get_or_create(username='aliasguru')
        a.latitude = 33.670348
        a.longitude = -117.775990
        a.save()

        a, _ = Account.objects.get_or_create(username='thecreepr')
        a.latitude = 42.909358
        a.longitude = -85.753993
        a.save()

        Account.objects.get_or_create(username='_invalid')

        Song.objects.get_or_create(
            artist="The Beatles",
            title="Come Together",
            date=timezone.now() - timedelta(days=4),
        )
        Song.objects.get_or_create(
            artist="this_is_an_unknown_artist",
            title="this_is_an_unknown_title",
            date=timezone.now() - timedelta(days=3),
        )
        Song.objects.get_or_create(
            artist="An Old Artist",
            title="And Old Title",
            date=timezone.now() - timedelta(days=30),
        )
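Because the module lives at player/management/commands/gendata.py, Django exposes it as a management command; a typical invocation (assuming the project's usual manage.py entry point) would be:

    python manage.py gendata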
2.4375
2
proplot/rctools.py
stefraynaud/proplot
0
12784037
<filename>proplot/rctools.py #!/usr/bin/env python3 """ A special object named `~proplot.rctools.rc`, belonging to the `~proplot.rctools.rc_configurator` class, is created on import. This is your one-stop shop for changing global settings belonging to any of the following three categories. 1. Builtin matplotlib `rcParams <https://matplotlib.org/users/customizing.html>`__ settings. These have the format ``x.y`` or ``x.y.z``. 2. ProPlot :ref:`rcParamsCustom` settings. These also have the format ``x.y`` (see below). 3. ProPlot :ref:`rcParamsShort` settings. These have no dots (see below). You can change settings with the `~proplot.rctools.rc` object as follows. * ``plot.rc.name = value`` * ``plot.rc['name'] = value`` * ``plot.rc.update(name1=value1, name2=value2)`` * ``plot.rc.update({'name1':value1, 'name2':value2})`` To temporarily change settings on a particular axes, use either of the following. * ``ax.format(name=value)`` * ``ax.format(rc_kw={'name':value})`` In all of these examples, if the setting name ``name`` contains any dots, you can simply **omit the dots**. For example, to change the :rcraw:`title.loc` property, use ``plot.rc.titleloc = value``, ``plot.rc.update(titleloc=value)``, or ``ax.format(titleloc=value)``. ############# rcParamsShort ############# These are **simple, short** names used to change multiple matplotlib and ProPlot settings at once, as shorthands for settings with longer names, or for special options. For example, :rcraw:`ticklen` changes the tick length for the *x* and *y* axes in one go. ================ ==================================================================================================================================================================================================================================== Key Description ================ ==================================================================================================================================================================================================================================== ``nbsetup`` Whether to run `nb_setup` on import. Can only be changed from the ``~/.proplotrc`` file. ``format`` The inline backend figure format, one of ``retina``, ``png``, ``jpeg``, ``pdf``, or ``svg``. Can only be changed from the ``~/.proplotrc`` file. ``autosave`` If not empty or ``0`` and :rcraw:`nbsetup` is ``True``, passed to `%autosave <https://www.webucator.com/blog/2016/03/change-default-autosave-interval-in-ipython-notebook/>`__. Can only be changed from the ``~/.proplotrc`` file. ``autoreload`` If not empty or ``0`` and :rcraw:`nbsetup` is ``True``, passed to `%autoreload <https://ipython.readthedocs.io/en/stable/config/extensions/autoreload.html#magic-autoreload>`__. Can only be changed from the ``~/.proplotrc`` file. ``abc`` Boolean, indicates whether to draw a-b-c labels by default. ``tight`` Boolean, indicates whether to auto-adjust figure bounds and subplot spacings. ``share`` The axis sharing level, one of ``0``, ``1``, ``2``, or ``3``. See `~proplot.subplots.subplots` for details. ``align`` Whether to align axis labels during draw. See `aligning labels <https://matplotlib.org/3.1.1/gallery/subplots_axes_and_figures/align_labels_demo.html>`__. ``span`` Boolean, toggles spanning axis labels. See `~proplot.subplots.subplots` for details. ``fontname`` Name of font used for all text in the figure. The default is Helvetica Neue. See `~proplot.fonttools` for details. ``cmap`` The default colormap. ``lut`` The number of colors to put in the colormap lookup table. 
``cycle`` The default color cycle name, used e.g. for lines. ``rgbcycle`` If ``True``, and ``colorblind`` is the current cycle, this registers the ``colorblind`` colors as ``'r'``, ``'b'``, ``'g'``, etc., like in `seaborn <https://seaborn.pydata.org/tutorial/color_palettes.html>`__. ``color`` The color of axis spines, tick marks, tick labels, and labels. ``alpha`` The opacity of the background axes patch. ``facecolor`` The color of the background axes patch. ``small`` Font size for legend text, tick labels, axis labels, and text generated with `~matplotlib.axes.Axes.text`. ``large`` Font size for titles, "super" titles, and a-b-c subplot labels. ``linewidth`` Thickness of axes spines and major tick lines. ``margin`` The margin of space between axes edges and objects plotted inside the axes, if ``xlim`` and ``ylim`` are unset. ``ticklen`` Length of major ticks in points. ``tickdir`` Major and minor tick direction. Must be one of ``out``, ``in``, or ``inout``. ``tickpad`` Padding between ticks and tick labels in points. ``grid`` Boolean, toggles major grid lines on and off. ``gridminor`` Boolean, toggles minor grid lines on and off. ``tickratio`` Ratio of minor tickline width to major tickline width. ``gridratio`` Ratio of minor gridline width to major gridline width. ``ticklenratio`` Ratio of minor tickline length to major tickline length. ``reso`` Resolution of geographic features, one of ``'lo'``, ``'med'``, or ``'hi'`` ``geogrid`` Boolean, toggles meridian and parallel gridlines on and off. ``land`` Boolean, toggles land patches on and off. ``ocean`` Boolean, toggles ocean patches on and off. ``lakes`` Boolean, toggles lake patches on and off. ``coast`` Boolean, toggles coastline lines on and off. ``borders`` Boolean, toggles country border lines on and off. ``innerborders`` Boolean, toggles internal border lines on and off, e.g. for states and provinces. ``rivers`` Boolean, toggles river lines on and off. ================ ==================================================================================================================================================================================================================================== ############## rcParamsCustom ############## The ``subplots`` category controls the default layout for figures and axes. The ``abc``, ``title``, and ``tick`` categories control a-b-c label, title, and axis tick label settings. The ``suptitle``, ``leftlabel``, ``toplabel``, ``rightlabel``, and ``bottomlabel`` categories control figure title and edge label settings. There are two new additions to the ``image`` category, and the new ``colorbar`` category controls *inset* and *outer* `~proplot.axes.Axes.colorbar` properties. The new ``gridminor`` category controls minor gridline settings, and the new ``geogrid`` category controls meridian and parallel line settings for `~proplot.axes.ProjectionAxes`. For both ``gridminor`` and ``geogrid``, if a property is empty, the corresponding property from ``grid`` is used. Finally, the ``geoaxes``, ``land``, ``ocean``, ``rivers``, ``lakes``, ``borders``, and ``innerborders`` categories control various `~proplot.axes.ProjectionAxes` settings. These are used when the boolean toggles for the corresponding :ref:`rcParamsShort` settings are turned on. 
=================================================================== ========================================================================================================================================================================================================================================================= Key(s) Description =================================================================== ========================================================================================================================================================================================================================================================= ``abc.style`` a-b-c label style. For options, see `~proplot.axes.Axes.format`. ``abc.loc`` a-b-c label position. For options, see `~proplot.axes.Axes.format`. ``abc.border`` Boolean, indicates whether to draw a white border around a-b-c labels inside an axes. ``abc.linewidth`` Width of the white border around a-b-c labels. ``abc.color``, ``abc.size``, ``abc.weight`` Font color, size, and weight for a-b-c labels. ``axes.formatter.zerotrim`` Boolean, indicates whether trailing decimal zeros are trimmed on tick labels. ``axes.formatter.timerotation`` Float, indicates the default *x* axis tick label rotation for datetime tick labels. ``borders.color``, ``borders.linewidth`` Line color and linewidth for country border lines. ``bottomlabel.color``, ``bottomlabel.size``, ``bottomlabel.weight`` Font color, size, and weight for column labels on the bottom of the figure. ``colorbar.loc`` Inset colorbar location, options are listed in `~proplot.axes.Axes.colorbar`. ``colorbar.grid`` Boolean, indicates whether to draw borders between each level of the colorbar. ``colorbar.frameon`` Boolean, indicates whether to draw a frame behind inset colorbars. ``colorbar.framealpha`` Opacity for inset colorbar frames. ``colorbar.length`` Length of outer colorbars. ``colorbar.insetlength`` Length of inset colorbars. Units are interpreted by `~proplot.utils.units`. ``colorbar.width`` Width of outer colorbars. Units are interpreted by `~proplot.utils.units`. ``colorbar.insetwidth`` Width of inset colorbars. Units are interpreted by `~proplot.utils.units`. ``colorbar.axespad`` Padding between axes edge and inset colorbars. Units are interpreted by `~proplot.utils.units`. ``colorbar.extend`` Length of rectangular or triangular "extensions" for panel colorbars. Units are interpreted by `~proplot.utils.units`. ``colorbar.insetextend`` Length of rectangular or triangular "extensions" for inset colorbars. Units are interpreted by `~proplot.utils.units`. ``geoaxes.facecolor``, ``geoaxes.edgecolor``, ``geoaxes.linewidth`` Face color, edge color, and edge width for the map outline patch. ``geogrid.labels`` Boolean, indicates whether to label the parallels and meridians. ``geogrid.labelsize`` Font size for latitide and longitude labels. Inherits from ``small``. ``geogrid.latmax`` Absolute latitude in degrees, poleward of which meridian gridlines are cut off. ``geogrid.lonstep``, ``geogrid.latstep`` Interval for meridian and parallel gridlines, in degrees. ``gridminor.linewidth``, ``geogrid.linewidth`` The line width. ``gridminor.linestyle``, ``geogrid.linestyle`` The line style. ``gridminor.alpha``, ``geogrid.alpha`` The line transparency. ``gridminor.color``, ``geogrid.color`` The line color. ``image.levels`` Default number of levels for ``pcolormesh`` and ``contourf`` plots. 
``image.edgefix`` Whether to fix the the `white-lines-between-filled-contours <https://stackoverflow.com/q/8263769/4970632>`__ and `white-lines-between-pcolor-rectangles <https://stackoverflow.com/q/27092991/4970632>`__ issues. This slows down figure rendering a bit. ``innerborders.color``, ``innerborders.linewidth`` Line color and linewidth for internal border lines. ``land.color``, ``ocean.color``, ``lakes.color`` Face color for land, ocean, and lake patches. ``leftlabel.color``, ``leftlabel.size``, ``leftlabel.weight`` Font color, size, and weight for row labels on the left-hand side. ``rightlabel.color``, ``rightlabel.size``, ``rightlabel.weight`` Font color, size, and weight for row labels on the right-hand side. ``rivers.color``, ``rivers.linewidth`` Line color and linewidth for river lines. ``subplots.axwidth`` Default width of each axes. Units are interpreted by `~proplot.utils.units`. ``subplots.panelwidth`` Width of side panels. Units are interpreted by `~proplot.utils.units`. ``subplots.pad`` Padding around figure edge. Units are interpreted by `~proplot.utils.units`. ``subplots.axpad`` Padding between adjacent subplots. Units are interpreted by `~proplot.utils.units`. ``subplots.panelpad`` Padding between subplots and panels, and between stacked panels. Units are interpreted by `~proplot.utils.units`. ``subplots.titlespace`` Vertical space for titles. Units are interpreted by `~proplot.utils.units`. ``subplots.ylabspace`` Horizontal space between subplots alotted for *y*-labels. Units are interpreted by `~proplot.utils.units`. ``subplots.xlabspace`` Vertical space between subplots alotted for *x*-labels. Units are interpreted by `~proplot.utils.units`. ``subplots.innerspace`` Space between subplots alotted for tick marks. Units are interpreted by `~proplot.utils.units`. ``subplots.panelspace`` Purely empty space between main axes and side panels. Units are interpreted by `~proplot.utils.units`. ``suptitle.color``, ``suptitle.size``, ``suptitle.weight`` Font color, size, and weight for the figure title. ``tick.labelcolor``, ``tick.labelsize``, ``tick.labelweight`` Font color, size, and weight for axis tick labels. These mirror the ``axes.labelcolor``, ``axes.labelsize``, and ``axes.labelweight`` `~matplotlib.rcParams` settings used for axes labels. ``title.loc`` Title position. For options, see `~proplot.axes.Axes.format`. ``title.border`` Boolean, indicates whether to draw a white border around titles inside an axes. ``title.linewidth`` Width of the white border around titles. ``title.pad`` Alias for ``axes.titlepad``, the title offset in arbitrary units ``title.color``, ``title.size``, ``title.weight`` Font color, size, and weight for subplot titles. ``toplabel.color``, ``toplabel.size``, ``toplabel.weight`` Font color, size, and weight for column labels on the top of the figure. =================================================================== ========================================================================================================================================================================================================================================================= ############## proplotrc file ############## To modify the global settings, edit your ``~/.proplotrc`` file. To modify settings for a particular project, create a ``.proplotrc`` file in the same directory as your ipython notebook, or in an arbitrary parent directory. As an example, the default ``.proplotrc`` file is shown below. 
The syntax is roughly the same as that used for ``matplotlibrc`` files, although ``.proplotrc`` strictly adheres to `YAML <https://en.wikipedia.org/wiki/YAML>`__. .. include:: ../proplot/.proplotrc :literal: """ # TODO: Add 'style' setting that overrides .proplotrc # Adapted from seaborn; see: https://github.com/mwaskom/seaborn/blob/master/seaborn/rcmod.py from . import utils from .utils import _counter, _timer, _benchmark import re import os import yaml import cycler import warnings import matplotlib.colors as mcolors import matplotlib.cm as mcm with _benchmark('pyplot'): import matplotlib.pyplot as plt try: import IPython get_ipython = IPython.get_ipython except ModuleNotFoundError: get_ipython = lambda: None __all__ = ['rc', 'rc_configurator', 'nb_setup'] # Initialize from matplotlib import rcParams as rcParams rcParamsShort = {} rcParamsCustom = {} # "Global" settings and the lower-level settings they change # NOTE: This whole section, declaring dictionaries and sets, takes 1ms RC_CHILDREN = { 'fontname': ('font.family',), 'cmap': ('image.cmap',), 'lut': ('image.lut',), 'alpha': ('axes.alpha',), # this is a custom setting 'facecolor': ('axes.facecolor', 'geoaxes.facecolor'), 'color': ('axes.edgecolor', 'geoaxes.edgecolor', 'axes.labelcolor', 'tick.labelcolor', 'hatch.color', 'xtick.color', 'ytick.color'), # change the 'color' of an axes 'small': ('font.size', 'tick.labelsize', 'xtick.labelsize', 'ytick.labelsize', 'axes.labelsize', 'legend.fontsize', 'geogrid.labelsize'), # the 'small' fonts 'large': ('abc.size', 'figure.titlesize', 'axes.titlesize', 'suptitle.size', 'title.size', 'leftlabel.size', 'toplabel.size', 'rightlabel.size', 'bottomlabel.size'), # the 'large' fonts 'linewidth': ('axes.linewidth', 'geoaxes.linewidth', 'hatch.linewidth', 'xtick.major.width', 'ytick.major.width'), 'margin': ('axes.xmargin', 'axes.ymargin'), 'grid': ('axes.grid',), 'gridminor': ('axes.gridminor',), 'geogrid': ('axes.geogrid',), 'ticklen' : ('xtick.major.size', 'ytick.major.size'), 'tickdir': ('xtick.direction', 'ytick.direction'), 'tickpad': ('xtick.major.pad', 'xtick.minor.pad', 'ytick.major.pad', 'ytick.minor.pad'), } # Names of the new settings RC_PARAMNAMES = {*rcParams.keys()} RC_SHORTNAMES = { 'abc', 'span', 'share', 'align', 'tight', 'fontname', 'cmap', 'lut', 'cycle', 'rgbcycle', 'alpha', 'facecolor', 'color', 'small', 'large', 'linewidth', 'margin', 'grid', 'gridminor', 'geogrid', 'ticklen' , 'tickdir', 'tickpad', 'tickratio', 'ticklenratio', 'gridratio', 'reso', 'land', 'ocean', 'lakes', 'coast', 'borders', 'innerborders', 'rivers', 'nbsetup', 'format', 'autosave', 'autoreload' } RC_CUSTOMNAMES = { 'axes.formatter.zerotrim', 'axes.formatter.timerotation', 'axes.gridminor', 'axes.geogrid', 'axes.alpha', 'image.levels', 'image.edgefix', 'geoaxes.linewidth', 'geoaxes.facecolor', 'geoaxes.edgecolor', 'land.color', 'ocean.color', 'lakes.color', 'coast.color', 'coast.linewidth', 'borders.color', 'borders.linewidth', 'innerborders.color', 'innerborders.linewidth', 'rivers.color', 'rivers.linewidth', 'abc.size', 'abc.weight', 'abc.color', 'abc.loc', 'abc.style', 'abc.border', 'abc.linewidth', 'title.loc', 'title.pad', 'title.color', 'title.border', 'title.linewidth', 'title.weight', 'title.size', 'suptitle.size', 'suptitle.weight', 'suptitle.color', 'leftlabel.size', 'leftlabel.weight', 'leftlabel.color', 'rightlabel.size', 'rightlabel.weight', 'rightlabel.color', 'toplabel.size', 'toplabel.weight', 'toplabel.color', 'bottomlabel.size', 'bottomlabel.weight', 'bottomlabel.color', 
'gridminor.alpha', 'gridminor.color', 'gridminor.linestyle', 'gridminor.linewidth', 'geogrid.labels', 'geogrid.alpha', 'geogrid.color', 'geogrid.labelsize', 'geogrid.linewidth', 'geogrid.linestyle', 'geogrid.latmax', 'geogrid.lonstep', 'geogrid.latstep', 'tick.labelweight', 'tick.labelcolor', 'tick.labelsize', 'subplots.pad', 'subplots.axpad', 'subplots.panelpad', 'subplots.ylabspace', 'subplots.xlabspace', 'subplots.innerspace', 'subplots.titlespace', 'subplots.axwidth', 'subplots.panelwidth', 'subplots.panelspace', 'colorbar.grid', 'colorbar.frameon', 'colorbar.framealpha', 'colorbar.loc', 'colorbar.length', 'colorbar.width', 'colorbar.insetlength', 'colorbar.insetwidth', 'colorbar.extend', 'colorbar.insetextend', 'colorbar.axespad', } # Used by Axes.format, allows user to pass rc settings as keyword args, # way less verbose. For example, landcolor='b' vs. rc_kw={'land.color':'b'}. RC_NODOTSNAMES = { # useful for passing these as kwargs name.replace('.', ''):name for names in (RC_CUSTOMNAMES, RC_PARAMNAMES, RC_SHORTNAMES) for name in names } # Categories for returning dict of subcategory properties RC_CATEGORIES = { *(re.sub('\.[^.]*$', '', name) for names in (RC_CUSTOMNAMES, RC_PARAMNAMES) for name in names), *(re.sub('\..*$', '', name) for names in (RC_CUSTOMNAMES, RC_PARAMNAMES) for name in names) } # Unit conversion # See: https://matplotlib.org/users/customizing.html, all props matching # the below strings use the units 'points', and my special categories are inches! def _convert_units(key, value): """Converts certain keys to the units "points". If "key" is passed, tests that key against possible keys that accept physical units.""" # WARNING: Must keep colorbar and subplots units alive, so when user # requests em units, values change with respect to font size. The points # thing is a conveniene feature so not as important for them. if (isinstance(value,str) and key.split('.')[0] not in ('colorbar','subplots') and re.match('^.*(width|space|size|pad|len|small|large)$', key)): value = utils.units(value, 'pt') return value def _set_cycler(name): """Sets the default color cycler.""" # Draw from dictionary try: colors = mcm.cmap_d[name].colors except (KeyError, AttributeError): cycles = sorted(name for name,cmap in mcm.cmap_d.items() if isinstance(cmap, mcolors.ListedColormap)) raise ValueError(f'Invalid cycle name {name!r}. 
Options are: {", ".join(cycles)}') # Apply color name definitions if rcParamsShort['rgbcycle'] and name.lower() == 'colorblind': regcolors = colors + [(0.1, 0.1, 0.1)] elif mcolors.to_rgb('r') != (1.0,0.0,0.0): # reset regcolors = [(0.0, 0.0, 1.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.75, 0.75, 0.0), (0.75, 0.75, 0.0), (0.0, 0.75, 0.75), (0.0, 0.0, 0.0)] else: regcolors = [] # no reset necessary for code,color in zip('brgmyck', regcolors): rgb = mcolors.to_rgb(color) mcolors.colorConverter.colors[code] = rgb mcolors.colorConverter.cache[code] = rgb # Pass to cycle constructor rcParams['patch.facecolor'] = colors[0] rcParams['axes.prop_cycle'] = cycler.cycler('color', colors) def _get_config_paths(): """Returns configuration file paths.""" # Get paths idir = os.getcwd() paths = [] while idir: # not empty string ipath = os.path.join(idir, '.proplotrc') if os.path.exists(ipath): paths.append(ipath) ndir, _ = os.path.split(idir) if ndir == idir: break idir = ndir paths = paths[::-1] # sort from decreasing to increasing importantce # Home configuration ipath = os.path.join(os.path.expanduser('~'), '.proplotrc') if os.path.exists(ipath) and ipath not in paths: paths.insert(0, ipath) # Global configuration ipath = os.path.join(os.path.dirname(__file__), '.proplotrc') if ipath not in paths: paths.insert(0, ipath) return paths def _get_synced_params(key=None, value=None): """Returns dictionaries for updating "child" properties in `rcParams` and `rcParamsCustom` with global property.""" kw = {} # builtin properties that global setting applies to kw_custom = {} # custom properties that global setting applies to if key is not None and value is not None: items = [(key,value)] else: items = rcParamsShort.items() for key,value in items: # Tick length/major-minor tick length ratio if key in ('ticklen', 'ticklenratio'): if key == 'ticklen': ticklen = _convert_units(key, value) ratio = rcParamsShort['ticklenratio'] else: ticklen = rcParamsShort['ticklen'] ratio = value kw['xtick.minor.size'] = ticklen*ratio kw['ytick.minor.size'] = ticklen*ratio # Spine width/major-minor tick width ratio elif key in ('linewidth', 'tickratio'): if key == 'linewidth': tickwidth = _convert_units(key, value) ratio = rcParamsShort['tickratio'] else: tickwidth = rcParamsShort['linewidth'] ratio = value kw['xtick.minor.width'] = tickwidth*ratio kw['ytick.minor.width'] = tickwidth*ratio # Grid line elif key in ('grid.linewidth', 'gridratio'): if key == 'grid.linewidth': gridwidth = _convert_units(key, value) ratio = rcParamsShort['gridratio'] else: gridwidth = rcParams['grid.linewidth'] ratio = value kw_custom['gridminor.linewidth'] = gridwidth*ratio # Now update linked settings val = None for name in RC_CHILDREN.get(key, ()): val = _convert_units(key, value) if name in rcParamsCustom: kw_custom[name] = val else: kw[name] = val if key == 'linewidth' and val == 0: ikw, ikw_custom = _get_synced_params('ticklen', 0) kw.update(ikw) kw_custom.update(ikw_custom) return kw, kw_custom class rc_configurator(object): _public_api = ( 'get', 'fill', 'category', 'reset', 'context', 'update' ) # getattr and setattr will not look for these items def __str__(self): return type(rcParams).__str__(rcParamsShort) # just show globals def __repr__(self): return type(rcParams).__repr__(rcParamsShort) def __contains__(self, key): return (key in RC_SHORTNAMES or key in RC_CUSTOMNAMES or key in RC_PARAMNAMES or key in RC_NODOTSNAMES) # query biggest lists last @_counter # about 0.05s def __init__(self): """Magical abstract class for managing matplotlib 
`rcParams <https://matplotlib.org/users/customizing.html>`__ settings, ProPlot :ref:`rcParamsCustom` settings, and :ref:`rcParamsShort` "global" settings. When initialized, this loads defaults settings plus any user overrides in the ``~/.proplotrc`` file. See the `~proplot.rctools` documentation for details.""" # Set the default style. Note that after first figure made, backend # is 'sticky', never changes! See: https://stackoverflow.com/a/48322150/4970632 plt.style.use('default') # Load the defaults from file for i,file in enumerate(_get_config_paths()): # Load if not os.path.exists(file): continue with open(file) as f: try: data = yaml.safe_load(f) except yaml.YAMLError as err: print('{file!r} has invalid YAML syntax.') raise err # Special duplicate keys if data is None: continue # Add keys to dictionaries gkeys, ckeys = {*()}, {*()} for key,value in data.items(): if key in RC_SHORTNAMES: rcParamsShort[key] = value if i == 0: gkeys.add(key) elif key in RC_CUSTOMNAMES: value = _convert_units(key, value) rcParamsCustom[key] = value if i == 0: ckeys.add(key) elif key in RC_PARAMNAMES: value = _convert_units(key, value) rcParams[key] = value else: raise RuntimeError(f'{file!r} has invalid key {key!r}.') # Make sure we did not miss anything if i == 0: if gkeys != RC_SHORTNAMES: raise RuntimeError(f'{file!r} has incomplete or invalid global keys {RC_SHORTNAMES - gkeys}.') if ckeys != RC_CUSTOMNAMES: raise RuntimeError(f'{file!r} has incomplete or invalid custom keys {RC_CUSTOMNAMES - ckeys}.') # Apply *global settings* to children settings rcParams['axes.titlepad'] = rcParamsCustom['title.pad'] _set_cycler(rcParamsShort['cycle']) rc, rc_new = _get_synced_params() for key,value in rc.items(): rcParams[key] = value for key,value in rc_new.items(): rcParamsCustom[key] = value # Caching stuff self._init = True self._getitem_mode = 0 self._context = {} self._cache = {} self._cache_orig = {} self._cache_restore = {} def __getitem__(self, key): """Returns `rcParams <https://matplotlib.org/users/customizing.html>`__, :ref:`rcParamsCustom`, and :ref:`rcParamsShort` settings. If we are in a `~rc_configurator.context` block, may return ``None`` if the setting is not cached (i.e. if it was not changed by the user).""" # Can get a whole bunch of different things # Get full dictionary e.g. for rc[None] if not key: return {**rcParams, **rcParamsCustom} # Standardize # NOTE: If key is invalid, raise error down the line. if '.' not in key and key not in rcParamsShort: key = RC_NODOTSNAMES.get(key, key) # Allow for special time-saving modes where we *ignore rcParams* # or even *ignore rcParamsCustom*. mode = self._getitem_mode if mode == 0: kws = (self._cache, rcParamsShort, rcParamsCustom, rcParams) elif mode == 1: kws = (self._cache, rcParamsShort, rcParamsCustom) # custom only! elif mode == 2: kws = (self._cache,) # changed only! else: raise KeyError(f'Invalid caching mode {mode!r}.') # Get individual property. 
Will successively index a few different dicts # Try to return the value for kw in kws: try: return kw[key] except KeyError: continue # If we were in one of the exclusive modes, return None if mode == 0: raise KeyError(f'Invalid prop name {key!r}.') else: return None def __setitem__(self, key, value): """Sets `rcParams <https://matplotlib.org/users/customizing.html>`__, :ref:`rcParamsCustom`, and :ref:`rcParamsShort` settings.""" # Check whether we are in context block # NOTE: Do not add key to cache until we are sure it is a valid key cache = self._cache context = bool(self._context) # test if context dict is non-empty if context: restore = self._cache_restore # Standardize # NOTE: If key is invalid, raise error down the line. if '.' not in key and key not in rcParamsShort: key = RC_NODOTSNAMES.get(key, key) # Special keys if key == 'title.pad': key = 'axes.titlepad' if key == 'rgbcycle': # if must re-apply cycler afterward cache[key] = value rcParamsShort[key] = value key, value = 'cycle', rcParamsShort['cycle'] # Set the default cycler if key == 'cycle': cache[key] = value if context: restore[key] = rcParamsShort[key] restore['axes.prop_cycle'] = rcParams['axes.prop_cycle'] restore['patch.facecolor'] = rcParams['patch.facecolor'] _set_cycler(value) # Gridline toggling, complicated because of the clunky way this is # implemented in matplotlib. There should be a gridminor setting! elif key in ('grid', 'gridminor'): cache[key] = value ovalue = rcParams['axes.grid'] owhich = rcParams['axes.grid.which'] if context: restore[key] = rcParamsShort[key] restore['axes.grid'] = ovalue restore['axes.grid.which'] = owhich # Instruction is to turn off gridlines if not value: # Gridlines are already off, or they are on for the particular # ones that we want to turn off. Instruct to turn both off. if not ovalue or (key == 'grid' and owhich == 'major') or (key == 'gridminor' and owhich == 'minor'): which = 'both' # disable both sides # Gridlines are currently on for major and minor ticks, so we instruct # to turn on gridlines for the one we *don't* want off elif owhich == 'both': # and ovalue is True, as we already tested value = True which = 'major' if key == 'gridminor' else 'minor' # if gridminor=False, enable major, and vice versa # Gridlines are on for the ones that we *didn't* instruct to turn # off, and off for the ones we do want to turn off. This just # re-asserts the ones that are already on. else: value = True which = owhich # Instruction is to turn on gridlines else: # Gridlines are already both on, or they are off only for the ones # that we want to turn on. Turn on gridlines for both. if owhich == 'both' or (key == 'grid' and owhich == 'minor') or (key == 'gridminor' and owhich == 'major'): which = 'both' # Gridlines are off for both, or off for the ones that we # don't want to turn on. We can just turn on these ones. 
else: which = owhich cache.update({'axes.grid':value, 'axes.grid.which':which}) rcParams.update({'axes.grid':value, 'axes.grid.which':which}) # Ordinary settings elif key in rcParamsShort: # Update global setting cache[key] = value if context: restore[key] = rcParamsShort[key] rcParamsShort[key] = value # Update children of setting rc, rc_new = _get_synced_params(key, value) cache.update(rc) cache.update(rc_new) if context: restore.update({key:rcParams[key] for key in rc}) restore.update({key:rcParamsCustom[key] for key in rc_new}) rcParams.update(rc) rcParamsCustom.update(rc_new) # Update normal settings elif key in RC_CUSTOMNAMES: value = _convert_units(key, value) cache[key] = value if context: restore[key] = rcParamsCustom[key] rcParamsCustom[key] = value elif key in RC_PARAMNAMES: value = _convert_units(key, value) cache[key] = value if context: restore[key] = rcParams[key] rcParams[key] = value # rcParams dict has key validation else: raise KeyError(f'Invalid key {key!r}.') self._init = False # setitem was successful, we are no longer in initial state # Attributes same as items def __getattribute__(self, attr): """Invokes `~rc_configurator.__getitem__`.""" if attr[:1] == '_' or attr in self._public_api: return object.__getattribute__(self, attr) else: return self[attr] def __setattr__(self, attr, value): """Invokes `~rc_configurator.__setitem__`.""" if attr[:1] == '_': object.__setattr__(self, attr, value) else: self[attr] = value # Immutability def __delitem__(self, *args): """Pseudo-immutability.""" raise RuntimeError('rc settings cannot be deleted.') def __delattr__(self, *args): """Pseudo-immutability.""" raise RuntimeError('rc settings cannot be deleted.') # Context tools def __enter__(self): """Apply settings from configurator cache.""" self._cache_orig = rc._cache.copy() self._cache_restore = {} # shouldn't be necessary but just in case self._cache = {} for key,value in self._context.items(): self[key] = value # applies globally linked and individual settings def __exit__(self, *args): """Restore configurator cache to initial state.""" self._context = {} self._getitem_mode = 0 for key,value in self._cache_restore.items(): self[key] = value self._cache = self._cache_orig self._cache_restore = {} self._cache_orig = {} def context(self, *args, mode=0, **kwargs): """ Temporarily modifies settings in a ``with...as`` block, used by ProPlot internally but may also be useful for power users. This function was invented to prevent successive calls to `~proplot.axes.Axes.format` from constantly looking up and re-applying unchanged settings. Testing showed that these gratuitous `rcParams <https://matplotlib.org/users/customizing.html>`__ lookups and artist updates increased runtime by seconds, even for relatively simple plots. Parameters ---------- *args Dictionaries of setting names and values. **kwargs Setting names and values passed as keyword arguments. Other parameters ---------------- mode : {0,1,2}, optional The `~rc_configurator.__getitem__` mode. Dictates the behavior of the `rc` object within a ``with...as`` block when settings are requested with e.g. :rcraw:`setting`. If you are using `~rc_configurator.context` manually, the `mode` is automatically set to ``0`` -- other input is ignored. Internally, ProPlot uses all of the three available modes. 0. All settings (`rcParams <https://matplotlib.org/users/customizing.html>`__, :ref:`rcParamsCustom`, and :ref:`rcParamsShort`) are returned, whether or not `~rc_configurator.context` has changed them. 1. 
Unchanged `rcParams <https://matplotlib.org/users/customizing.html>`__ return ``None``. :ref:`rcParamsCustom` and :ref:`rcParamsShort` are returned whether or not `~rc_configurator.context` has changed them. This is used in the `~proplot.axes.Axes.__init__` call to `~proplot.axes.Axes.format`. When a setting lookup returns ``None``, `~proplot.axes.Axes.format` does not apply it. 2. All unchanged settings return ``None``. This is used during user calls to `~proplot.axes.Axes.format`. Example ------- >>> import proplot as plot >>> with plot.rc.context(linewidth=2, ticklen=5): ... f, ax = plot.subplots() ... ax.plot(data) """ if mode not in range(3): raise ValueError(f'Invalid _getitem_mode {mode}.') for arg in args: if not isinstance(arg, dict): raise ValueError('Non-dictionary argument.') kwargs.update(arg) self._context = kwargs # could be empty self._getitem_mode = mode return self # Other tools def get(self, key, cache=False): """ Returns a setting. Parameters ---------- key : str The setting name. cache : bool, optional If ``False``, the `~rc_configurator.__getitem__` mode is temporarily set to ``0`` (see `~rc_configurator.context`). """ if not cache: orig = self._getitem_mode self._getitem_mode = 0 item = self[key] if not cache: self._getitem_mode = orig return item def fill(self, props, cache=True): """ Returns a dictionary filled with `rc` settings, used internally to build dictionaries for updating `~matplotlib.artist.Artist` instances. Parameters ---------- props : dict-like Dictionary whose values are names of `rc` settings. The values are replaced with the corresponding property only if `~rc_configurator.__getitem__` does not return ``None``. Otherwise, that key, value pair is omitted from the output dictionary. cache : bool, optional If ``False``, the `~rc_configurator.__getitem__` mode is temporarily set to ``0`` (see `~rc_configurator.context`). Otherwise, if an `rc` lookup returns ``None``, the setting is omitted from the output dictionary. """ if not cache: orig = self._getitem_mode self._getitem_mode = 0 props_out = {} for key,value in props.items(): item = self[value] if item is not None: props_out[key] = item if not cache: self._getitem_mode = orig return props_out def category(self, cat, cache=True): """ Returns a dictionary of settings belonging to the indicated category, i.e. settings beginning with the substring ``cat + '.'``. Parameters ---------- cat : str, optional The `rc` settings category. cache : bool, optional If ``False``, the `~rc_configurator.__getitem__` mode is temporarily set to ``0`` (see `~rc_configurator.context`). """ # Check if cat not in RC_CATEGORIES: raise ValueError(f'RC category {cat} does not exist. Valid categories are {", ".join(RC_CATEGORIES)}.') if not cache: mode = 0 else: mode = self._getitem_mode # Allow for special time-saving modes where we *ignore rcParams* # or even *ignore rcParamsCustom*. if mode == 0: kws = (self._cache, rcParamsShort, rcParamsCustom, rcParams) elif mode == 1: kws = (self._cache, rcParamsShort, rcParamsCustom) elif mode == 2: kws = (self._cache, rcParamsShort) else: raise KeyError(f'Invalid caching mode {mode}.') # Return params dictionary params = {} for kw in kws: for category,value in kw.items(): if re.search(f'^{cat}\.', category): subcategory = re.sub(f'^{cat}\.', '', category) if subcategory and '.' not in subcategory: params[subcategory] = value return params def update(self, *args, **kwargs): """ Bulk updates settings, usage is similar to python `dict` objects. 
Parameters ---------- *args : str, dict, or (str, dict) Positional arguments can be a dictionary of `rc` settings and/or a "category" string name. If a category name is passed, all settings in the dictionary (if it was passed) and all keyword arg names (if they were passed) are prepended with the string ``cat + '.'``. For example, ``rc.update('axes', labelsize=20, titlesize=20)`` changes the ``axes.labelsize`` and ``axes.titlesize`` properties. **kwargs `rc` settings passed as keyword args. """ # Parse args kw = {} prefix = '' if len(args) > 2: raise ValueError('Accepts 1-2 positional arguments. Use plot.rc.update(kw) to update a bunch of names, or plot.rc.update(category, kw) to update subcategories belonging to single category e.g. axes. All kwargs will be added to the dict.') elif len(args) == 2: prefix = args[0] kw = args[1] elif len(args) == 1: if isinstance(args[0], str): prefix = args[0] else: kw = args[0] # Apply settings if prefix: prefix = prefix + '.' kw.update(kwargs) for key,value in kw.items(): self[prefix + key] = value def reset(self): """Restores settings to the initial state -- ProPlot defaults, plus any user overrides in the ``~/.proplotrc`` file.""" if not self._init: # save resources if rc is unchanged! return self.__init__() # Declare rc object # WARNING: Must be instantiated after ipython notebook setup! The default # backend may change some rc settings! rc = rc_configurator() """Instance of `rc_configurator`. This is used to change global settings. See the `~proplot.rctools` documentation for details.""" # Ipython notebook behavior @_timer def nb_setup(): """ Sets up your iPython workspace, called on import if :rcraw:`nbsetup` is ``True``. For all iPython sessions, passes :rcraw:`autoreload` to the useful `autoreload <https://ipython.readthedocs.io/en/stable/config/extensions/autoreload.html>`__ extension. For iPython *notebook* sessions, results in higher-quality inline figures and passes :rcraw:`autosave` to the `autosave <https://ipython.readthedocs.io/en/stable/interactive/magics.html#magic-matplotlib>`__ extension. See the `~proplot.rctools` documentation for details. """ # Make sure we are in session ipython = get_ipython() if ipython is None: return # Only do this if not already loaded -- otherwise will get *recursive* # reloading, even with unload_ext command! 
if rcParamsShort['autoreload']: if 'autoreload' not in ipython.magics_manager.magics['line']: ipython.magic("reload_ext autoreload") # reload instead of load, to avoid annoying message ipython.magic("autoreload " + str(rcParamsShort['autoreload'])) # turn on expensive autoreloading # Initialize with default 'inline' settings # Reset rc object afterwards rc._init = False try: # For notebooks ipython.magic("matplotlib inline") rc.reset() except KeyError: # For terminals # TODO: Fix auto tight layout with osx backend -- currently has # standard issue where content adjusted but canvas size not adjusted # until second draw command, and other issue where window size does # not sync with figure size ipython.magic("matplotlib qt") rc.reset() else: # Choose svg vector or retina hi-res bitmap backends autosave = rcParamsShort['autosave'] if autosave: # capture annoying message + line breaks with IPython.utils.io.capture_output(): ipython.magic("autosave " + str(autosave)) ipython.magic("config InlineBackend.figure_formats = ['" + rcParamsShort['format'] + "']") ipython.magic("config InlineBackend.rc = {}") # no notebook-specific overrides ipython.magic("config InlineBackend.close_figures = True") # memory issues ipython.magic("config InlineBackend.print_figure_kwargs = {'bbox_inches':None}") # use ProPlot tight layout # Setup notebook and issue warning # TODO: Add to list of incompatible backends? if rcParamsShort['nbsetup']: nb_setup() if rcParams['backend'][:2] == 'nb' or rcParams['backend'] in ('MacOSX',): warnings.warn(f'Due to automatic figure resizing, using ProPlot with the {rcParams["backend"]!r} backend may result in unexpected behavior. Try using %matplotlib inline or %matplotlib qt, or just import ProPlot before specifying the backend and ProPlot will automatically load it.')
2.0625
2
ImageAlgoKD/ImageAlgoKD_kernel_cuda.py
ZihengChen/ImageAlgorithm
6
12784038
<filename>ImageAlgoKD/ImageAlgoKD_kernel_cuda.py
## cuda
import pycuda.driver as cuda
import pycuda.autoinit
from pycuda.compiler import SourceModule

mod = SourceModule("""
// 1.1 get rho
__global__ void rho_cuda( float *d_rho, float *d_Points, float *d_wPoints,
                          int nPoints, int kPoints,
                          float KERNEL_R, float KERNEL_R_NORM, float KERNEL_R_POWER){

    int i = blockDim.x*blockIdx.x + threadIdx.x;
    int idx_point = i * kPoints;

    if( i < nPoints ) {
        float rhoi = 0.0;
        // loop over all points to calculate rho
        for (int j=0; j<nPoints; j++){
            float dr = 0;
            for (int k = 0; k < kPoints; k++){
                dr += pow( d_Points[ j*kPoints + k] - d_Points[idx_point + k] , 2) ;
            }
            dr = sqrt(dr);

            if (dr<KERNEL_R){
                float expWeight = 1.0;
                if (KERNEL_R_POWER != 0)
                    expWeight = exp(- pow(dr/KERNEL_R_NORM, KERNEL_R_POWER) );
                rhoi += d_wPoints[j] * expWeight;
                ///////////////////////////////////////////////////
                // some device does not support exp() function    //
                // have to use Taylor expansion for exp() instead //
                ///////////////////////////////////////////////////
                // float d = pow(dr/KERNEL_R_NORM, KERNEL_R_POWER);
                // float expWeight = 1 / (1 + d + d*d/2 + d*d*d/6 + d*d*d*d/24);
                // rhoi += d_wPoints[j] * expWeight
            }
        }
        d_rho[i] = rhoi;
    }
}

// 1.2 get rho with NNBin
__global__ void rho_cudabin(float *d_rho, float *d_Points, float *d_wPoints,
                            int *d_nnbinHead, int *d_nnbinSize, int *d_nnbinList,
                            int *d_idxPointsHead, int *d_idxPointsSize, int *d_idxPonitsList,
                            int nPoints, int kPoints,
                            float KERNEL_R, float KERNEL_R_NORM, float KERNEL_R_POWER){

    int i = blockDim.x*blockIdx.x + threadIdx.x;
    int idx_point = i * kPoints;

    if( i < nPoints ) {
        float rhoi = 0.0;
        int a1 = d_nnbinHead[i];
        int b1 = a1 + d_nnbinSize[i];
        // loop over nn bins
        for (int l1=a1; l1<b1; l1++){
            int idxNNBin = d_nnbinList[l1];
            int a2 = d_idxPointsHead[idxNNBin];
            int b2 = a2 + d_idxPointsSize[idxNNBin];
            // loop over points in the nn bin
            for (int l2 = a2; l2<b2; l2++){
                // get j
                int j = d_idxPonitsList[l2];
                float dr = 0;
                // get dij
                for (int k = 0; k < kPoints; k++){
                    dr += pow( d_Points[ j*kPoints + k] - d_Points[idx_point + k] , 2) ;
                }
                dr = sqrt(dr);
                // get density
                if (dr<KERNEL_R){
                    float expWeight = 1.0;
                    if (KERNEL_R_POWER != 0)
                        expWeight = exp(- pow(dr/KERNEL_R_NORM, KERNEL_R_POWER) );
                    rhoi += d_wPoints[j] * expWeight;
                    ///////////////////////////////////////////////////
                    // some device does not support exp() function    //
                    // have to use Taylor expansion for exp() instead //
                    ///////////////////////////////////////////////////
                    // float d = pow(dr/KERNEL_R_NORM, KERNEL_R_POWER);
                    // float expWeight = 1 / (1 + d + d*d/2 + d*d*d/6 + d*d*d*d/24);
                    // rhoi += d_wPoints[j] * expWeight
                }
            }
        }
        d_rho[i] = rhoi;
    }
}

// 2. get rhorank and nh+nhd 2in1
__global__ void rhoranknh_cuda( int *d_rhorank, int *d_nh, float *d_nhd,
                                float *d_Points, float *d_rho,
                                int nPoints, int kPoints, float MAXDISTANCE){

    int i = blockDim.x*blockIdx.x + threadIdx.x;
    int idx_point = i * kPoints;

    if( i < nPoints ) {
        float rhoi = d_rho[i];
        int rhoranki = 0;
        int nhi = i;
        float nhdi = MAXDISTANCE;

        // loop over other points to calculate rhorank, nh, nhd
        for (int j=0; j<nPoints; j++){
            // calculate rhorank
            if( d_rho[j]>rhoi )
                rhoranki++;
            else if ( (d_rho[j]==rhoi) && (j>i)) // if same rho, by definition, larger index has higher rho
                rhoranki++;

            // find nh and nhd
            // if higher, larger index has higher rho
            bool isHigher = d_rho[j]>rhoi || (d_rho[j]==rhoi && j>i) ;
            if (isHigher){
                float dr = 0;
                for (int k = 0; k < kPoints; k++){
                    dr += pow( d_Points[ j*kPoints + k] - d_Points[ idx_point + k] , 2 ) ;
                }
                dr = sqrt(dr);
                // if nearer
                if ( dr<nhdi ){
                    nhdi = dr;
                    nhi = j;
                }
            }
        }
        d_rhorank[i] = rhoranki;
        d_nh[i] = nhi;
        d_nhd[i] = nhdi;
    }
}
""" )

rho_cuda = mod.get_function("rho_cuda")
rho_cudabin = mod.get_function("rho_cudabin")
rhoranknh_cuda = mod.get_function("rhoranknh_cuda")
2.453125
2
scrapelin/__init__.py
MichaelYusko/scrapelin
0
12784039
<reponame>MichaelYusko/scrapelin<filename>scrapelin/__init__.py
"""
:copyright: <NAME> a.k.a <NAME>
:license: MIT, see LICENSE for more details.
"""
1.109375
1
modules/zabbix_send.py
yakumo-saki/megacli_to_zabbix
1
12784040
import json
import logging

import config as cfg
from modules.zabbix_sender import send_to_zabbix

logger = logging.getLogger(__name__)


"""Sends device LLD (low-level discovery) data to Zabbix.
result = {"/dev/sda": {"model": EXAMPLE SSD 250, "POWER_CYCLE": 123 ...}}
@param result           data to send
@param discoveryKey     zabbix discovery key. ex) megacli.lld.adapter
@param discoveryPHName  discovery placeholder for values ex) SASADDR
"""
def send_device_discovery(result, discoveryKey, discoveryPHName):
    logger.info("Sending device discovery to zabbix")

    discovery_result = []
    for key in result:
        discovery_result.append({discoveryPHName: key})

    data = {"request": "sender data", "data": []}
    valueStr = json.dumps({"data": discovery_result})
    one_data = {"host": cfg.ZABBIX_HOST, "key": discoveryKey, "value": f"{valueStr}"}
    data["data"].append(one_data)

    result = send_to_zabbix(data)
    logger.info(result)

    return None


"""Sends the data that the interpreter could parse:
values interpreted by smartctl itself plus values interpreted by our own code.
data = {
    "host1": {"item1": 1234, "item2": "value"},
    "host2": {"item1": 5678, "item2": "value"}
}
"""
def send_data(data):
    logger.info("Send data to zabbix")

    results = []
    for mainkey in data:
        detail = data[mainkey]  # discovery key
        for key in detail:
            results.append({
                "host": cfg.ZABBIX_HOST,
                "key": key,
                "value": detail[key],
            })

    sender_data = {"request": "sender data", "data": results}
    result = send_to_zabbix(sender_data)
    logger.info(result)

    return None
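

# --- Illustrative usage (editor's sketch; not part of the original module) ---
# Assuming cfg.ZABBIX_HOST is configured and modules.zabbix_sender is importable,
# the two helpers above might be driven like this:
#
#   devices = {"/dev/sda": {"model": "EXAMPLE SSD 250", "POWER_CYCLE": 123}}
#   send_device_discovery(devices, "megacli.lld.adapter", "SASADDR")
#   send_data({"host1": {"item1": 1234, "item2": "value"}})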
2.359375
2
fnx/prep.py
hoefkensj/portal
0
12784041
<gh_stars>0
#!/usr/bin/env python
import os
import shutil

import lib
import lib.lib

src = '$HOME/.bashrc'


def prep_src(src, **k):
    """
    :param src:
    :param k:
    :return:
    """
    # this needs to be done before actually creating the props dict,
    # as it is not done automatically
    if "~" in src or '$' in src:
        src = os.path.expandvars(src)
        src = os.path.expanduser(src)
    props = props_path(src)
    # avoid this, complex behavior
    if props['islink'] and ('--copy-contents' in [k.get('flags')] or '--follow-links' in [k.get('flags')]):
        print('flag detected')
        src = props['realpath']
        props = props_path(src)
    for key in props.keys():
        print(key, '\t:\t', props[key])


prep_src(src, flags='--copy-contents')


def prep_dst(dst, src_meat, **k):
    """
    :param src:
    :param k:
    :return:
    """
    # this needs to be done before actually creating the props dict,
    # as it is not done automatically
    if "~" in dst or '$' in dst:
        dst = os.path.expandvars(dst)
        dst = os.path.expanduser(dst)
    props_dst = props_path(dst)
    props_dstsrc = props_path(os.path.join(dst, src_meat))
    if not props_dst['exists']:
        os.makedirs(dst)
    if props_dstsrc['exists']:
        # cannot copy, use merge instead
        method = 'merge'
    if props_dst['islink']:
        dst = props_dst['realpath']
        props_dst = props_path(dst)
    dst = props_dst['abspath']


def rmr(path) -> None:
    shutil.rmtree(path)


def props_path(src) -> dict:
    props = {}
    for fn in [(item, fn) for item, fn in os.path.__dict__.items() if callable(fn)]:
        try:
            props[fn[0]] = fn[1](src)
        except Exception as ERROR:
            props[fn[0]] = f'#!_ERROR :{ERROR}'
    return props
2.28125
2
doctor/command.py
jhauberg/gitdoctor
1
12784042
# coding=utf-8

"""
Provides a common interface for executing commands.
"""

import sys
import subprocess

import doctor.report as report

from doctor.report import supports_color


def get_argv(cmd: str) -> list:
    """ Return a list of arguments from a fully-formed command line. """

    return cmd.strip().split(' ')


def display(cmd: str):
    """ Emit a diagnostic message that looks like the execution of a command line. """

    diagnostic = f'$ {cmd}'
    diagnostic = f'\x1b[0;37m{diagnostic}\x1b[0m' if supports_color(sys.stderr) else diagnostic

    report.information(diagnostic, wrapped=False)


def execute(cmd: str, show_argv: bool=False, show_output: bool=False) -> int:
    """ Execute a command-line process and return exit code.

    If show_argv is True, display the executed command with parameters/arguments.
    If show_output is True, display the resulting output from the executed command.

    The resulting output of the executed command is not redirected (unless
    show_output is False, in which case it is quelched), which means it might be
    printed on either stdout or stderr depending on the executed command.
    """

    argv = get_argv(cmd)

    if show_argv:
        display(cmd)

    result = subprocess.run(
        argv,
        stdout=sys.stdout if show_output else subprocess.DEVNULL,
        stderr=sys.stderr if show_output else subprocess.DEVNULL)

    return result.returncode
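

# --- Illustrative usage (editor's sketch; not part of the original module) ---
# Assuming this file is importable as doctor.command, a caller might run:
#
#   from doctor.command import execute
#   exit_code = execute('git status', show_argv=True, show_output=True)
#   assert exit_code == 0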
3.140625
3
code/examples/02-gpio/stepper_ULN2003.py
yuanyanhui/intro-upy-esp32
0
12784043
""" Drive stepper motor 28BYJ-48 using ULN2003 """ from machine import Pin from time import sleep_ms # define pins for ULN2003 IN1 = Pin(16, Pin.OUT) IN2 = Pin(17, Pin.OUT) IN3 = Pin(5, Pin.OUT) IN4 = Pin(18, Pin.OUT) # half-step mode # counter clockwise step sequence seq_ccw = [[1, 0, 0, 0], [1, 1, 0, 0], [0, 1, 0, 0], [0, 1, 1, 0], [0, 0, 1, 0], [0, 0, 1, 1], [0, 0, 0, 1], [1, 0, 0, 1]] # clockwise step sequence seq_cw = seq_ccw[::-1] delay = 1 # ms, delay between steps # one clockwise revolution (4096 steps) for i in range(4096): step = i % 8 IN1.value(seq_cw[step][0]) IN2.value(seq_cw[step][1]) IN3.value(seq_cw[step][2]) IN4.value(seq_cw[step][3]) sleep_ms(1) # one counterclockwise revolution (4096 steps) for i in range(4096): step = i % 8 IN1.value(seq_ccw[step][0]) IN2.value(seq_ccw[step][1]) IN3.value(seq_ccw[step][2]) IN4.value(seq_ccw[step][3]) sleep_ms(1)
3.21875
3
POPGEN/make_correct_snp_file_from_snp_file.py
Hammarn/Scripts
0
12784044
<filename>POPGEN/make_correct_snp_file_from_snp_file.py
#!/usr/bin/env python

import pandas as pd
import argparse
import sys
import pdb


def read_input(input_file, header):
    if not header:
        DF = pd.read_csv(input_file[0], sep = "\s+" , header = None)
    if header:
        DF = pd.read_csv(input_file[0], sep = "\s+" )
    return DF


def save(bim, snp, filename):
    new_frame = bim.merge(snp, left_on=3, right_on = "V4" )
    new_frame[[1,0,2,3,4,5]].to_csv(filename[0], sep = " ", index = False, header = False)


if __name__ == "__main__":
    # Command line arguments
    parser = argparse.ArgumentParser("Fix SNP file")
    parser.add_argument("-b", "--bim", nargs = '+', help= "bim file ")
    parser.add_argument("-s", "--snp", nargs = '+', help= "The SNP file from MOSAIC")
    args = parser.parse_args()

    filename = args.snp
    bim = read_input(args.bim, None)
    snp = read_input(args.snp, True)
    save(bim, snp, filename)
2.96875
3
python/examples/test_simple_race.py
ziyuli/XWorld
83
12784045
<filename>python/examples/test_simple_race.py
#!/usr/bin/python

from py_simulator import Simulator
from random import randint

if __name__ == "__main__":
    options = {
        "pause_screen": False,
        "window_width": 480,
        "window_height": 480,
        "track_type": "straight",
        "track_width": 20.0,
        "track_length": 100.0,
        "track_radius": 30.0,
        "race_full_manouver": False,
        "random": False,
        "difficulty": "easy",
        "context": 1
    }
    sr = Simulator.create("simple_race", options)
    sr.reset_game()
    num_actions = sr.get_num_actions()

    act_rep = options["context"]
    reward = 0
    for i in range(100):
        game_over_str = sr.game_over()
        states = sr.get_state()
        action = randint(0, num_actions - 1)
        r = sr.take_actions({"action": action}, act_rep, False)
        if game_over_str != "alive":
            print "game over because of ", game_over_str
            sr.reset_game()
            continue
        print r
        reward += r

    print "total reward ", reward
3.40625
3
sparse_causal_model_learner_rl/test_learner.py
sergeivolodin/causality-disentanglement-rl
2
12784046
import vectorincrement
import os
import gin
import sparse_causal_model_learner_rl.learners.rl_learner as learner
import sparse_causal_model_learner_rl.learners.abstract_learner as abstract_learner
import sparse_causal_model_learner_rl.config as config
import pytest
from sparse_causal_model_learner_rl.sacred_gin_tune.sacred_wrapper import load_config_files


def test_learn_vectorincrement():
    ve_config_path = os.path.join(os.path.dirname(vectorincrement.__file__), 'config', 've5.gin')
    learner_config_path = os.path.join(os.path.dirname(learner.__file__), '..', 'configs', 'test.gin')
    print(ve_config_path, learner_config_path)
    load_config_files([ve_config_path, learner_config_path])
    l = learner.CausalModelLearnerRL(config.Config())
    l.train()
    gin.clear_config()


class EmptyLearner(abstract_learner.AbstractLearner):
    def maybe_write_artifacts(self, path_epoch, add_artifact_local):
        pass

    @property
    def _context_subclass(self):
        return {}

    def collect_steps(self):
        pass

    def __repr__(self):
        return ""


def test_abstract_learner_create():
    f = os.path.join(os.path.dirname(abstract_learner.__file__), '..', 'configs', 'base_learner.gin')
    load_config_files([f])
    l = EmptyLearner(config.Config())
    l.train()
    gin.clear_config()


@pytest.fixture(autouse=True)
def clean_gin():
    gin.clear_config()
    yield
    gin.clear_config()
1.875
2
python_developer_tools/python/obj_utils.py
carlsummer/python_developer_tools
32
12784047
# !/usr/bin/env python
# -- coding: utf-8 --
# @Author zengxiaohui
# Datatime:4/29/2021 8:38 PM
# @File:obj_utils
from python_developer_tools.python.string_utils import str_is_null


def obj_is_null(obj):
    """Check whether the object is empty/None"""
    if obj is None:
        return True
    if isinstance(obj, list) and len(obj) == 0:
        return True
    if isinstance(obj, str):
        return str_is_null(obj)
2.375
2
teach/migrations/0003_auto_20191027_1008.py
rajk-apps/teach
0
12784048
<filename>teach/migrations/0003_auto_20191027_1008.py
# Generated by Django 2.2.6 on 2019-10-27 10:08

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('teach', '0002_remove_course_additional_lecture_content'),
    ]

    operations = [
        migrations.AlterField(
            model_name='content',
            name='format',
            field=models.CharField(choices=[('md', 'md'), ('html', 'html'), ('python', 'python')], default='md', max_length=10),
        ),
    ]
1.65625
2
Autoclock.py
Ryushane/ryuscript
0
12784049
from selenium import webdriver
from selenium.webdriver import ActionChains
import time

# Automated mini-program check-in script
if __name__ == '__main__':
    # Replace these with your unified-authentication username and password
    user_name = 'xxx'
    pwd = '<PASSWORD>'

    # Add these two lines to keep the browser window from opening (headless mode)
    option = webdriver.ChromeOptions()
    #option.add_argument('headless')
    # set option

    # Open the check-in page in the browser
    browser = webdriver.Chrome(options=option)
    browser.get("https://app.buaa.edu.cn/uc/wap/login?redirect=https%3A%2F%2Fapp.buaa.edu.cn%2Fncov%2Fwap%2Fdefault%2Findex")

    # Enter the username and password
    user_name_input = browser.find_element_by_css_selector('#app > div.content > div:nth-child(1) > input[type=text]')
    user_name_input.send_keys(user_name)
    user_pwd_input = browser.find_element_by_css_selector('#app > div.content > div:nth-child(2) > input[type=password]')
    user_pwd_input.send_keys(<PASSWORD>)

    # Then click the login button
    login_button = browser.find_element_by_css_selector('#app > div.btn')
    ActionChains(browser).move_to_element(login_button).click(login_button).perform()
    print('Clicked login')

    # Navigate to the form and click the "get location" button
    # (the loop waits for the redirected page to finish loading)
    while True:
        try:
            location_button = browser.find_element_by_css_selector('body > div.item-buydate.form-detail2 > div > div > section > div.form > ul > li:nth-child(7) > div > input[type=text]')
            break
        except:
            time.sleep(5)
            browser.get("https://app.buaa.edu.cn/ncov/wap/default/index")
    ActionChains(browser).move_to_element(location_button).click(location_button).perform()
    print('Got location')

    # Fill in the temperature
    temperature_button = browser.find_element_by_css_selector('body > div.item-buydate.form-detail2 > div > div > section > div.form > ul > li:nth-child(9) > div > div > div:nth-child(3)')
    ActionChains(browser).move_to_element(temperature_button).click(temperature_button).perform()
    print('Filled in temperature')

    # Click submit
    submit_button = browser.find_element_by_css_selector('body > div.item-buydate.form-detail2 > div > div > section > div.list-box > div > a')
    ActionChains(browser).move_to_element(submit_button).click(submit_button).perform()
    print('Clicked submit')
    time.sleep(1)

    # Confirm
    while True:
        try:
            confirm_button = browser.find_element_by_css_selector('#wapcf > div > div.wapcf-btn-box > div.wapcf-btn.wapcf-btn-ok')
            print('Submitted successfully')
            break
        except:
            try:
                confirm_button = browser.find_element_by_css_selector('#wapat > div > div.wapat-btn-box > div')
                print('Already submitted today')
                break
            except:
                time.sleep(0.5)
    ActionChains(browser).move_to_element(confirm_button).click(confirm_button).perform()
    time.sleep(1)
    browser.quit()
3
3
tests/test_day7.py
ghallberg/advent2020
0
12784050
<filename>tests/test_day7.py
from advent2020 import day7 as d

test_input_1 = [
    "light red bags contain 1 bright white bag, 2 muted yellow bags.",
    "dark orange bags contain 3 bright white bags, 4 muted yellow bags.",
    "bright white bags contain 1 shiny gold bag.",
    "muted yellow bags contain 2 shiny gold bags, 9 faded blue bags.",
    "shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags.",
    "dark olive bags contain 3 faded blue bags, 4 dotted black bags.",
    "vibrant plum bags contain 5 faded blue bags, 6 dotted black bags.",
    "faded blue bags contain no other bags.",
    "dotted black bags contain no other bags.",
]

test_input_2 = [
    "shiny gold bags contain 2 dark red bags.",
    "dark red bags contain 2 dark orange bags.",
    "dark orange bags contain 2 dark yellow bags.",
    "dark yellow bags contain 2 dark green bags.",
    "dark green bags contain 2 dark blue bags.",
    "dark blue bags contain 2 dark violet bags.",
    "dark violet bags contain no other bags.",
]

small_input = [
    "faded blue bags contain 2 dotted black bags.",
    "velvet red bags contain 5 faded blue bags, 1 dotted black bag.",
    "dotted black bags contain no other bags.",
]

small_rules = [
    ("faded blue", [(2, "dotted black")]),
    ("velvet red", [(5, "faded blue"), (1, "dotted black")]),
    ("dotted black", []),
]

small_graph = {
    "faded blue": {"contained_in": {"velvet red": 5}, "contains": {"dotted black": 2}},
    "velvet red": {
        "contained_in": {},
        "contains": {"faded blue": 5, "dotted black": 1},
    },
    "dotted black": {
        "contained_in": {"faded blue": 2, "velvet red": 1},
        "contains": {},
    },
}


def test_parse_line():
    assert d.parse_rules(small_input) == small_rules


def test_make_graph():
    assert d.build_graph(small_rules) == small_graph


def test_num_ancestors():
    assert len(d.num_ancestors("velvet red", small_graph)) == 0
    assert len(d.num_ancestors("faded blue", small_graph)) == 1
    assert len(d.num_ancestors("dotted black", small_graph)) == 2


def test_num_children():
    assert d.num_children("velvet red", small_graph) == 16
    assert d.num_children("faded blue", small_graph) == 2
    assert d.num_children("dotted black", small_graph) == 0


def test_solve():
    assert d.solve(test_input_1) == (4, 32)
    assert d.solve(test_input_2) == (0, 126)
2.828125
3