Dataset schema (column | dtype | observed range; ⌀ marks columns that contain nulls):

| column | dtype | range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 to 2.06M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 248 |
| max_stars_repo_name | string | length 5 to 125 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24 ⌀ |
| max_issues_repo_path | string | length 3 to 248 |
| max_issues_repo_name | string | length 5 to 125 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 67k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24 ⌀ |
| max_forks_repo_path | string | length 3 to 248 |
| max_forks_repo_name | string | length 5 to 125 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24 ⌀ |
| content | string | length 5 to 2.06M |
| avg_line_length | float64 | 1 to 1.02M |
| max_line_length | int64 | 3 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
| count_classes | int64 | 0 to 1.6M |
| score_classes | float64 | 0 to 1 |
| count_generators | int64 | 0 to 651k |
| score_generators | float64 | 0 to 1 |
| count_decorators | int64 | 0 to 990k |
| score_decorators | float64 | 0 to 1 |
| count_async_functions | int64 | 0 to 235k |
| score_async_functions | float64 | 0 to 1 |
| count_documentation | int64 | 0 to 1.04M |
| score_documentation | float64 | 0 to 1 |
hexsha: 83c3ff2e3e7205ac40f986b841cd8fa73336b765 | size: 1,158 | ext: py | lang: Python
path: src/mem/ruby/network/garnet/fixed-pipeline/GarnetRouter_PNET_Container_d.py | repo: pnkfb9/gem5_priority @ fbf766277df78a470758cf7d798d12fb1e7c51c4 | licenses: ["BSD-3-Clause"] (path, repo, and licenses are identical across the stars, issues, and forks field groups here and in the records below)
stars: null | issues: null | forks: null
# Authors: Davide Zoni
from m5.params import *
from m5.proxy import *
from BasicRouter import BasicRouter
class GarnetRouter_PNET_Container_d(BasicRouter):
type = 'GarnetRouter_PNET_Container_d'
cxx_class = 'Router_PNET_Container_d'
cxx_header = "mem/ruby/network/garnet/fixed-pipeline/Router_PNET_Container_d.hh"
vcs_per_vnet = Param.Int(Parent.vcs_per_vnet,
"virtual channels per virtual network")
virt_nets = Param.Int(Parent.number_of_virtual_networks,
"number of virtual networks")
virt_nets_spurious = Param.Int(Parent.number_of_virtual_networks_spurious,
"number of virtual networks spurious to be used with VNET_REUSE")
# param moved to garnet network to be distributed to both routers and NICs
    totVicharSlotPerVnet = Param.UInt8(Parent.totVicharSlotPerVnet,
        "VICHAR slots per vnet in the input buffer")
    numFifoResynchSlots = Param.UInt8(Parent.numFifoResynchSlots,
        "number of slots in the FIFO resynchronizer, when used")
    router_frequency = Param.Counter(Parent.router_frequency,
        "initial router frequency for each router")
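The Parent.* proxies above defer each parameter lookup to whichever SimObject ends up owning the router, which is how a single value set on the Garnet network fans out to all routers and NICs (per the comment on totVicharSlotPerVnet). A toy illustration of that resolution order in plain Python; this is not gem5's actual proxy machinery:

```python
# Toy parent-proxy resolution: walk up the ownership tree until an
# ancestor defines the requested parameter.
class Node:
    def __init__(self, parent=None, **params):
        self.parent = parent
        self.params = params

    def resolve(self, name):
        node = self
        while node is not None:
            if name in node.params:
                return node.params[name]
            node = node.parent
        raise KeyError(name)

network = Node(vcs_per_vnet=4, totVicharSlotPerVnet=8)
router = Node(parent=network)  # defines nothing itself
print(router.resolve("vcs_per_vnet"))  # -> 4, inherited from the network
```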
avg_line_length: 52.636364 | max_line_length: 120 | alphanum_fraction: 0.740069 | classes: 1,049 / 0.905872 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 489 / 0.42228
hexsha: 83c5d489d870a160b110a47da3fc244bdbe0bd80 | size: 4,699 | ext: py | lang: Python
path: Wrappers/Python/Testing/MasterTest.py | repo: gregmedlock/roadrunnerwork @ 11f18f78ef3e381bc59c546a8d5e3ed46d8ab596 | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null
import os

# Note: this harness targets Python 2 (execfile was removed in Python 3)
# and assumes Windows-style path separators.
location = os.path.join(os.path.dirname(__file__), 'Functions\\')
#location = 'Tests\\'
execfile(location + 'getVersion.py')
execfile(location + 'writeSBML.py')
#execfile(location + 'computeSteadyStateValues.py')
execfile(location + 'evalModel.py')
#execfile(location + 'getAvailableSymbols.py')
execfile(location + 'getBoundarySpeciesByIndex.py')
execfile(location + 'getBoundarySpeciesIds.py')
execfile(location + 'getBuildDate.py')
#execfile(location + 'getCCode.py')
#execfile(location + 'getCCodeHeader.py')
#execfile(location + 'getCCodeSource.py')
execfile(location + 'getCapabilities.py')
execfile(location + 'getCompartmentByIndex.py')
execfile(location + 'getCompartmentIds.py')
execfile(location + 'getConcentrationControlCoefficientIds.py')
execfile(location + 'getConservationMatrix.py')
execfile(location + 'getCopyright.py')
#execfile(location + 'getuCC.py')
#execfile(location + 'getuEE.py')
#execfile(location + 'getCC.py')
#execfile(location + 'getEE.py')
execfile(location + 'getEigenValueIds.py')
execfile(location + 'getElasticityCoefficientIds.py')
execfile(location + 'getFloatingSpeciesByIndex.py')
execfile(location + 'getFloatingSpeciesInitialConcentrations.py')
execfile(location + 'getFloatingSpeciesInitialConditionIds.py')
execfile(location + 'getFloatingSpeciesIds.py')
execfile(location + 'getFloatingSpeciesConcentrations.py')
execfile(location + 'getFluxControlCoefficientIds.py')
#execfile(location + 'getFullJacobian.py') Causes crash
execfile(location + 'getGlobalParameterByIndex.py')
execfile(location + 'getGlobalParameterIds.py')
execfile(location + 'getGlobalParameterValues.py')
execfile(location + 'getLastError.py')
execfile(location + 'getLinkMatrix.py')
execfile(location + 'getNrMatrix.py')
execfile(location + 'getL0Matrix.py')
execfile(location + 'getMatrixNumCols.py')
execfile(location + 'getMatrixNumRows.py')
execfile(location + 'getMatrixElement.py')
execfile(location + 'getNumberOfBoundarySpecies.py')
execfile(location + 'getNumberOfCompartments.py')
execfile(location + 'getNumberOfDependentSpecies.py')
execfile(location + 'getNumberOfFloatingSpecies.py')
execfile(location + 'getNumberOfGlobalParameters.py')
execfile(location + 'getNumberOfIndependentSpecies.py')
execfile(location + 'getNumberOfReactions.py')
execfile(location + 'getParamPromotedSBML.py')
execfile(location + 'getRRInstance.py')
execfile(location + 'getRateOfChange.py')
execfile(location + 'getRatesOfChange.py')
execfile(location + 'getRatesOfChangeEx.py')
execfile(location + 'getRatesOfChangeIds.py')
execfile(location + 'getReactionIds.py')
execfile(location + 'getReactionRate.py')
execfile(location + 'getReactionRates.py')
execfile(location + 'getReactionRatesEx.py')
#execfile(location + 'getReducedJacobian.py')
execfile(location + 'getResultColumnLabel.py')
execfile(location + 'getResultElement.py')
execfile(location + 'getResultNumCols.py')
execfile(location + 'getResultNumRows.py')
execfile(location + 'getSBML.py')
#execfile(location + 'getScaledElasticityMatrix.py')
#execfile(location + 'getScaledFloatingSpeciesElasticity.py')
execfile(location + 'getSelectionList.py')
execfile(location + 'getSteadyStateSelectionList.py')
execfile(location + 'getStoichiometryMatrix.py')
#execfile(location + 'getStringListElement.py')
#execfile(location + 'getStringListLength.py')
execfile(location + 'getTempFolder.py')
execfile(location + 'getValue.py')
#execfile(location + 'getVectorElement.py')
#execfile(location + 'getVectorLength.py')
#execfile(location + 'hasError.py')
execfile(location + 'loadSBML.py')
execfile(location + 'loadSBMLFromFile.py')
#execfile(location + 'oneStep.py')
execfile(location + 'printList.py')
execfile(location + 'printMatrix.py')
#execfile(location + 'printResult.py')
#execfile(location + 'printVector.py')
execfile(location + 'reset.py')
execfile(location + 'setBoundarySpeciesByIndex.py')
execfile(location + 'setCapabilities.py')
execfile(location + 'setCompartmentByIndex.py')
execfile(location + 'setComputeAndAssignConservationLaws.py')
execfile(location + 'setFloatingSpeciesByIndex.py')
execfile(location + 'setGlobalParameterByIndex.py')
execfile(location + 'setNumPoints.py')
execfile(location + 'setSelectionList.py')
execfile(location + 'setSteadyStateSelectionList.py')
execfile(location + 'setTempFolder.py')
execfile(location + 'setTimeEnd.py')
execfile(location + 'setTimeStart.py')
execfile(location + 'setValue.py')
#execfile(location + 'setVectorElement.py')
#execfile(location + 'simulate.py')
#execfile(location + 'simulateEx.py')
#execfile(location + 'steadyState.py')
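Since execfile() no longer exists in Python 3, a port of this harness would read and exec each script explicitly. A minimal sketch of that port, keeping the same Functions/ layout and trimming the script list for brevity:

```python
import os

# Python 3 replacement for the execfile() calls above: read each test
# script and execute it in the current globals, as execfile() did.
location = os.path.join(os.path.dirname(__file__), 'Functions')

test_scripts = [
    'getVersion.py',
    'writeSBML.py',
    'evalModel.py',
    # ... remaining entries as in the original list ...
]

for script in test_scripts:
    path = os.path.join(location, script)
    with open(path) as f:
        exec(compile(f.read(), path, 'exec'), globals())
```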
avg_line_length: 44.330189 | max_line_length: 72 | alphanum_fraction: 0.770377 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 2,878 / 0.612471
hexsha: 83c6045e3e675c58b5fec8fdf9881ebf94e9feda | size: 334 | ext: py | lang: Python
path: condition/models.py | repo: SamusChief/myth-caster-api @ 76a43f48b70c6a4b509c90757d7906689799cc25 | licenses: ["MIT"]
stars: null | issues: null | forks: 1 (2021-08-14T18:46:52.000Z to 2021-08-14T18:46:52.000Z)
""" Models for Conditions app """
from django.db import models
from common.models import OwnedModel
class Condition(OwnedModel):
""" Condition model """
name = models.CharField(unique=True, max_length=255, db_index=True)
description = models.TextField()
def __str__(self):
return f'Condition: {self.name}'
avg_line_length: 23.857143 | max_line_length: 71 | alphanum_fraction: 0.700599 | classes: 230 / 0.688623 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 81 / 0.242515
hexsha: 83c68825efc5cb85db8af2cf295d7be0c83834f7 | size: 11,214 | ext: py | lang: Python
path: curriculum/experiments/goals/point_nd/goal_point_nd_trpo.py | repo: coco-robotics/rllab-curriculum @ f55b50224fcf5a9a5c064542eb0850a966cab223 | licenses: ["MIT"]
stars: 115 (2017-12-06T16:31:10.000Z to 2022-03-01T13:13:55.000Z) | issues: 21 (2017-11-15T18:28:16.000Z to 2021-04-22T15:26:45.000Z) | forks: 46 (2017-12-22T22:26:01.000Z to 2022-02-17T06:34:15.000Z)
from curriculum.utils import set_env_no_gpu, format_experiment_prefix
set_env_no_gpu()
import argparse
import math
import os
import os.path as osp
import sys
import random
from multiprocessing import cpu_count
import numpy as np
import tensorflow as tf
from rllab.misc.instrument import run_experiment_lite
from rllab import config
from rllab.misc.instrument import VariantGenerator
from rllab.algos.trpo import TRPO
from rllab.baselines.linear_feature_baseline import LinearFeatureBaseline
from curriculum.envs.ndim_point.point_env import PointEnv
from rllab.envs.normalized_env import normalize
from rllab.policies.gaussian_mlp_policy import GaussianMLPPolicy
from curriculum.envs.goal_env import GoalExplorationEnv, evaluate_goal_env
from curriculum.envs.base import FixedStateGenerator, UniformStateGenerator
from curriculum.state.evaluator import *
from curriculum.logging.html_report import format_dict, HTMLReport
from curriculum.logging.visualization import *
from curriculum.logging.logger import ExperimentLogger
from curriculum.experiments.goals.point_nd.utils import plot_policy_performance
EXPERIMENT_TYPE = osp.basename(__file__).split('.')[0]
def run_task(v):
random.seed(v['seed'])
np.random.seed(v['seed'])
# goal generators
logger.log("Initializing the goal generators and the inner env...")
inner_env = normalize(PointEnv(dim=v['goal_size'], state_bounds=v['state_bounds']))
print("the state_bounds are: ", v['state_bounds'])
center = np.zeros(v['goal_size'])
uniform_goal_generator = UniformStateGenerator(state_size=v['goal_size'], bounds=v['goal_range'],
center=center)
feasible_goal_ub = np.array(v['state_bounds'])[:v['goal_size']]
print("the feasible_goal_ub is: ", feasible_goal_ub)
uniform_feasible_goal_generator = UniformStateGenerator(state_size=v['goal_size'], bounds=[-1 * feasible_goal_ub,
feasible_goal_ub])
env = GoalExplorationEnv(
env=inner_env, goal_generator=uniform_goal_generator,
obs2goal_transform=lambda x: x[:int(len(x) / 2)],
terminal_eps=v['terminal_eps'],
only_feasible=v['only_feasible'],
distance_metric=v['distance_metric'],
terminate_env=True, goal_weight=v['goal_weight'],
) # this goal_generator will be updated by a uniform after
if v['sample_unif_feas']:
env.update_goal_generator(uniform_feasible_goal_generator)
policy = GaussianMLPPolicy(
env_spec=env.spec,
hidden_sizes=(32, 32),
# Fix the variance since different goals will require different variances, making this parameter hard to learn.
learn_std=False,
output_gain=v['output_gain'],
init_std=v['policy_init_std'],
)
baseline = LinearFeatureBaseline(env_spec=env.spec)
n_traj = 3
# feasible_goals = generate_initial_goals(env, policy, v['goal_range'], horizon=v['horizon'], size=10000) #v['horizon'])
# print(feasible_goals)
# uniform_list_goal_generator = UniformListStateGenerator(goal_list=feasible_goals.tolist())
# env.update_goal_generator(uniform_list_goal_generator)
# env.update_goal_generator(fixed_goal_generator)
logger.log("Initializing report and plot_policy_reward...")
log_dir = logger.get_snapshot_dir()
inner_log_dir = osp.join(log_dir, 'inner_iters')
report = HTMLReport(osp.join(log_dir, 'report.html'), images_per_row=3)
report.add_header("{}".format(EXPERIMENT_TYPE))
report.add_text(format_dict(v))
logger.log("Starting the outer iterations")
for outer_iter in range(v['outer_iters']):
logger.log("Outer itr # %i" % outer_iter)
logger.log("Perform TRPO with UniformListStateGenerator...")
with ExperimentLogger(inner_log_dir, outer_iter, snapshot_mode='last', hold_outter_log=True):
algo = TRPO(
env=env,
policy=policy,
baseline=baseline,
batch_size=v['pg_batch_size'],
max_path_length=v['horizon'],
n_itr=v['inner_iters'],
discount=0.995,
step_size=0.01,
plot=False,
)
algo.train()
report.add_image(
plot_policy_performance(policy, env, v['horizon'])
)
        # log how the policy performs under the uniform-feasible and original goal generators
old_goal_generator = env.goal_generator
logger.log("Evaluating performance on Unif and Fix Goal Gen...")
with logger.tabular_prefix('UnifFeasGoalGen_'):
env.update_goal_generator(uniform_feasible_goal_generator)
evaluate_goal_env(env, policy=policy, horizon=v['horizon'], n_goals=50,
fig_prefix='UnifFeasGoalGen_itr%d' % outer_iter,
report=report, n_traj=n_traj)
# back to old goal generator
with logger.tabular_prefix("UnifGoalGen_"):
env.update_goal_generator(old_goal_generator)
evaluate_goal_env(env, policy=policy, horizon=v['horizon'], n_goals=50,
fig_prefix='UnifGoalGen_itr%d' % outer_iter,
report=report, n_traj=n_traj)
logger.dump_tabular(with_prefix=False)
report.save()
report.new_row()
with logger.tabular_prefix('FINALUnifFeasGoalGen_'):
env.update_goal_generator(uniform_feasible_goal_generator)
evaluate_goal_env(env, policy=policy, horizon=v['horizon'], n_goals=5e3, fig_prefix='FINAL1UnifFeasGoalGen_',
report=report, n_traj=n_traj)
evaluate_goal_env(env, policy=policy, horizon=v['horizon'], n_goals=5e3, fig_prefix='FINAL2UnifFeasGoalGen_',
report=report, n_traj=n_traj)
logger.dump_tabular(with_prefix=False)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--ec2', '-e', action='store_true', default=False, help="add flag to run in ec2")
parser.add_argument('--clone', '-c', action='store_true', default=False,
help="add flag to copy file and checkout current")
parser.add_argument('--local_docker', '-d', action='store_true', default=False,
help="add flag to run in local dock")
parser.add_argument('--type', '-t', type=str, default='', help='set instance type')
    parser.add_argument('--price', '-p', type=str, default='', help='set spot bidding price')
parser.add_argument('--subnet', '-sn', type=str, default='', help='set subnet like us-west-1a')
parser.add_argument('--name', '-n', type=str, default='', help='set exp prefix name and new file name')
parser.add_argument('--debug', action='store_true', default=False, help="run code without multiprocessing")
parser.add_argument(
'--prefix', type=str, default=None,
help='set the additional name for experiment prefix'
)
args = parser.parse_args()
# setup ec2
ec2_instance = args.type if args.type else 'm4.4xlarge'
# configure instance
info = config.INSTANCE_TYPE_INFO[ec2_instance]
config.AWS_INSTANCE_TYPE = ec2_instance
config.AWS_SPOT_PRICE = str(info["price"])
n_parallel = int(info["vCPU"]) # make the default 4 if not using ec2
if args.ec2:
mode = 'ec2'
elif args.local_docker:
mode = 'local_docker'
n_parallel = cpu_count() if not args.debug else 1
else:
mode = 'local'
n_parallel = cpu_count() if not args.debug else 1
default_prefix = 'goal-point-nd-trpo'
if args.prefix is None:
exp_prefix = format_experiment_prefix(default_prefix)
elif args.prefix == '':
exp_prefix = default_prefix
else:
exp_prefix = '{}_{}'.format(default_prefix, args.prefix)
vg = VariantGenerator()
vg.add('seed', range(30, 90, 20))
# # GeneratorEnv params
    vg.add('goal_size', [2, 3, 4, 5, 6])  # dimensionality of the N-d point goal space
vg.add('terminal_eps', lambda goal_size: [math.sqrt(goal_size) / math.sqrt(2) * 0.3])
vg.add('only_feasible', [True])
vg.add('goal_range', [5]) # this will be used also as bound of the state_space
vg.add('state_bounds', lambda goal_range, goal_size, terminal_eps:
[(1, goal_range) + (0.3,) * (goal_size - 2) + (goal_range, ) * goal_size])
vg.add('sample_unif_feas', [True])
vg.add('distance_metric', ['L2'])
vg.add('goal_weight', [1])
#############################################
vg.add('min_reward', lambda goal_weight: [goal_weight * 0.1]) # now running it with only the terminal reward of 1!
vg.add('max_reward', lambda goal_weight: [goal_weight * 0.9])
vg.add('horizon', [200])
vg.add('outer_iters', [200])
vg.add('inner_iters', [5])
vg.add('pg_batch_size', [20000])
# policy initialization
vg.add('output_gain', [1])
vg.add('policy_init_std', [1])
print('Running {} inst. on type {}, with price {}, parallel {}'.format(
vg.size, config.AWS_INSTANCE_TYPE,
config.AWS_SPOT_PRICE, n_parallel
))
for vv in vg.variants():
if mode in ['ec2', 'local_docker']:
run_experiment_lite(
# use_cloudpickle=False,
stub_method_call=run_task,
variant=vv,
mode=mode,
# Number of parallel workers for sampling
n_parallel=n_parallel,
# Only keep the snapshot parameters for the last iteration
snapshot_mode="last",
seed=vv['seed'],
# plot=True,
exp_prefix=exp_prefix,
# exp_name=exp_name,
sync_s3_pkl=True,
# for sync the pkl file also during the training
sync_s3_png=True,
sync_s3_html=True,
# # use this ONLY with ec2 or local_docker!!!
pre_commands=[
'export MPLBACKEND=Agg',
'pip install --upgrade pip',
'pip install --upgrade -I tensorflow',
'pip install git+https://github.com/tflearn/tflearn.git',
'pip install dominate',
'pip install multiprocessing_on_dill',
'pip install scikit-image',
'conda install numpy -n rllab3 -y',
],
)
if mode == 'local_docker':
sys.exit()
else:
run_experiment_lite(
# use_cloudpickle=False,
stub_method_call=run_task,
variant=vv,
mode='local',
n_parallel=n_parallel,
# Only keep the snapshot parameters for the last iteration
snapshot_mode="last",
seed=vv['seed'],
exp_prefix=exp_prefix,
print_command=False,
)
if args.debug:
sys.exit()
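Each vg.add() above contributes either a list of values or a lambda whose argument names refer to previously added keys; VariantGenerator expands the cross product of all of them into vg.variants(). A toy re-implementation of that expansion (not rllab's code) shows the mechanics:

```python
import inspect

# Toy variant expansion: plain lists are crossed; callables are resolved
# per variant against already-expanded keys, mirroring the lambdas above
# that depend on goal_size, goal_range, and goal_weight.
def expand_variants(spec):
    variants = [{}]
    for key, values in spec:
        expanded = []
        for variant in variants:
            if callable(values):
                args = [variant[p] for p in inspect.signature(values).parameters]
                choices = values(*args)
            else:
                choices = values
            for choice in choices:
                expanded.append(dict(variant, **{key: choice}))
        variants = expanded
    return variants

spec = [
    ('goal_size', [2, 3]),
    ('goal_weight', [1]),
    ('min_reward', lambda goal_weight: [goal_weight * 0.1]),
]
print(len(expand_variants(spec)))  # -> 2 variants, each with min_reward 0.1
```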
avg_line_length: 40.05 | max_line_length: 124 | alphanum_fraction: 0.626895 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 3,253 / 0.290084
hexsha: 83c70c28fab499273c642ae79b683ba8ed829bd8 | size: 307 | ext: py | lang: Python
path: pyNastran/bdf/bdf_interface/encoding.py | repo: ACea15/pyNastran @ 5ffc37d784b52c882ea207f832bceb6b5eb0e6d4 | licenses: ["BSD-3-Clause"]
stars: 293 (2015-03-22T20:22:01.000Z to 2022-03-14T20:28:24.000Z) | issues: 512 (2015-03-14T18:39:27.000Z to 2022-03-31T16:15:43.000Z) | forks: 136 (2015-03-19T03:26:06.000Z to 2022-03-25T22:14:54.000Z)
def decode_lines(lines_bytes, encoding: str):
    """Decode a list of bytes lines using the given encoding; str lines pass through."""
if isinstance(lines_bytes[0], bytes):
lines_str = [line.decode(encoding) for line in lines_bytes]
elif isinstance(lines_bytes[0], str):
lines_str = lines_bytes
else:
raise TypeError(type(lines_bytes[0]))
return lines_str
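A quick usage sketch for decode_lines, with made-up BDF-style lines just to exercise both branches:

```python
# Bytes lines are decoded with the given encoding...
print(decode_lines([b'GRID,1\n', b'GRID,2\n'], 'utf-8'))  # ['GRID,1\n', 'GRID,2\n']
# ...while str lines pass through unchanged.
print(decode_lines(['GRID,1\n'], 'utf-8'))                # ['GRID,1\n']
```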
avg_line_length: 34.111111 | max_line_length: 67 | alphanum_fraction: 0.687296 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 0 / 0
hexsha: 83c83c646b4979fd4f5db5513084e63e8c7ce3e0 | size: 2,176 | ext: py | lang: Python
path: health_reminder.py | repo: carlkho-cvk/tbe_discord @ f1dc05d0cd288b1be4e8d164f58056422627fcc1 | licenses: ["MIT"]
stars: null | issues: null | forks: null
# Fitness monday variables
morning_1 = "10:00"
morning_2 = "8:00"
afternoon_1 = "13:00"
afternoon_2 = "14:30"
afternoon_3 = "15:30"
afternoon_4 = "17:55"
evening_1 = "20:30"
evening_2 = "21:10"
date_announce = [1, 2, 3, 4, 5]
image_file_list = [
'Exercise_Three.png',
'Exercise_Two_2.png'
]
# Messages to be displayed for fitness monday
# messageContentVariables
morning_1_msg = "Do some stretches! @everyone."
morning_2_msg = "Drink water! @everyone."
afternoon_1_msg = "Breathe fresh air outside before starting your afternoon shift. @everyone."
afternoon_2_msg = "Drink a glass of water. Stay hydrated, @everyone!"
afternoon_3_msg = "Get up and stretch! @everyone."
afternoon_4_msg = "Go out and breathe before the evening sync. @everyone."
evening_1_msg = "Do some stretches! @everyone."
evening_2_msg = "Drink water. Good night, @everyone."
# Handler for all tasks in fitness monday
async def health_reminder(current_time, current_week_day, client, channel_id):
    '''
    current_time - required, formatted "HH:MM:SS"
    current_week_day - Monday=1 ... Sunday=7
    client - discord client
    channel_id - the text channel the bot sends to; the bot checks the
        schedule above and sends the matching message to this channel
    '''
text_channel = client.get_channel(channel_id)
if current_week_day in date_announce:
if f'{morning_1}:00' == current_time:
await text_channel.send(morning_1_msg)
if f'{morning_2}:00' == current_time:
await text_channel.send(morning_2_msg)
if f'{afternoon_1}:00' == current_time:
await text_channel.send(afternoon_1_msg)
if f'{afternoon_2}:00' == current_time:
await text_channel.send(afternoon_2_msg)
if f'{afternoon_3}:00' == current_time:
await text_channel.send(afternoon_3_msg)
if f'{afternoon_4}:00' == current_time:
await text_channel.send(afternoon_4_msg)
if f'{evening_1}:00' == current_time:
await text_channel.send(evening_1_msg)
if f'{evening_2}:00' == current_time:
await text_channel.send(evening_2_msg)
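The chain of ifs above can also be written as a single table lookup. A sketch of that hypothetical refactor, with the same behavior:

```python
# Hypothetical table-driven dispatch: map "HH:MM" to its message and do
# one lookup; current_time is "HH:MM:SS", so the ":SS" suffix is stripped.
schedule = {
    morning_1: morning_1_msg,
    morning_2: morning_2_msg,
    afternoon_1: afternoon_1_msg,
    afternoon_2: afternoon_2_msg,
    afternoon_3: afternoon_3_msg,
    afternoon_4: afternoon_4_msg,
    evening_1: evening_1_msg,
    evening_2: evening_2_msg,
}

async def health_reminder_v2(current_time, current_week_day, client, channel_id):
    text_channel = client.get_channel(channel_id)
    if current_week_day in date_announce:
        msg = schedule.get(current_time[:-3])
        if msg:
            await text_channel.send(msg)
```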
avg_line_length: 31.085714 | max_line_length: 94 | alphanum_fraction: 0.664522 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 1,239 / 0.569393 | documentation: 952 / 0.4375
hexsha: 83ca8eae1114abccb3186c9a6251ba6c788bcf35 | size: 6,559 | ext: py | lang: Python
path: Image_Content_Analysis/deeplab-pytorch-master/labelImsTest.py | repo: PonceLab/as-simple-as-possible @ a4093651f226d749b204c48b623acb28221c3bc2 | licenses: ["MIT"]
stars: 1 (2021-04-16T02:08:39.000Z to 2021-04-16T02:08:39.000Z) | issues: 1 (2021-07-27T16:17:41.000Z to 2021-07-27T16:17:41.000Z) | forks: null
#!/usr/bin/env python
# coding: utf-8
#
# Author: Kazuto Nakashima
# URL: https://kazuto1011.github.io
# Date: 07 January 2019
from __future__ import absolute_import, division, print_function
import click
import cv2
import matplotlib
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from omegaconf import OmegaConf
from libs.models import *
from libs.utils import DenseCRF
def get_device(cuda):
cuda = cuda and torch.cuda.is_available()
device = torch.device("cuda" if cuda else "cpu")
if cuda:
current_device = torch.cuda.current_device()
print("Device:", torch.cuda.get_device_name(current_device))
else:
print("Device: CPU")
return device
def get_classtable(CONFIG):
with open(CONFIG.DATASET.LABELS) as f:
classes = {}
for label in f:
label = label.rstrip().split("\t")
classes[int(label[0])] = label[1].split(",")[0]
return classes
def setup_postprocessor(CONFIG):
# CRF post-processor
postprocessor = DenseCRF(
iter_max=CONFIG.CRF.ITER_MAX,
pos_xy_std=CONFIG.CRF.POS_XY_STD,
pos_w=CONFIG.CRF.POS_W,
bi_xy_std=CONFIG.CRF.BI_XY_STD,
bi_rgb_std=CONFIG.CRF.BI_RGB_STD,
bi_w=CONFIG.CRF.BI_W,
)
return postprocessor
def preprocessing(image, device, CONFIG):
# Resize
scale = CONFIG.IMAGE.SIZE.TEST / max(image.shape[:2])
image = cv2.resize(image, dsize=None, fx=scale, fy=scale)
raw_image = image.astype(np.uint8)
# Subtract mean values
image = image.astype(np.float32)
image -= np.array(
[
float(CONFIG.IMAGE.MEAN.B),
float(CONFIG.IMAGE.MEAN.G),
float(CONFIG.IMAGE.MEAN.R),
]
)
# Convert to torch.Tensor and add "batch" axis
image = torch.from_numpy(image.transpose(2, 0, 1)).float().unsqueeze(0)
image = image.to(device)
return image, raw_image
def inference(model, image, raw_image=None, postprocessor=None):
_, _, H, W = image.shape
# Image -> Probability map
logits = model(image)
logits = F.interpolate(logits, size=(H, W), mode="bilinear", align_corners=False)
probs = F.softmax(logits, dim=1)[0]
probs = probs.cpu().numpy()
# Refine the prob map with CRF
if postprocessor and raw_image is not None:
probs = postprocessor(raw_image, probs)
labelmap = np.argmax(probs, axis=0)
return labelmap
def inferenceHierarchy(model, image, raw_image=None, postprocessor=None, sizeThresh=1/9, nIterations=10):
_, _, H, W = image.shape
# Image -> Probability map
logits = model(image)
logits = F.interpolate(logits, size=(H, W), mode="bilinear", align_corners=False)
probs = F.softmax(logits, dim=1)[0]
probs = probs.cpu().numpy()
probsList = []
probsList.append(probs)
labelmapList=[]
for ndx in range(0,nIterations):
# Refine the prob map with CRF
if postprocessor and raw_image is not None:
probs2 = postprocessor(raw_image, probs)
labelmap2 = np.argmax(probs2, axis=0)
labelmapList.append(labelmap2)
labels = np.unique(labelmap2)
hasBigSegs=False
for label in labels:
if (np.sum(labelmap2==label)/labelmap2.size)>sizeThresh:
probs[label,:,:]=probs.min()
hasBigSegs=True
probs=probs/np.sum(probs,(0))
probsList.append(probs)
if not hasBigSegs:
break
return labelmapList,probsList
def singleHierarchy(config_path, model_path, image_path, cuda, crf, sizeThresh=1/9, nIterations=10, doPlot=True):
"""
Inference from a single image
"""
# Setup
CONFIG = OmegaConf.load(config_path)
device = get_device(cuda)
torch.set_grad_enabled(False)
classes = get_classtable(CONFIG)
postprocessor = setup_postprocessor(CONFIG) if crf else None
model = eval(CONFIG.MODEL.NAME)(n_classes=CONFIG.DATASET.N_CLASSES)
state_dict = torch.load(model_path, map_location=lambda storage, loc: storage)
model.load_state_dict(state_dict)
model.eval()
model.to(device)
print("Model:", CONFIG.MODEL.NAME)
# Inference
image = cv2.imread(image_path, cv2.IMREAD_COLOR)
image, raw_image = preprocessing(image, device, CONFIG)
# labelmap = inference(model, image, raw_image, postprocessor)
    labelmapList, probsList = inferenceHierarchy(model, image, raw_image, postprocessor, sizeThresh, nIterations)
if doPlot:
for labelmap in labelmapList:
labels = np.unique(labelmap)
# Show result for each class
            # subplot() expects integers; np.floor/np.ceil return floats
            rows = int(np.floor(np.sqrt(len(labels) + 1)))
            cols = int(np.ceil((len(labels) + 1) / rows))
plt.figure(figsize=(10, 10))
ax = plt.subplot(rows, cols, 1)
ax.set_title("Input image")
ax.imshow(raw_image[:, :, ::-1])
ax.axis("off")
for i, label in enumerate(labels):
mask = labelmap == label
ax = plt.subplot(rows, cols, i + 2)
ax.set_title(classes[label])
ax.imshow(raw_image[..., ::-1])
ax.imshow(mask.astype(np.float32), alpha=0.5)
ax.axis("off")
plt.tight_layout()
plt.show()
#single(r".\configs\cocostuff164k.yaml",r"C:\Users\ponce\Desktop\CarlosSetUpFilesHere\CompressionPaperReviewResponse\resources\deeplab-pytorch-master\data\models\coco\deeplabv1_resnet101\caffemodel\deeplabv2_resnet101_msc-cocostuff164k-100000.pth",r"image.jpg",True,True)
#python demo.py single --config-path .\configs\voc12.yaml --model-path "C:\Users\ponce\Desktop\CarlosSetUpFilesHere\CompressionPaperReviewResponse\resources\deeplab-pytorch-master\data\models\voc12\deeplabv2_resnet101_msc\caffemodel\deeplabv2_resnet101_msc-vocaug.pth" --image-path image.jpg
#python demo.py single --config-path .\configs\cocostuff164k.yaml --model-path "C:\Users\ponce\Desktop\CarlosSetUpFilesHere\CompressionPaperReviewResponse\resources\deeplab-pytorch-master\data\models\coco\deeplabv1_resnet101\caffemodel\deeplabv2_resnet101_msc-cocostuff164k-100000.pth" --image-path image.jpg
if __name__ == "__main__":
singleHierarchy()
avg_line_length: 34.161458 | max_line_length: 309 | alphanum_fraction: 0.639579 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 1,481 / 0.225797
hexsha: 83cb1b811a0c4db430f4a4ca89a5f71fcbd3b310 | size: 1,131 | ext: py | lang: Python
path: setup.py | repo: kvietcong/md-tangle @ 4170c72f7119adc62eeb75822081a6858ed3c9dc | licenses: ["MIT"]
stars: 14 (2019-04-15T08:51:10.000Z to 2022-03-25T20:37:28.000Z) | issues: 4 (2019-03-09T22:02:50.000Z to 2021-08-24T21:03:48.000Z) | forks: 3 (2020-12-24T05:23:53.000Z to 2022-03-23T14:00:44.000Z)
import setuptools
import md_tangle
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name=md_tangle.__title__,
version=md_tangle.__version__,
license=md_tangle.__license__,
author=md_tangle.__author__,
author_email=md_tangle.__author_email__,
description="Generates ('tangles') source code from Markdown documents",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/joakimmj/md-tangle",
packages=setuptools.find_packages(),
keywords=['markdown', 'tangle', 'literate programming'],
platforms=['any'],
classifiers=[
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Natural Language :: English",
'Topic :: Text Processing :: Markup',
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3"
],
entry_points={
'console_scripts': [
'md-tangle = md_tangle.main:main',
]
},
)
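This setup.py pulls its metadata from dunder attributes on the md_tangle package, so the package's __init__.py presumably defines something like the following. This is a hypothetical sketch; the actual values live in the md-tangle repository:

```python
# Hypothetical md_tangle/__init__.py metadata consumed by setup.py above.
__title__ = 'md-tangle'
__version__ = '0.0.0'   # placeholder; the real version is in the repo
__license__ = 'MIT'
__author__ = '...'        # elided; see the repository
__author_email__ = '...'  # elided; see the repository
```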
avg_line_length: 31.416667 | max_line_length: 76 | alphanum_fraction: 0.650752 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 493 / 0.435897
hexsha: 83cb7ebba4b89b28bb78615faadb44744d2cc3e7 | size: 3,658 | ext: py | lang: Python
path: src/dvi/bayes_models.py | repo: luoyan407/predict_trustworthiness @ 8f394fc511b9aa31a766a30f0e1b059481aa5f76 | licenses: ["MIT"]
stars: 5 (2021-10-04T06:11:21.000Z to 2022-02-22T17:57:43.000Z) | issues: null | forks: null
import torch
import torch.nn as nn
import torch.nn.functional as F
from .bayes_layers import VariationalLinearCertainActivations, VariationalLinearReLU
from .variables import GaussianVar
class MLP(nn.Module):
def __init__(self, x_dim, y_dim, hidden_size=None):
super(MLP, self).__init__()
self.sizes = [x_dim]
if hidden_size is not None:
self.sizes += hidden_size
self.sizes += [y_dim]
self.make_layers()
def make_layers(self):
layers = [VariationalLinearCertainActivations(self.sizes[0], self.sizes[1])]
for in_dim, out_dim in zip(self.sizes[1:-1], self.sizes[2:]):
# print('in_dim:{}, out_dim:{}'.format(in_dim, out_dim))
layers.append(VariationalLinearReLU(in_dim, out_dim))
self.layers = nn.Sequential(*layers)
# self.layers = nn.Sequential(
# VariationalLinearCertainActivations(1, 128),
# VariationalLinearReLU(128, 128),
# VariationalLinearReLU(128, 2)
# )
#
# self.layers = nn.Sequential(VariationalLinearCertainActivations(self.sizes[0], self.sizes[1]))
# for in_dim, out_dim in zip(self.sizes[1:-1], self.sizes[2:]):
# print('in_dim:{}, out_dim:{}'.format(in_dim, out_dim))
# self.layers.add_module('{}-{}'.format(in_dim, out_dim), VariationalLinearReLU(in_dim, out_dim))
def forward(self, input):
return self.layers(input)
def surprise(self):
all_surprise = 0
for layer in self.layers:
all_surprise += layer.surprise()
return all_surprise
def forward_mcmc(self, input, n_samples=None, average=False):
h = self.layers[0].forward_mcmc(input)
for layer in self.layers[1:]:
h = layer.forward_mcmc(F.relu(h), n_samples)
return h
class AdaptedMLP(object):
def __init__(self, mlp, adapter, device=torch.device('cpu')):
self.mlp = mlp.to(device)
self.__dict__.update(mlp.__dict__)
self.device = device
self.make_adapters(adapter)
def make_adapters(self, adapter):
self.adapter = {}
for ad in ['in', 'out']:
self.adapter[ad] = {
'scale': torch.tensor(adapter[ad]['scale']).to(self.device),
'shift': torch.tensor(adapter[ad]['shift']).to(self.device)
}
def __call__(self, input):
x_ad = self.adapter['in']['scale'] * input + self.adapter['in']['shift']
self.pre_adapt = self.mlp(x_ad)
mean = self.adapter['out']['scale'] * self.pre_adapt.mean + self.adapter['out']['shift']
cov = self.adapter['out']['scale'].reshape(-1, 1) * self.adapter['out']['scale'].reshape(1, -1) * self.pre_adapt.var
return GaussianVar(mean, cov)
def __repr__(self):
return "AdaptedMLP(\n" + self.mlp.__repr__() + ")"
def surprise(self):
return self.mlp.surprise()
def parameters(self):
return self.mlp.parameters()
def mcmc(self, input, n_samples=None):
x_ad = self.adapter['in']['scale'] * input + self.adapter['in']['shift']
self.pre_adapt = self.mlp.forward_mcmc(x_ad, n_samples)
mean = self.adapter['out']['scale'] * self.pre_adapt + self.adapter['out']['shift']
return mean
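The covariance line in __call__ encodes the identity Cov(s ⊙ x + b) = (s sᵀ) ⊙ Cov(x). A standalone numeric check of that identity, in plain torch and independent of the variational layers:

```python
import torch

# For y = s * x + b applied elementwise, Cov(y) = diag(s) Cov(x) diag(s),
# i.e. the outer product s s^T multiplied elementwise into Cov(x).
s = torch.tensor([2.0, 0.5])
cov_x = torch.tensor([[1.0, 0.3],
                      [0.3, 2.0]])
cov_y = s.reshape(-1, 1) * s.reshape(1, -1) * cov_x  # same form as __call__
assert torch.allclose(cov_y, torch.diag(s) @ cov_x @ torch.diag(s))
```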
avg_line_length: 37.71134 | max_line_length: 124 | alphanum_fraction: 0.61099 | classes: 3,452 / 0.943685 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 1,007 / 0.275287
hexsha: 83cdd7dc6122382de1e26b815171dea31d9c7f62 | size: 917 | ext: py | lang: Python
path: rondgang/models.py | repo: eternallyBaffled/rondgang @ 6d0442279b60a75518579df88bfc53a57d98c2b3 | licenses: ["MIT"]
stars: null | issues: null | forks: null
from datetime import date
from django.db import models
# Create your models here.
class Gemeente(models.Model):
naam_text = models.CharField(max_length=50)
deelgemeente_text = models.CharField(max_length=50)
class Segment(models.Model):
gemeente = models.CharField(max_length=55)
straat = models.CharField(max_length=50)
commentaar = models.TextField(blank=True)
def __str__(self):
return '%s, %s' % (self.straat, self.gemeente)
class Meta:
verbose_name_plural = 'segmenten'
class Status(models.Model):
segment = models.ForeignKey(Segment, on_delete=models.CASCADE)
afgewerkt = models.BooleanField()
# lid = models.ForeignKey(Lid, to_field='naam')
lid = models.CharField(max_length=50, null=True)
datum = models.DateField(default=date.today)
commentaar = models.TextField(blank=True)
class Meta:
verbose_name_plural = 'statussen'
avg_line_length: 28.65625 | max_line_length: 66 | alphanum_fraction: 0.715376 | classes: 827 / 0.901854 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 106 / 0.115594
hexsha: 83cfac902258a852b3126b17d68f53a3484fe5c0 | size: 1,757 | ext: py | lang: Python
path: code_examples/Python/app_debugger/test_client/test_debugger.py | repo: VPoser/docs-and-training @ 55d4aa4c5b964f3c810d58008db4c5b75260322b | licenses: ["Zlib"]
stars: null | issues: 4 (2019-05-29T06:16:56.000Z to 2021-03-31T19:03:36.000Z) | forks: 3 (2019-04-02T08:48:31.000Z to 2020-09-23T08:13:35.000Z)
#!/usr/bin/env python
"""Simple test client to call the debugger SOAP service"""
import os
import sys
import base64
import getpass
from suds.client import Client
from suds.cache import NoCache
from suds import WebFault, MethodNotFound
from clfpy import AuthClient
auth_endpoint = 'https://api.hetcomp.org/authManager/AuthManager?wsdl'
extra_pars = "auth={},WFM=dummy,".format(auth_endpoint)
def soap_call(wsdl_url, methodname, method_args):
"""Calls a SOAP webmethod at a given URL with given arguments."""
client = Client(wsdl_url, cache=NoCache())
    try:
        method = getattr(client.service, methodname)
    except MethodNotFound as error:
        return error
    try:
        response = method(*method_args)
    except WebFault as error:
        return error
    return response
def main():
port = 80
try:
context_root = os.environ["CONTEXT_ROOT"]
except KeyError:
print("Error: environment variable CONTEXT_ROOT not set.")
exit(1)
url = "http://localhost:{}{}/Debugger?wsdl".format(port, context_root)
print("wsdl URL is {}".format(url))
print("Obtaining session token")
user = input("Enter username: ")
project = input("Enter project: ")
password = getpass.getpass(prompt="Enter password: ")
auth = AuthClient(auth_endpoint)
token = auth.get_session_token(user, project, password)
print("Calling parameterDebugger()")
response = soap_call(url, "parameterDebugger", ["serviceID1", token, extra_pars, "Some input", "Label1", "Second input", "Label2"])
html = base64.b64decode(response).decode()
with open("test.html", 'w') as fout:
fout.write(html)
print("Result written to test.html")
if __name__ == "__main__":
main()
avg_line_length: 27.030769 | max_line_length: 135 | alphanum_fraction: 0.682413 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 569 / 0.323847
hexsha: 83cfd9aa79927b2baa0758f343509a236b7d9e4c | size: 393 | ext: py | lang: Python
path: bai01/keocatgiay.py | repo: YtalYa/CSx101-A1-2021-02 @ 5d95faa483c7a98d8ea75fb3a1720c12e1c1e727 | licenses: ["MIT"]
stars: null | issues: null | forks: null
#!/usr/bin/python3
# from time import time
# from math import sqrt
# with open("inp.txt", "r") as f:
# a, b = list(i for i in f.read().split())
a, b = input().split()
# print(a,b,c, type(a), type(int(a)))
a = int(a)
b = int(b)
# st = time()
# -----
# The answer is the sum of two arithmetic series:
#   s1 = 0 + 1 + ... + (a - 1)                  = a * (a - 1) / 2
#   s2 = (b - a) + (b - a + 1) + ... + (b - 2),   a - 1 terms
s1 = a * (a - 1) // 2
cuoi = b - 2  # last term of the second series ("cuoi" = end)
dau = b - a   # first term of the second series ("dau" = start)
s2 = (dau + cuoi) * (a - 1) // 2
result = s1 + s2
# -----
print(result)
# print(time() - st)
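A brute-force check of the two closed forms against their explicit sums (a throwaway verification helper, not part of the solution):

```python
# s1 is the sum 0 + 1 + ... + (a - 1); s2 sums the a - 1 integers from
# b - a up to b - 2. Verify both closed forms for small a and b.
def check(a, b):
    assert a * (a - 1) // 2 == sum(range(a))
    assert ((b - a) + (b - 2)) * (a - 1) // 2 == sum(range(b - a, b - 1))

for a in range(1, 20):
    for b in range(a, 40):
        check(a, b)
```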
avg_line_length: 17.086957 | max_line_length: 43 | alphanum_fraction: 0.508906 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 224 / 0.569975
hexsha: 83d10767f4acd2d7c3295abb41942f7d2223b741 | size: 6,295 | ext: py | lang: Python
path: data_manager/acs/gui_ACS_sched_blocks_script_0.py | repo: IftachSadeh/ctaOperatorGUI @ f6365a86440dd2404da0bc139cd9345eb3dcb566 | licenses: ["MIT"]
stars: 3 (2018-08-28T22:44:23.000Z to 2018-10-24T09:16:34.000Z) | issues: 28 (2020-04-02T14:48:29.000Z to 2021-05-27T08:10:36.000Z) | forks: null
# import tcs
# import daqctrl, inspect
# ------------------------------------------------------------------
# install the script by:
# cd $INTROOT/config/scripts
# ln -s $guiInstalDir/ctaOperatorGUI/ctaGuiBack/ctaGuiBack/acs/guiACS_schedBlocks_script0.py
# ------------------------------------------------------------------
# ------------------------------------------------------------------
from random import Random
rndGen = Random(10987268332)
waitTime = dict()
waitTime['config_daq'] = rndGen.randint(1, 3)
waitTime['config_camera'] = rndGen.randint(1, 5)
waitTime['config_mount'] = rndGen.randint(2, 7)
waitTime['finish_daq'] = rndGen.randint(1, 6)
waitTime['finish_camera'] = rndGen.randint(1, 3)
waitTime['finish_mount'] = rndGen.randint(1, 2)
def get_short_wait(duration, wait_type):
return waitTime[wait_type] if duration > 1 else 1
# ------------------------------------------------------------------
#
# ------------------------------------------------------------------
__phases__ = [
"configuring",
"config_daq",
"config_camera",
"config_mount",
"take_data",
"closing",
"finish_daq",
"finish_camera",
"finish_mount",
]
# ------------------------------------------------------------------
#
# ------------------------------------------------------------------
def configuring():
coords = observationBlock.src.coords
p = None
try:
p = (coords.equatorial.ra, coords.equatorial.dec)
except:
pass
if not p:
try:
p = (coords.horizontal.alt, coords.horizontal.az)
except:
pass
if not p:
try:
p = (coords.galactic.lon, coords.galactic.lat)
except:
pass
if not p:
p = (0, 0)
print "Coordinates used: (" + str(p[0]) + ", " + str(p[1]) + ")"
try:
divergence = schedulingBlock.config.instrument.pointing_mode.divergent_.divergence
print "Divergence used: " + str(divergence)
except:
print "Pointing mode is not divergent"
pass
# resources.target = tcs.SkyEquatorialTarget(
# p[0], p[1], tcs.ICRS, tcs.J2000, 0.0, 0.0, 0.0, 0.0
# )
allowPhaseStart("config_daq")
allowPhaseStart("config_camera")
allowPhaseStart("config_mount")
return
# ------------------------------------------------------------------
def config_daq():
    updatePhase("config_daq", "config_daq has begun ...", 0)
allowPhaseStart("config_camera")
allowPhaseStart("config_mount")
# operationStatus = daq().operationStatus
# # Check daq operational status
# if operationStatus != daqctrl.NOMINAL and operationStatus != daqctrl.IDLE:
# raise RuntimeError('DAQ status not idle/nominal: ' + operationStatus)
# # Configure daq
# daqConfigured = configureDAQ()
# if not daqConfigured:
# raise RuntimeError('DAQ configuration failed')
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'config_daq'))
updatePhase("config_daq", "config_daq has ended...", 100)
return
# ------------------------------------------------------------------
def config_camera():
    updatePhase("config_camera", "config_camera has begun ...", 0)
allowPhaseStart("config_mount")
# cameraConfig = schedulingBlock.config.camera_configuration
# telescopes.configureCameras(cameraConfig)
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'config_camera'))
updatePhase("config_camera", "config_camera has ended...", 100)
return
# ------------------------------------------------------------------
def config_mount():
    updatePhase("config_mount", "config_mount has begun ...", 0)
# telescopes.startSlewing(resources.target)
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'config_mount'))
updatePhase("config_mount", "config_mount has ended...", 100)
return
# ------------------------------------------------------------------
def take_data():
    updatePhase("take_data", "take_data has begun ...", 0)
# daq().moveToNextOutputBlock(daqctrl.ZFITS_ZLIB)
# resources.trackingDuration = blockDuration
# telescopes.startTracking(resources.trackingDuration,resources.target)
# telescopes.startDataTaking()
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(blockDuration)
# telescopes.stopDataTaking()
updatePhase("take_data", "take_data has ended...", 100)
return
# ------------------------------------------------------------------
def closing():
allowPhaseStart("finish_daq")
allowPhaseStart("finish_camera")
allowPhaseStart("finish_mount")
return
# ------------------------------------------------------------------
def finish_daq():
    updatePhase("finish_daq", "finish_daq has begun ...", 0)
allowPhaseStart("finish_camera")
allowPhaseStart("finish_mount")
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'finish_daq'))
updatePhase("finish_daq", "finish_daq has ended...", 100)
return
# ------------------------------------------------------------------
def finish_camera():
    updatePhase("finish_camera", "finish_camera has begun ...", 0)
allowPhaseStart("finish_mount")
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'finish_camera'))
updatePhase("finish_camera", "finish_camera has ended...", 100)
return
# ------------------------------------------------------------------
def finish_mount():
    updatePhase("finish_mount", "finish_mount has begun ...", 0)
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'finish_mount'))
updatePhase("finish_mount", "finish_mount has ended...", 100)
return
# ------------------------------------------------------------------
def cleanUp():
pass
avg_line_length: 28.2287 | max_line_length: 94 | alphanum_fraction: 0.550278 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 3,796 / 0.603018
hexsha: 83d111ca41c3bb0510b5e6661f1236eaf7537220 | size: 576 | ext: py | lang: Python
path: msort/check/age.py | repo: leighmacdonald/msort @ b9182d7e3f01ffdb85229dd6e74ad270c766a2d8 | licenses: ["MIT"]
stars: 4 (2015-02-22T04:27:23.000Z to 2021-11-30T14:39:10.000Z) | issues: null | forks: null
"""
Module to scan for empty folders and directories
"""
from time import time
from msort.check import BaseCheck, CheckSkip
class AgeCheck(BaseCheck):
"""
A simple checker which will validate a file or folders age.
"""
def __call__(self, section, path):
if self.conf.getboolean('minimum_age', 'enabled'):
min_age = self.conf.getint('minimum_age', 'days') * 86400
file_age = time() - path.age
if file_age <= min_age:
raise CheckSkip('Path does not meet minimum age requirements: {0}'.format(path))
avg_line_length: 30.315789 | max_line_length: 96 | alphanum_fraction: 0.642361 | classes: 448 / 0.777778 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 222 / 0.385417
hexsha: 83d13060a3394fdf762b857adb260865c20a7f38 | size: 143 | ext: py | lang: Python
path: AtC_Beg_Con_081-090/ABC089/B.py | repo: yosho-18/AtCoder @ 50f6d5c92a01792552c31ac912ce1cd557b06fb0 | licenses: ["MIT"]
stars: null | issues: null | forks: null
n = int(input())
a = input().split()
a = [str(m) for m in a]  # redundant: split() already yields strings
for i in a:
if i == "Y":
print("Four")
exit()
print("Three")
avg_line_length: 14.3 | max_line_length: 23 | alphanum_fraction: 0.461538 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 16 / 0.111888
hexsha: 83d316b3fd73a29aececfa45fc1d41b8ed48ae12 | size: 5,141 | ext: py | lang: Python
path: scripts/component_graph/server/fpm/package_manager.py | repo: winksaville/Fuchsia @ a0ec86f1d51ae8d2538ff3404dad46eb302f9b4f | licenses: ["BSD-3-Clause"]
stars: 3 (2020-08-02T04:46:18.000Z to 2020-08-07T10:10:53.000Z) | issues: null | forks: 1 (2020-08-07T10:11:49.000Z to 2020-08-07T10:11:49.000Z)
#!/usr/bin/env python3
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""PackageManager provides an interface to the JSON FPM API.
The PackageManager interface provides a simple way to retrieve data from the
package manager. It combines this data with annotated data from the disk
(which would be packages if not packaged in BootFS due to implementation
details). It does minimal parsing on this data and passes it back to the user.
"""
import json
import os
import re
import urllib.request
from far.far_reader import far_read
from server.util.url import strip_package_version, package_to_url
from server.util.logging import get_logger
def read_package(far_buffer):
"""Performs a raw_read then intelligently restructures known package structures."""
files = far_read(far_buffer)
if "meta/contents" in files:
content = files["meta/contents"].decode()
files["meta/contents"] = dict(
[tuple(e.rsplit("=", maxsplit=1)) for e in content.split("\n") if e])
if "meta/package" in files:
files["meta/package"] = json.loads(files["meta/package"].decode())
json_extensions = [".cm", ".cmx"]
for ext in json_extensions:
for path in files.keys():
if path.endswith(ext):
files[path] = json.loads(files[path])
return files
class PackageManager:
""" Interface for communicating with a remote package manager. """
def __init__(self, url, fuchsia_root):
self.url = url
if not self.url.endswith("/"):
self.url += "/"
self.package_manager_targets_url = self.url + "targets.json"
self.package_manager_blobs_url = self.url + "blobs/"
self.builtin_path = fuchsia_root + \
"scripts/component_graph/server/static/builtins.json"
self.logger = get_logger(__name__)
def ping(self):
""" Returns true if the ping succeeds else a failure. """
try:
with urllib.request.urlopen(self.url):
return True
except (urllib.error.URLError, urllib.error.HTTPError):
return False
def get_blob(self, merkle):
""" Returns a blob or none if there is any error. """
try:
with urllib.request.urlopen(self.package_manager_blobs_url +
merkle) as blob_response:
return blob_response.read()
except (urllib.error.URLError, urllib.error.HTTPError):
self.logger.warning("Blob: %s does not exist", merkle)
return None
def get_builtin_data(self):
""" Returns the builtin config data as a text string. """
        if os.path.exists(self.builtin_path):
            with open(self.builtin_path, "r") as f:
                return f.read()
        return ""
def get_builtin_packages(self):
""" Returns the builtin packages as a python dict. """
builtin_data = self.get_builtin_data()
        if builtin_data:
            return json.loads(builtin_data)["packages"]
        return []  # avoid returning None, which get_packages() iterates over
def get_matching_package_contents(self, package, data_name_pattern):
"""
        This is a general function that searches the contents of the given package, gets blobs for
        all files in the package whose name matches the given regex, and then returns the matching
        filenames and contents as a list of (name, blob) tuples.
"""
if not "meta/contents" in package["files"]:
return {}
pattern = re.compile(data_name_pattern)
data = []
for name, merkle in package["files"]["meta/contents"].items():
if not pattern.match(name):
continue
blob = self.get_blob(merkle)
if blob:
data.append((name, blob))
return data
def get_packages(self):
""" Returns a list of packages available on the system. """
with urllib.request.urlopen(
self.package_manager_targets_url) as response:
targets = json.loads(response.read().decode())
packages = []
for pkg_name, pkg_data in targets["signed"]["targets"].items():
# TODO(benwright) - strip_package_version is likely to change as we may include
# the variant in a future release.
package = {
"url": package_to_url(strip_package_version(pkg_name)),
"merkle": pkg_data["custom"]["merkle"],
"type": "package",
"files": {},
}
blob = self.get_blob(package["merkle"])
if not blob:
continue
package["files"] = read_package(blob)
packages.append(package)
# Append annotations
for package in self.get_builtin_packages():
builtin_package = package
builtin_package["files"] = {}
builtin_package["merkle"] = "0"
builtin_package["type"] = "builtin"
packages.append(builtin_package)
return packages
return None
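A usage sketch for PackageManager; the URL and checkout path are illustrative only, and note that fuchsia_root needs a trailing slash, since the constructor concatenates it directly:

```python
# Hypothetical wiring against a locally served package repository.
pm = PackageManager("http://127.0.0.1:8083", "/work/fuchsia/")
if pm.ping():
    packages = pm.get_packages()
    print(len(packages), "packages, including builtin annotations")
```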
avg_line_length: 40.480315 | max_line_length: 96 | alphanum_fraction: 0.614861 | classes: 3,720 / 0.723595 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 1,752 / 0.34079
hexsha: 83d375aa877a85c2432fbed5fdd969dd8542a727 | size: 977 | ext: py | lang: Python
path: D01/main.py | repo: itscassie/advent-of-code-2021 @ 731f7b8593e827de7d098f311ab19813f3f1a38d | licenses: ["MIT"]
stars: null | issues: null | forks: null
""" Solve 2021 Day 1: Sonar Sweep Problem """
def solver_problem1(inputs):
""" Count the number of increasement from given list """
num_increased = 0
for i in range(1, len(inputs)):
if inputs[i] > inputs[i - 1]:
num_increased += 1
return num_increased
def solver_problem2(inputs):
""" Count the number of increasement from each sum of 3 number from give list """
num_increased = 0
for i in range(1, len(inputs) - 2):
# sum_prev = inputs[i-1] + inputs[i] + inputs[i+1]
# sum_curr = inputs[i] + inputs[i+1] + inputs[i+2]
# (sum_curr - sum_prev) = inputs[i+2] - inputs[i-1]
if inputs[i + 2] > inputs[i - 1]:
num_increased += 1
return num_increased
if __name__ == "__main__":
with open("./input/d01.txt", encoding='UTF-8') as file:
data = [int(line.strip()) for line in file]
print(solver_problem1(data))
print(solver_problem2(data))
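On the sample depths published in the 2021 Day 1 puzzle statement, the two solvers return 7 and 5; a quick self-check:

```python
# Sample from the puzzle statement: 7 single increases, 5 window increases.
sample = [199, 200, 208, 210, 200, 207, 240, 269, 260, 263]
assert solver_problem1(sample) == 7
assert solver_problem2(sample) == 5
```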
avg_line_length: 34.892857 | max_line_length: 86 | alphanum_fraction: 0.590583 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 370 / 0.37871
hexsha: 83d3b34c981cd51adb859cdd0943e06deba009df | size: 928 | ext: py | lang: Python
path: tests/test_power_converter.py | repo: LauWien/smooth @ 3d2ee96e3c2b2f9d5d805da1a920748f2dbbd538 | licenses: ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"]
stars: 5 (2019-10-15T15:56:35.000Z to 2021-02-04T10:11:31.000Z) | issues: 121 (2020-01-06T14:32:30.000Z to 2021-09-23T11:26:11.000Z) | forks: 6 (2019-10-21T08:36:05.000Z to 2021-03-26T10:37:17.000Z)
from smooth.components.component_power_converter import PowerConverter
import oemof.solph as solph
def test_init():
power_converter = PowerConverter({})
params = {"efficiency": 0, "output_power_max": 100}
power_converter = PowerConverter(params)
assert power_converter.efficiency == params["efficiency"]
assert power_converter.output_power_max == params["output_power_max"]
def test_add_to_oemof_model():
power_converter = PowerConverter({
"bus_input": "bus_input",
"bus_output": "bus_output"
})
oemof_model = solph.EnergySystem()
component = power_converter.add_to_oemof_model({
"bus_input": solph.Bus(label="bus_input"),
"bus_output": solph.Bus(label="bus_output"),
}, oemof_model)
assert len(oemof_model.entities) == 1
assert type(component) == solph.Transformer
assert len(component.inputs) == 1
assert len(component.outputs) == 1
avg_line_length: 33.142857 | max_line_length: 73 | alphanum_fraction: 0.710129 | classes: 0 / 0 | generators: 0 / 0 | decorators: 0 / 0 | async_functions: 0 / 0 | documentation: 152 / 0.163793
hexsha: 83d597052cf5a96babe41243ddbe009226025de6 | size: 2,094 | ext: py | lang: Python
path: compecon/demos/demapp06.py | repo: daniel-schaefer/CompEcon-python @ d3f66e04a7e02be648fc5a68065806ec7cc6ffd6 | licenses: ["MIT"]
stars: 23 (2016-12-14T13:21:27.000Z to 2020-08-23T21:04:34.000Z) | issues: 1 (2017-09-10T04:48:54.000Z to 2018-03-31T01:36:46.000Z) | forks: 13 (2017-02-25T08:10:38.000Z to 2020-05-15T09:49:16.000Z)
from demos.setup import np, plt
from compecon import BasisChebyshev, BasisSpline
from compecon.tools import nodeunif
__author__ = 'Randall'
# DEMAPP06 Chebychev and cubic spline derivative approximation errors
# Function to be approximated
def f(x):
g = np.zeros((3, x.size))
g[0], g[1], g[2] = np.exp(-x), -np.exp(-x), np.exp(-x)
return g
# Set degree of approximation and endpoints of approximation interval
a = -1 # left endpoint
b = 1 # right endpoint
n = 10 # order of interpolation
# Construct refined uniform grid for error plotting
x = nodeunif(1001, a, b)
# Compute actual and fitted values on grid
y, d, s = f(x) # actual
# Construct and evaluate Chebychev interpolant
C = BasisChebyshev(n, a, b, f=f) # choose basis functions
yc = C(x) # values
dc = C(x, 1) # first derivative
sc = C(x, 2) # second derivative
# Construct and evaluate cubic spline interpolant
S = BasisSpline(n, a, b, f=f) # choose basis functions
ys = S(x) # values
ds = S(x, 1) # first derivative
ss = S(x, 2) # second derivative
# Plot function approximation error
plt.figure()
plt.subplot(2, 1, 1)
plt.plot(x, y - yc[0])
plt.ylabel('Chebychev')
plt.title('Function Approximation Error')
plt.subplot(2, 1, 2)
plt.plot(x, y - ys[0])
plt.ylabel('Cubic Spline')
plt.xlabel('x')
# Plot first derivative approximation error
plt.figure()
plt.subplot(2, 1, 1)
plt.plot(x, d - dc[0])
plt.ylabel('Chebychev')
plt.title('First Derivative Approximation Error')
plt.subplot(2, 1, 2)
plt.plot(x, d - ds[0], 'm')
plt.ylabel('Cubic Spline')
plt.xlabel('x')
# Plot second derivative approximation error
plt.figure()
plt.subplot(2, 1, 1)
plt.plot(x, s - sc[0])
plt.ylabel('Chebychev')
plt.title('Second Derivative Approximation Error')
plt.subplot(2, 1, 2)
plt.plot(x, s - ss[0], 'm')
plt.ylabel('Cubic Spline')
plt.xlabel('x')
plt.show()
| 26.506329
| 69
| 0.608883
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 883
| 0.421681
|
83d5ab6a69ea7c486e04c2f09093c01b18d52c8b
| 5,411
|
py
|
Python
|
v3_inc_mem_dropout_dqn_model.py
|
kucharzyk-sebastian/aigym_dqn
|
eef88dafce3f2a1e13ab91a92089ea6a6c359cd6
|
[
"MIT"
] | 2
|
2021-03-25T17:55:58.000Z
|
2021-07-24T14:43:24.000Z
|
v3_inc_mem_dropout_dqn_model.py
|
kucharzyk-sebastian/aigym_dqn
|
eef88dafce3f2a1e13ab91a92089ea6a6c359cd6
|
[
"MIT"
] | null | null | null |
v3_inc_mem_dropout_dqn_model.py
|
kucharzyk-sebastian/aigym_dqn
|
eef88dafce3f2a1e13ab91a92089ea6a6c359cd6
|
[
"MIT"
] | null | null | null |
import random
import gym
import numpy as np
from collections import deque
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import Adam
import tensorflow as tf
import os
import logging
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
logging.getLogger('tensorflow').disabled = True
class SimpleDqnNpcV3:
"Klasa implementująca agenta DQN opartego o prostą sieć neuronową"
def __init__(self, num_of_inputs, num_of_outputs):
"""
        num_of_inputs - length of the input vector fed to the neural network
        num_of_outputs - number of outputs of the neural network
"""
self._num_of_inputs = num_of_inputs
self._num_of_outputs = num_of_outputs
self._exploration_rate = 1.0
self._exploration_rate_min = 0.1
self._exploration_rate_decay = 0.997
self._discout_rate = 0.95
self.memory = deque(maxlen=4096)
self._init_model()
def _init_model(self):
"""
        Initializes the neural network model.
        We picked what we consider the simplest parameters and shape.
"""
self._model = Sequential()
self._model.add(Dense(5 * self._num_of_inputs, input_dim=self._num_of_inputs, activation='relu'))
self._model.add(Dropout(0.15))
self._model.add(Dense(4 * self._num_of_inputs, activation='sigmoid'))
self._model.add(Dropout(0.15))
self._model.add(Dense(self._num_of_outputs, activation='linear'))
self._model.compile(optimizer=Adam(), loss='mean_squared_error')
def act(self, state):
"""Przewiduje i zwraca akcję, którą należy wykonać"""
if np.random.rand() <= self._exploration_rate:
return random.randrange(self._num_of_outputs)
act_values = self._model.predict(state)
return np.argmax(act_values[0])
def retain(self, current_state, taken_action, gained_reward, next_state, is_done):
"""Zapisuje dyn przypadku w pamięci agenta"""
self.memory.append((current_state, taken_action, gained_reward, next_state, is_done))
def replay(self, batch_size):
"""
        Retrains the neural network on a random sample of its memory.
        batch_size - size of the memory sample
"""
batch = random.sample(self.memory, batch_size)
for current_state, taken_action, gained_reward, next_state, is_done in batch:
next_act_best_profit = gained_reward
if not is_done:
future_act_profits = self._model.predict(next_state)
next_act_best_profit = gained_reward + self._discout_rate * np.amax(future_act_profits[0])
current_act_profits = self._model.predict(current_state)
            current_act_profits[0][taken_action] = next_act_best_profit
with tf.device('/device:GPU:0'):
self._model.fit(x=current_state, y=current_act_profits, epochs=1, verbose=0)
if self._exploration_rate > self._exploration_rate_min:
self._exploration_rate *= self._exploration_rate_decay
def load(self, model_path):
"""Wczytuje model z pamięci"""
self._model.load_weights(model_path)
def save(self, model_path):
"""Zapisuje modele do pamięci"""
self._model.save_weights(model_path)
NUM_OF_AGENTS = 4
NUM_OF_EPISODES = 75
FRAMES_PER_EPISODE = 1000
BATCH_SIZE = 16
GAME_ID = "LunarLander-v2"
if __name__ == "__main__":
with tf.device('/device:CPU:0'):
game = gym.make(GAME_ID)
num_of_actions = game.action_space.n
observation_size = game.observation_space.shape[0]
npc = SimpleDqnNpcV3(observation_size, num_of_actions)
is_done = False
avgs = []
for model in range(NUM_OF_AGENTS):
scores = []
for episode in range(NUM_OF_EPISODES):
score = 0
current_state = np.reshape(game.reset(), [1, observation_size])
for frame in range(FRAMES_PER_EPISODE):
# game.render()
action = npc.act(current_state)
new_state, gained_reward, is_done, info = game.step(action)
new_state = np.reshape(new_state, [1, observation_size])
npc.retain(current_state, action, gained_reward, new_state, is_done)
score += gained_reward
current_state = new_state
if len(npc.memory) > BATCH_SIZE:
npc.replay(BATCH_SIZE)
if is_done:
print("episode: {0}/{1}; result: {2}; e: {3} used memory: {4}/{5}; time: {5}"
.format(episode, NUM_OF_EPISODES, score, npc._exploration_rate, len(npc.memory), npc.memory.maxlen, frame))
break
scores.append(score)
if not is_done:
print("episode: {0}/{1}; result: {2}; used memory: {3}/{4}; time: {5}"
.format(episode, NUM_OF_EPISODES, score, len(npc.memory), npc.memory.maxlen, frame))
npc.save("evo_dqn_" + str(model) + ".h5")
avgs.append(sum(scores) / len(scores))
for i, avg in enumerate(avgs):
print("Model {} has avarage: {}".format(i, avg))
print("Overall avg: {}".format(sum(avgs) / len(avgs)))
| 40.684211
| 137
| 0.62798
| 3,048
| 0.560397
| 0
| 0
| 0
| 0
| 0
| 0
| 1,011
| 0.18588
|
83d7033bcfe2791f10a0c9ef5053fa59b2220a75
| 370
|
py
|
Python
|
phlcensus/acs/percapitaincome.py
|
PhiladelphiaController/phlcensus
|
8e15d7c993e397bec4cb06a2144e134ec96c48a1
|
[
"MIT"
] | null | null | null |
phlcensus/acs/percapitaincome.py
|
PhiladelphiaController/phlcensus
|
8e15d7c993e397bec4cb06a2144e134ec96c48a1
|
[
"MIT"
] | null | null | null |
phlcensus/acs/percapitaincome.py
|
PhiladelphiaController/phlcensus
|
8e15d7c993e397bec4cb06a2144e134ec96c48a1
|
[
"MIT"
] | null | null | null |
from .core import ACSDataset
import collections
__all__ = ["PerCapitaIncome"]
class PerCapitaIncome(ACSDataset):
"""
PER CAPITA INCOME IN THE PAST 12 MONTHS (IN 2018 INFLATION-ADJUSTED DOLLARS)
"""
AGGREGATION = None
UNIVERSE = "total population"
TABLE_NAME = "B19301"
RAW_FIELDS = collections.OrderedDict({"001": "per_capita_income"})
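    # Assuming the standard ACS variable naming, raw field "001" corresponds
    # to census variable B19301_001, exposed here as "per_capita_income".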
| 23.125
| 81
| 0.705405
| 288
| 0.778378
| 0
| 0
| 0
| 0
| 0
| 0
| 161
| 0.435135
|
83d7cca1abc5dcfe213ee77fb80532cd598c02d8
| 1,642
|
py
|
Python
|
card-games/lists.py
|
vietanhtran2710/python-exercism
|
1f88dfca56928276ab81a274e8259ce465a2d425
|
[
"MIT"
] | null | null | null |
card-games/lists.py
|
vietanhtran2710/python-exercism
|
1f88dfca56928276ab81a274e8259ce465a2d425
|
[
"MIT"
] | null | null | null |
card-games/lists.py
|
vietanhtran2710/python-exercism
|
1f88dfca56928276ab81a274e8259ce465a2d425
|
[
"MIT"
] | null | null | null |
"""
Card games exercise
"""
def get_rounds(number):
"""
:param number: int - current round number.
:return: list - current round and the two that follow.
"""
return [i + number for i in range(3)]
def concatenate_rounds(rounds_1, rounds_2):
"""
:param rounds_1: list - first rounds played.
:param rounds_2: list - second set of rounds played.
:return: list - all rounds played.
"""
return rounds_1 + rounds_2
def list_contains_round(rounds, number):
"""
:param rounds: list - rounds played.
:param number: int - round number.
:return: bool - was the round played?
"""
return number in rounds
def card_average(hand):
"""
:param hand: list - cards in hand.
:return: float - average value of the cards in the hand.
"""
return sum(hand) / len(hand)
def approx_average_is_average(hand):
"""
:param hand: list - cards in hand.
    :return: bool - whether the approximate average equals the `true average`.
"""
return card_average(hand) in (hand[len(hand) // 2], (hand[0] + hand[-1]) / 2)
def average_even_is_average_odd(hand):
"""
:param hand: list - cards in hand.
:return: bool - are even and odd averages equal?
"""
even = [hand[index] for index in range(0, len(hand), 2)]
odd = [hand[index] for index in range(1, len(hand), 2)]
return card_average(even) == card_average(odd)
def maybe_double_last(hand):
"""
:param hand: list - cards in hand.
:return: list - hand with Jacks (if present) value doubled.
"""
if hand[-1] == 11:
hand[-1] *= 2
return hand
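# Illustrative usage (values chosen for demonstration):
#   get_rounds(27)                     -> [27, 28, 29]
#   concatenate_rounds([27, 28], [29]) -> [27, 28, 29]
#   maybe_double_last([5, 9, 11])      -> [5, 9, 22]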
| 20.78481
| 81
| 0.612667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 899
| 0.547503
|
83d86b44c36b2efbdda4224e3eee5b832e8c3e4e
| 3,109
|
py
|
Python
|
stock_quantity_history_location/tests/test_stock_quantity_history_location.py
|
NextERP-Romania/addons_extern
|
d08f428aeea4cda1890adfd250bc359bda0c33f3
|
[
"Apache-2.0"
] | null | null | null |
stock_quantity_history_location/tests/test_stock_quantity_history_location.py
|
NextERP-Romania/addons_extern
|
d08f428aeea4cda1890adfd250bc359bda0c33f3
|
[
"Apache-2.0"
] | null | null | null |
stock_quantity_history_location/tests/test_stock_quantity_history_location.py
|
NextERP-Romania/addons_extern
|
d08f428aeea4cda1890adfd250bc359bda0c33f3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 ForgeFlow S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.tests.common import SavepointCase
class TestStockQuantityHistoryLocation(SavepointCase):
@classmethod
def setUpClass(cls):
super(TestStockQuantityHistoryLocation, cls).setUpClass()
cls.supplier_location = cls.env.ref("stock.stock_location_suppliers")
cls.main_company = cls.env.ref("base.main_company")
cls.product = cls.env.ref("product.product_product_3")
cls.test_stock_loc = cls.env["stock.location"].create(
{
"usage": "internal",
"name": "Test Stock Location",
"company_id": cls.main_company.id,
}
)
cls.child_test_stock_loc = cls.env["stock.location"].create(
{
"usage": "internal",
"name": "Child Test Stock Location",
"location_id": cls.test_stock_loc.id,
"company_id": cls.main_company.id,
}
)
# Create a move for the past
move = cls.env["stock.move"].create(
{
"name": "Stock move in",
"location_id": cls.supplier_location.id,
"location_dest_id": cls.child_test_stock_loc.id,
"product_id": cls.product.id,
"product_uom": cls.product.uom_id.id,
"product_uom_qty": 100.0,
}
)
move._action_confirm()
move._action_assign()
move_line = move.move_line_ids[0]
move_line.qty_done = 100.0
move._action_done()
move.date = "2019-08-11"
def test_wizard_past_date(self):
wizard = self.env["stock.quantity.history"].create(
{
"location_id": self.test_stock_loc.id,
"include_child_locations": True,
"inventory_datetime": "2019-08-12 00:00:00",
}
)
action = wizard.with_context(company_owned=True).open_at_date()
        self.assertEqual(
self.product.with_context(action["context"]).qty_available, 100.0
)
        self.assertEqual(
self.product.with_context(
location=self.child_test_stock_loc.id, to_date="2019-08-10"
).qty_available,
0.0,
)
def test_wizard_current(self):
wizard = self.env["stock.quantity.history"].create(
{"location_id": self.test_stock_loc.id, "include_child_locations": False}
)
action = wizard.with_context().open_at_date()
        self.assertEqual(action["context"]["compute_child"], False)
        self.assertEqual(action["context"]["location"], self.test_stock_loc.id)
wizard = self.env["stock.quantity.history"].create(
{"location_id": self.test_stock_loc.id, "include_child_locations": True}
)
action = wizard.with_context().open_at_date()
        self.assertEqual(action["context"]["compute_child"], True)
        self.assertEqual(action["context"]["location"], self.test_stock_loc.id)
| 39.35443
| 85
| 0.587327
| 2,965
| 0.953683
| 0
| 0
| 1,466
| 0.471534
| 0
| 0
| 815
| 0.262142
|
83d8ec7a846eebb200f4cc5baae5280d08288d60
| 1,463
|
py
|
Python
|
viterbi_tagging.py
|
cryingmiso/Natural-Language-Processing
|
471c3e69c65cab90fb7c432d2b632801c87f7c8e
|
[
"MIT"
] | null | null | null |
viterbi_tagging.py
|
cryingmiso/Natural-Language-Processing
|
471c3e69c65cab90fb7c432d2b632801c87f7c8e
|
[
"MIT"
] | null | null | null |
viterbi_tagging.py
|
cryingmiso/Natural-Language-Processing
|
471c3e69c65cab90fb7c432d2b632801c87f7c8e
|
[
"MIT"
] | 1
|
2018-04-22T11:41:10.000Z
|
2018-04-22T11:41:10.000Z
|
# -*- coding:utf-8 -*-
states = ("B","M","E","S")
test_input = "BBMESBMEBEBESSMEBBME"
observations = [obs for obs in test_input]
# start probabilities
start_prob = {"B":0.4,"M":0.2,"E":0.2,"S":0.2}
# transition probabilities
transit_prob = {"B": {"B": 0.1, "M": 0.4, "E": 0.4, "S": 0.1},
"M": {"B": 0.1, "M": 0.4, "E": 0.4, "S": 0.1},
"E": {"B": 0.4, "M": 0.1, "E": 0.1, "S": 0.4},
"S": {"B": 0.4, "M": 0.1, "E": 0.1, "S": 0.4}}
# emission probabilities
emission_prob = {'B': {"B": 0.4, "M": 0.2, "E": 0.2, "S": 0.2},
"M": {"B": 0.2, "M": 0.4, "E": 0.2, "S": 0.2},
"E": {"B": 0.2, "M": 0.2, "E": 0.4, "S": 0.2},
"S": {"B": 0.2, "M": 0.2, "E": 0.2, "S": 0.4}}
def viterbi(observs,states,sp,tp,ep):
T = {} # present state
for st in states:
T[st] = (sp[st]*ep[st][observs[0]],[st])
for ob in observs[1:]:
T = next_state(ob,states,T,tp,ep)
prob,labels = max([T[st] for st in T])
return prob,labels
def next_state(ob,states,T,tp,ep):
U = {} # next state
for next_s in states:
U[next_s] = (0,[])
for now_s in states:
p = T[now_s][0] * tp[now_s][next_s] * ep[next_s][ob]
if p>U[next_s][0]:
U[next_s] = [p,T[now_s][1]+[next_s]]
return U
if __name__=="__main__":
    print(observations)
    prob, path = viterbi(observations, states,
                         start_prob, transit_prob, emission_prob)
    print(path)
    print(prob)
| 29.857143
| 64
| 0.455913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 264
| 0.177539
|
83d96b48773397f017510e5831d9b5aab1d08ad6
| 2,534
|
py
|
Python
|
src/haddock/core/cns_paths.py
|
sverhoeven/haddock3
|
d863106f21ebc128f18c6d73a0d15b97824d050c
|
[
"Apache-2.0"
] | null | null | null |
src/haddock/core/cns_paths.py
|
sverhoeven/haddock3
|
d863106f21ebc128f18c6d73a0d15b97824d050c
|
[
"Apache-2.0"
] | null | null | null |
src/haddock/core/cns_paths.py
|
sverhoeven/haddock3
|
d863106f21ebc128f18c6d73a0d15b97824d050c
|
[
"Apache-2.0"
] | null | null | null |
"""
Path to CNS-related files.
Most paths are defined by dictionaries that gather several related
paths. Here, instead of defining the dictionaries with static paths, we
have functions that create those dict-containing paths dynamically. The
default values are defined by:
- axis
- tensors
- translation_vectors
- water_box
But you can re-use the functions to create new dictionaries with updated
paths. This is useful for those cases when the `cns/` folder is moved
to a different folder.
"""
from pathlib import Path
from haddock import toppar_path
# exact file names as present in the cns/ scripts folder
PARAMETERS_FILE = "haddock.param"
TOPOLOGY_FILE = "haddock.top"
LINK_FILE = "protein-allhdg5-4-noter.link"
SCATTER_LIB = "scatter.lib"
INITIAL_POSITIONS_DIR = "initial_positions"
# default prepared paths
parameters_file = Path(toppar_path, PARAMETERS_FILE)
topology_file = Path(toppar_path, TOPOLOGY_FILE)
link_file = Path(toppar_path, LINK_FILE)
scatter_lib = Path(toppar_path, SCATTER_LIB)
def get_translation_vectors(path):
"""
Generate paths for translation vectors.
Parameters
----------
path : pathlib.Path
If absolute, paths will be absolute, if relative paths will be
relative. Adds the INITIAL_POSITIONS_DIR path before the file
name.
"""
translation_vectors = {}
for i in range(51):
_s = f'trans_vector_{i}'
_p = Path(path, INITIAL_POSITIONS_DIR, _s)
translation_vectors[_s] = _p
return translation_vectors
def get_tensors(path):
"""Generate paths for tensors."""
tensors = {
"tensor_psf": Path(path, "tensor.psf"),
"tensor_pdb": Path(path, "tensor.pdb"),
"tensor_para_psf": Path(path, "tensor_para.psf"),
"tensor_para_pdb": Path(path, "tensor_para.pdb"),
"tensor_dani_psf": Path(path, "tensor_dani.psf"),
"tensor_dani_pdb": Path(path, "tensor_dani.pdb"),
}
return tensors
def get_axis(path):
"""Generate paths for axis."""
axis = {
"top_axis": Path(path, "top_axis.pro"),
"par_axis": Path(path, "par_axis.pro"),
"top_axis_dani": Path(path, "top_axis_dani.pro"),
}
return axis
def get_water_box(path):
"""Generate paths for water box."""
water_box = {
"boxtyp20": Path(path, "boxtyp20.pdb"),
}
return water_box
axis = get_axis(toppar_path)
tensors = get_tensors(toppar_path)
translation_vectors = get_translation_vectors(toppar_path)
water_box = get_water_box(toppar_path)
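# Illustrative re-use sketch (the location below is hypothetical): rebuild
# the dictionaries after the cns/ folder has been moved.
# custom_cns = Path("/opt/haddock/cns")
# axis = get_axis(custom_cns)
# tensors = get_tensors(custom_cns)
# translation_vectors = get_translation_vectors(custom_cns)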
| 27.543478
| 72
| 0.696527
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,338
| 0.528019
|
83d9e4d213f9057ac120341c7210734a02cf3aa5
| 3,185
|
py
|
Python
|
src/reanalysis_dbns/utils/__init__.py
|
azedarach/reanalysis-dbns
|
160f405762fb33cfde38b1d3d63cc19e0bb3d591
|
[
"MIT"
] | null | null | null |
src/reanalysis_dbns/utils/__init__.py
|
azedarach/reanalysis-dbns
|
160f405762fb33cfde38b1d3d63cc19e0bb3d591
|
[
"MIT"
] | null | null | null |
src/reanalysis_dbns/utils/__init__.py
|
azedarach/reanalysis-dbns
|
160f405762fb33cfde38b1d3d63cc19e0bb3d591
|
[
"MIT"
] | null | null | null |
"""
Provides helper routines for reanalysis DBNs study.
"""
# License: MIT
from __future__ import absolute_import
from .computation import (calc_truncated_svd, downsample_data,
meridional_mean,
pattern_correlation, select_lat_band,
select_latlon_box, select_lon_band,
standardized_anomalies, zonal_mean)
from .defaults import (get_coordinate_standard_name,
get_default_coefficient_name,
get_default_indicator_name, get_lat_name,
get_level_name, get_lon_name, get_time_name)
from .eofs import (eofs, reofs)
from .preprocessing import (construct_lagged_data,
get_offset_variable_name,
remove_polynomial_trend,
standardize_time_series)
from .time_helpers import datetime_to_string
from .validation import (check_array_shape, check_base_period,
check_fixed_missing_values,
check_max_memory, check_max_parents,
check_number_of_chains,
check_number_of_initializations,
check_number_of_iterations,
check_tolerance, check_warmup,
detect_frequency, ensure_data_array,
ensure_variables_in_data,
has_fixed_missing_values,
is_daily_data,
is_dask_array, is_data_array, is_dataset,
is_integer, is_monthly_data, is_pandas_dataframe,
is_pandas_object, is_pandas_series, is_scalar,
is_xarray_object, remove_missing_features,
restore_missing_features)
__all__ = [
'calc_truncated_svd',
'check_array_shape',
'check_fixed_missing_values',
'check_base_period',
'check_max_memory',
'check_max_parents',
'check_number_of_chains',
'check_number_of_initializations',
'check_number_of_iterations',
'check_tolerance',
'check_warmup',
'construct_lagged_data',
'datetime_to_string',
'detect_frequency',
'downsample_data',
'ensure_data_array',
'ensure_variables_in_data',
'eofs',
'get_coordinate_standard_name',
'get_default_coefficient_name',
'get_default_indicator_name',
'get_lat_name',
'get_level_name',
'get_lon_name',
'get_offset_variable_name',
'get_time_name',
'has_fixed_missing_values',
'is_daily_data',
'is_dask_array',
'is_data_array',
'is_dataset',
'is_integer',
'is_monthly_data',
'is_pandas_dataframe',
'is_pandas_object',
'is_pandas_series',
'is_scalar',
'is_xarray_object',
'meridional_mean',
'pattern_correlation',
'remove_missing_features',
'remove_polynomial_trend',
'restore_missing_features',
'reofs',
'select_lat_band',
'select_latlon_box',
'select_lon_band',
'standardized_anomalies',
'standardize_time_series',
'zonal_mean'
]
| 33.526316
| 74
| 0.621978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,068
| 0.335322
|
83da20131082094621964e1f90f87f88548deff3
| 121
|
py
|
Python
|
output/models/ms_data/regex/re_k14_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 1
|
2021-08-14T17:59:21.000Z
|
2021-08-14T17:59:21.000Z
|
output/models/ms_data/regex/re_k14_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 4
|
2020-02-12T21:30:44.000Z
|
2020-04-15T20:06:46.000Z
|
output/models/ms_data/regex/re_k14_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | null | null | null |
from output.models.ms_data.regex.re_k14_xsd.re_k14 import (
Regex,
Doc,
)
__all__ = [
"Regex",
"Doc",
]
| 12.1
| 59
| 0.603306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 12
| 0.099174
|
83db648d31e6571eb460e05dda3b0b88c276583d
| 2,364
|
py
|
Python
|
generator/src/googleapis/codegen/utilities/json_expander.py
|
romulobusatto/google-api-php-client-services
|
7f3d938a1e4b364afa633b5ba13a0d3c9bc156bf
|
[
"Apache-2.0"
] | 709
|
2018-09-13T01:13:59.000Z
|
2022-03-31T10:28:41.000Z
|
generator/src/googleapis/codegen/utilities/json_expander.py
|
romulobusatto/google-api-php-client-services
|
7f3d938a1e4b364afa633b5ba13a0d3c9bc156bf
|
[
"Apache-2.0"
] | 1,351
|
2018-10-12T23:07:12.000Z
|
2022-03-05T09:25:29.000Z
|
generator/src/googleapis/codegen/utilities/json_expander.py
|
romulobusatto/google-api-php-client-services
|
7f3d938a1e4b364afa633b5ba13a0d3c9bc156bf
|
[
"Apache-2.0"
] | 307
|
2018-09-04T20:15:31.000Z
|
2022-03-31T09:42:39.000Z
|
#!/usr/bin/python2.7
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for simple JSON templates.
A JSON template is a dictionary of JSON data in which string values
may be simple templates in string.Template format (i.e.,
$dollarSignEscaping). By default, the template is expanded against
its own data, optionally updated with additional context.
"""
import json
from string import Template
import sys
__author__ = 'smulloni@google.com (Jacob Smullyan)'
def ExpandJsonTemplate(json_data, extra_context=None, use_self=True):
"""Recursively template-expand a json dict against itself or other context.
The context for string expansion is the json dict itself by default, updated
by extra_context, if supplied.
Args:
json_data: (dict) A JSON object where string values may be templates.
extra_context: (dict) Additional context for template expansion.
use_self: (bool) Whether to expand the template against itself, or only use
extra_context.
Returns:
A dict where string template values have been expanded against
the context.
"""
if use_self:
context = dict(json_data)
else:
context = {}
if extra_context:
context.update(extra_context)
def RecursiveExpand(obj):
if isinstance(obj, list):
return [RecursiveExpand(x) for x in obj]
elif isinstance(obj, dict):
return dict((k, RecursiveExpand(v)) for k, v in obj.iteritems())
elif isinstance(obj, (str, unicode)):
return Template(obj).safe_substitute(context)
else:
return obj
return RecursiveExpand(json_data)
if __name__ == '__main__':
if len(sys.argv) > 1:
json_in = open(sys.argv[1])
else:
json_in = sys.stdin
data = json.load(json_in)
expanded = ExpandJsonTemplate(data)
json.dump(expanded, sys.stdout, indent=2)
| 31.52
| 79
| 0.730118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,498
| 0.633672
|
83dc4959d0371e253276a653ff679aa4ad785db5
| 10,237
|
py
|
Python
|
pyeventbus/tests/IO_performance_testing.py
|
n89nanda/EventBus
|
d1e35fa1ce9a2cb502404ecc2328c6c59745fce6
|
[
"MIT"
] | 24
|
2018-02-02T03:12:05.000Z
|
2021-11-11T10:06:22.000Z
|
pyeventbus/tests/IO_performance_testing.py
|
n89nanda/EventBus
|
d1e35fa1ce9a2cb502404ecc2328c6c59745fce6
|
[
"MIT"
] | 2
|
2018-08-13T14:08:51.000Z
|
2020-02-18T20:11:19.000Z
|
pyeventbus/tests/IO_performance_testing.py
|
n89nanda/EventBus
|
d1e35fa1ce9a2cb502404ecc2328c6c59745fce6
|
[
"MIT"
] | 2
|
2020-01-17T12:47:30.000Z
|
2020-05-05T14:10:10.000Z
|
from pyeventbus import *
from timeit import default_timer as timer
import numpy
import sys
from os import getcwd
import json
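# Each nested event class below carries start/finish timestamps so the
# subscriber handling it can report how long the IO-heavy work took in each
# threading mode (main thread, background, greenlet, parallel, concurrent).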
class Events:
class IOHeavyTestEvent:
start = 0
finish = 0
duration = 0
def __init__(self):
pass
def setStart(self, time):
self.start = time
def setFinish(self, time):
self.finish = time
def getDuration(self):
return self.finish - self.start
class IOHeavyTestEventBG:
start = 0
finish = 0
duration = 0
def __init__(self):
pass
def setStart(self, time):
self.start = time
def setFinish(self, time):
self.finish = time
def getDuration(self):
return self.finish - self.start
class IOHeavyTestEventGreenlet:
start = 0
finish = 0
duration = 0
def __init__(self):
pass
def setStart(self, time):
self.start = time
def setFinish(self, time):
self.finish = time
def getDuration(self):
return self.finish - self.start
class IOHeavyTestEventParallel:
start = 0
finish = 0
duration = 0
def __init__(self):
pass
def setStart(self, time):
self.start = time
def setFinish(self, time):
self.finish = time
def getDuration(self):
return self.finish - self.start
class IOHeavyTestEventConcurrent:
start = 0
finish = 0
duration = 0
def __init__(self):
pass
def setStart(self, time):
self.start = time
def setFinish(self, time):
self.finish = time
def getDuration(self):
return self.finish - self.start
class PerformanceTester:
def __init__(self):
pass
def register(self, aInstance):
PyBus.Instance().register(aInstance, self.__class__.__name__)
def startIOHeavyTestInMain(self):
event = Events.IOHeavyTestEvent()
event.setStart(timer())
start = timer()
PyBus.Instance().post(event)
print("{} got control back in {} seconds.".format('startIOHeavyTestInMain: ', timer() - start))
def startIOHeavyTestInBackground(self):
event = Events.IOHeavyTestEventBG()
event.setStart(timer())
start = timer()
PyBus.Instance().post(event)
print("{} got control back in {} seconds.".format('startIOHeavyTestInBackground: ', timer() - start))
def startIOHeavyTestInGreenlet(self):
event = Events.IOHeavyTestEventGreenlet()
event.setStart(timer())
start = timer()
PyBus.Instance().post(event)
print("{} got control back in {} seconds.".format('startIOHeavyTestInGreenlet: ', timer() - start))
def startIOHeavyTestInParallel(self):
event = Events.IOHeavyTestEventParallel()
event.setStart(timer())
start = timer()
PyBus.Instance().post(event)
print("{} got control back in {} seconds.".format('startIOHeavyTestInParallel: ', timer() - start))
def startIOHeavyTestInConcurrent(self):
event = Events.IOHeavyTestEventConcurrent()
event.setStart(timer())
start = timer()
PyBus.Instance().post(event)
print("{} got control back in {} seconds.".format('startIOHeavyTestInConcurrent: ', timer() - start))
class PerformanceExecuter:
def __init__(self):
pass
def register(self, bInstance):
PyBus.Instance().register(bInstance, self.__class__.__name__)
@subscribe(onEvent=Events.IOHeavyTestEvent)
def IOHeavyTest1(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest1: Main-thread', event.getDuration()))
@subscribe(threadMode = Mode.BACKGROUND, onEvent=Events.IOHeavyTestEventBG)
def IOHeavyTest2(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest2: Background-thread', event.getDuration()))
@subscribe(threadMode = Mode.GREENLET, onEvent=Events.IOHeavyTestEventGreenlet)
def IOHeavyTest3(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest3: Greenlet', event.getDuration()))
@subscribe(threadMode = Mode.PARALLEL, onEvent=Events.IOHeavyTestEventParallel)
def IOHeavyTest4(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest4: parallel', event.getDuration()))
@subscribe(threadMode = Mode.CONCURRENT, onEvent=Events.IOHeavyTestEventConcurrent)
def IOHeavyTest5(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest5: Concurrent', event.getDuration()))
@subscribe(onEvent=Events.IOHeavyTestEvent)
def IOHeavyTest6(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest6: Main-thread', event.getDuration()))
@subscribe(threadMode = Mode.BACKGROUND, onEvent=Events.IOHeavyTestEventBG)
def IOHeavyTest7(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest7: Background-thread', event.getDuration()))
@subscribe(threadMode = Mode.GREENLET, onEvent=Events.IOHeavyTestEventGreenlet)
def IOHeavyTest8(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest8: Greenlet', event.getDuration()))
@subscribe(threadMode = Mode.PARALLEL, onEvent=Events.IOHeavyTestEventParallel)
def IOHeavyTest9(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest9: parallel', event.getDuration()))
@subscribe(threadMode = Mode.CONCURRENT, onEvent=Events.IOHeavyTestEventConcurrent)
def IOHeavyTest10(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest10: Concurrent', event.getDuration()))
@subscribe(onEvent=Events.IOHeavyTestEvent)
def IOHeavyTest11(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest11: Main-thread', event.getDuration()))
@subscribe(threadMode = Mode.BACKGROUND, onEvent=Events.IOHeavyTestEventBG)
def IOHeavyTest12(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest12: Background-thread', event.getDuration()))
@subscribe(threadMode = Mode.GREENLET, onEvent=Events.IOHeavyTestEventGreenlet)
def IOHeavyTest13(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest13: Greenlet', event.getDuration()))
@subscribe(threadMode = Mode.PARALLEL, onEvent=Events.IOHeavyTestEventParallel)
def IOHeavyTest14(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest14: parallel', event.getDuration()))
@subscribe(threadMode = Mode.CONCURRENT, onEvent=Events.IOHeavyTestEventConcurrent)
def IOHeavyTest15(self, event):
for i in range(3000):
with open('{}/generated.json'.format(getcwd())) as f:
data = json.load(f)
event.setFinish(timer())
print("{} ran the code in {} seconds.".format('IOHeavyTest15: Concurrent', event.getDuration()))
if __name__ == '__main__':
tester = PerformanceTester()
tester.register(tester)
executer = PerformanceExecuter()
executer.register(executer)
    print(sys.argv[1:][0])
arg = sys.argv[1:][0]
if arg == 'startIOHeavyTestInMain': tester.startIOHeavyTestInMain()
elif arg == 'startIOHeavyTestInBackground': tester.startIOHeavyTestInBackground()
elif arg == 'startIOHeavyTestInGreenlet': tester.startIOHeavyTestInGreenlet()
elif arg == 'startIOHeavyTestInParallel': tester.startIOHeavyTestInParallel()
elif arg == 'startIOHeavyTestInConcurrent': tester.startIOHeavyTestInConcurrent()
# tester.startIOHeavyTestInMain()
# tester.startIOHeavyTestInBackground()
# tester.startIOHeavyTestInGreenlet()
# tester.startIOHeavyTestInParallel()
# tester.startIOHeavyTestInConcurrent()
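# Illustrative invocation (mode names come from the dispatch above):
#   python IO_performance_testing.py startIOHeavyTestInGreenlet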
| 38.197761
| 111
| 0.62069
| 9,287
| 0.907199
| 0
| 0
| 5,662
| 0.553092
| 0
| 0
| 1,838
| 0.179545
|
83dcc185970f786453677691d5a450058ad2e7d1
| 511
|
py
|
Python
|
book/migrations/0006_alter_book_cover_img.py
|
KhudadadKhawari/the-library
|
a6acd2e8ce9ca350339d99775f1e7906d343c7d4
|
[
"MIT"
] | null | null | null |
book/migrations/0006_alter_book_cover_img.py
|
KhudadadKhawari/the-library
|
a6acd2e8ce9ca350339d99775f1e7906d343c7d4
|
[
"MIT"
] | null | null | null |
book/migrations/0006_alter_book_cover_img.py
|
KhudadadKhawari/the-library
|
a6acd2e8ce9ca350339d99775f1e7906d343c7d4
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0 on 2021-12-15 09:04
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('book', '0005_alter_book_rented_count'),
]
operations = [
migrations.AlterField(
model_name='book',
name='cover_img',
field=models.ImageField(default=django.utils.timezone.now, upload_to='static/media/book/covers'),
preserve_default=False,
),
]
| 24.333333
| 109
| 0.639922
| 391
| 0.765166
| 0
| 0
| 0
| 0
| 0
| 0
| 124
| 0.242661
|
83dd12a100f10a5e78beecf49e3037dfe7dab6b8
| 79
|
py
|
Python
|
hrsxrate.py
|
fabiovitoriano7/pythoncourse
|
cceb9b727abd15c4a63f08b5678b224011441997
|
[
"MIT"
] | null | null | null |
hrsxrate.py
|
fabiovitoriano7/pythoncourse
|
cceb9b727abd15c4a63f08b5678b224011441997
|
[
"MIT"
] | null | null | null |
hrsxrate.py
|
fabiovitoriano7/pythoncourse
|
cceb9b727abd15c4a63f08b5678b224011441997
|
[
"MIT"
] | null | null | null |
hrs = input("Enter Hours:")
rate = 2.75
print("Pay: " + str(float(hrs) * rate))
| 19.75
| 40
| 0.607595
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 21
| 0.265823
|
83dda5970adb161d516652e3bbdec232d2bc568b
| 34,005
|
py
|
Python
|
main5.py
|
LinXueyuanStdio/MyTransE
|
971901757aba6af22fc2791b5bb32028390b9625
|
[
"Apache-2.0"
] | null | null | null |
main5.py
|
LinXueyuanStdio/MyTransE
|
971901757aba6af22fc2791b5bb32028390b9625
|
[
"Apache-2.0"
] | null | null | null |
main5.py
|
LinXueyuanStdio/MyTransE
|
971901757aba6af22fc2791b5bb32028390b9625
|
[
"Apache-2.0"
] | 1
|
2020-10-11T02:22:33.000Z
|
2020-10-11T02:22:33.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import _thread
import sys
import time
from math import exp
from random import random
from typing import List, Tuple, Set
from scipy import spatial
import numpy as np
import torch
from torch import nn
from torch.optim import optimizer
from torch.utils import tensorboard
from torch.utils.data import DataLoader
import torch.nn.functional as F
from dataloader import BidirectionalOneShotIterator
from dataloader import TrainDataset
from dataloader import TestDataset
import tensorflow as tf
import tensorboard as tb
import logging
tf.io.gfile = tb.compat.tensorflow_stub.io.gfile
torch.random.manual_seed(123456)
# region model
class KGEModel(nn.Module):
def __init__(self, train_seeds, nentity, nrelation, nvalue, hidden_dim, gamma, double_entity_embedding=False,
double_relation_embedding=False):
super(KGEModel, self).__init__()
# self.model_name = model_name
self.nentity = nentity
self.nrelation = nrelation
self.nvalue = nvalue
self.hidden_dim = hidden_dim
self.epsilon = 2.0
self.gamma = nn.Parameter(
torch.Tensor([gamma]),
requires_grad=False
)
self.embedding_range = nn.Parameter(
torch.Tensor([(self.gamma.item() + self.epsilon) / hidden_dim]),
requires_grad=False
)
self.entity_dim = hidden_dim * 2 if double_entity_embedding else hidden_dim
self.relation_dim = hidden_dim * 2 if double_relation_embedding else hidden_dim
self.value_dim = hidden_dim * 2 if double_entity_embedding else hidden_dim
entity_weight = torch.zeros(nentity, self.entity_dim)
nn.init.uniform_(
tensor=entity_weight,
a=-self.embedding_range.item(),
b=self.embedding_range.item()
)
for left_entity, right_entity in train_seeds:
entity_weight[left_entity] = entity_weight[right_entity]
self.entity_embedding = nn.Parameter(entity_weight)
# nn.init.normal_(self.entity_embedding)
self.relation_embedding = nn.Parameter(torch.zeros(nrelation, self.relation_dim))
# nn.init.normal_(self.relation_embedding)
nn.init.uniform_(
tensor=self.relation_embedding,
a=-self.embedding_range.item(),
b=self.embedding_range.item()
)
self.value_embedding = nn.Parameter(torch.zeros(nvalue, self.value_dim))
# nn.init.normal_(self.value_embedding)
nn.init.uniform_(
tensor=self.value_embedding,
a=-self.embedding_range.item(),
b=self.embedding_range.item()
)
def forward(self, sample, mode='single'):
if mode == 'single':
batch_size, negative_sample_size = sample.size(0), 1
head = torch.index_select(
self.entity_embedding,
dim=0,
index=sample[:, 0]
).unsqueeze(1)
relation = torch.index_select(
self.relation_embedding,
dim=0,
index=sample[:, 1]
).unsqueeze(1)
tail = torch.index_select(
self.value_embedding,
dim=0,
index=sample[:, 2]
).unsqueeze(1)
elif mode == 'head-batch':
tail_part, head_part = sample
batch_size, negative_sample_size = head_part.size(0), head_part.size(1)
head = torch.index_select(
self.entity_embedding,
dim=0,
index=head_part.view(-1)
).view(batch_size, negative_sample_size, -1)
relation = torch.index_select(
self.relation_embedding,
dim=0,
index=tail_part[:, 1]
).unsqueeze(1)
tail = torch.index_select(
self.value_embedding,
dim=0,
index=tail_part[:, 2]
).unsqueeze(1)
elif mode == 'tail-batch':
head_part, tail_part = sample
batch_size, negative_sample_size = tail_part.size(0), tail_part.size(1)
head = torch.index_select(
self.entity_embedding,
dim=0,
index=head_part[:, 0]
).unsqueeze(1)
relation = torch.index_select(
self.relation_embedding,
dim=0,
index=head_part[:, 1]
).unsqueeze(1)
tail = torch.index_select(
self.value_embedding,
dim=0,
index=tail_part.view(-1)
).view(batch_size, negative_sample_size, -1)
else:
raise ValueError('mode %s not supported' % mode)
score = self.TransE(head, relation, tail, mode)
return score
def TransE(self, head, relation, tail, mode):
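        # TransE score: gamma - || h + r - t ||_1, with the L1 norm taken along
        # the embedding dimension; 'head-batch' groups (relation - tail) first
        # so the result broadcasts against many candidate heads at once.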
if mode == 'head-batch':
score = head + (relation - tail)
else:
score = (head + relation) - tail
score = self.gamma.item() - torch.norm(score, p=1, dim=2)
return score
def RotatE(self, head, relation, tail, mode):
pi = 3.14159265358979323846
re_head, im_head = torch.chunk(head, 2, dim=2)
re_tail, im_tail = torch.chunk(tail, 2, dim=2)
# Make phases of relations uniformly distributed in [-pi, pi]
phase_relation = relation / (self.embedding_range.item() / pi)
re_relation = torch.cos(phase_relation)
im_relation = torch.sin(phase_relation)
if mode == 'head-batch':
re_score = re_relation * re_tail + im_relation * im_tail
im_score = re_relation * im_tail - im_relation * re_tail
re_score = re_score - re_head
im_score = im_score - im_head
else:
re_score = re_head * re_relation - im_head * im_relation
im_score = re_head * im_relation + im_head * re_relation
re_score = re_score - re_tail
im_score = im_score - im_tail
score = torch.stack([re_score, im_score], dim=0)
score = score.norm(dim=0)
score = self.gamma.item() - score.sum(dim=2)
return score
@staticmethod
def train_step(model, optimizer, positive_sample, negative_sample, subsampling_weight, mode, device="cuda"):
model.train()
optimizer.zero_grad()
positive_sample = positive_sample.to(device)
negative_sample = negative_sample.to(device)
subsampling_weight = subsampling_weight.to(device)
negative_score = model((positive_sample, negative_sample), mode=mode)
negative_score = F.logsigmoid(-negative_score).mean(dim=1)
positive_score = model(positive_sample)
positive_score = F.logsigmoid(positive_score).squeeze(dim=1)
positive_sample_loss = - (subsampling_weight * positive_score).sum() / subsampling_weight.sum()
negative_sample_loss = - (subsampling_weight * negative_score).sum() / subsampling_weight.sum()
loss = (positive_sample_loss + negative_sample_loss) / 2
loss.backward()
optimizer.step()
return loss.item()
# endregion
# region logging
def get_logger(filename):
"""
    Return an instance of logger with a unified log format.
"""
logger = logging.getLogger('logger')
logger.setLevel(logging.INFO)
logging.basicConfig(format='%(message)s', level=logging.INFO)
handler = logging.FileHandler(filename)
handler.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s: %(message)s'))
logging.getLogger().addHandler(handler)
return logger
logger = get_logger("./train.log")
# endregion
# region 进度条
class Progbar(object):
"""
    Progbar class inspired by keras: a console progress bar.
```
progbar = Progbar(max_step=100)
for i in range(100):
progbar.update(i, [("step", i), ("next", i+1)])
```
"""
def __init__(self, max_step, width=30):
self.max_step = max_step
self.width = width
self.last_width = 0
self.sum_values = {}
self.start = time.time()
self.last_step = 0
self.info = ""
self.bar = ""
def _update_values(self, curr_step, values):
for k, v in values:
if k not in self.sum_values:
self.sum_values[k] = [v * (curr_step - self.last_step), curr_step - self.last_step]
else:
self.sum_values[k][0] += v * (curr_step - self.last_step)
self.sum_values[k][1] += (curr_step - self.last_step)
def _write_bar(self, curr_step):
last_width = self.last_width
sys.stdout.write("\b" * last_width)
sys.stdout.write("\r")
numdigits = int(np.floor(np.log10(self.max_step))) + 1
barstr = '%%%dd/%%%dd [' % (numdigits, numdigits)
bar = barstr % (curr_step, self.max_step)
prog = float(curr_step) / self.max_step
prog_width = int(self.width * prog)
if prog_width > 0:
bar += ('=' * (prog_width - 1))
if curr_step < self.max_step:
bar += '>'
else:
bar += '='
bar += ('.' * (self.width - prog_width))
bar += ']'
sys.stdout.write(bar)
return bar
def _get_eta(self, curr_step):
now = time.time()
if curr_step:
time_per_unit = (now - self.start) / curr_step
else:
time_per_unit = 0
eta = time_per_unit * (self.max_step - curr_step)
if curr_step < self.max_step:
info = ' - ETA: %ds' % eta
else:
info = ' - %ds' % (now - self.start)
return info
def _get_values_sum(self):
info = ""
for name, value in self.sum_values.items():
info += ' - %s: %.6f' % (name, value[0] / max(1, value[1]))
return info
def _write_info(self, curr_step):
info = ""
info += self._get_eta(curr_step)
info += self._get_values_sum()
sys.stdout.write(info)
return info
def _update_width(self, curr_step):
curr_width = len(self.bar) + len(self.info)
if curr_width < self.last_width:
sys.stdout.write(" " * (self.last_width - curr_width))
if curr_step >= self.max_step:
sys.stdout.write("\n")
sys.stdout.flush()
self.last_width = curr_width
def update(self, curr_step, values):
"""Updates the progress bar.
Args:
values: List of tuples (name, value_for_last_step).
The progress bar will display averages for these values.
"""
self._update_values(curr_step, values)
self.bar = self._write_bar(curr_step)
self.info = self._write_info(curr_step)
self._update_width(curr_step)
self.last_step = curr_step
# endregion
# region evaluation of aligned entities
class Tester:
    left_ids: List[int] = []  # ids of the left entities of aligned pairs in test_seeds
    right_ids: List[int] = []  # ids of the right entities of aligned pairs in test_seeds
    seeds: List[Tuple[int, int]] = []  # (m, 2) aligned entity pairs (a, b); a is the left entity, b the right entity
train_seeds: List[Tuple[int, int]] = [] # (0.8m, 2)
test_seeds: List[Tuple[int, int]] = [] # (0.2m, 2)
linkEmbedding = []
kg1E = []
kg2E = []
EA_results = {}
def read_entity_align_list(self, entity_align_file_path):
ret = []
with open(entity_align_file_path, encoding='utf-8') as f:
for line in f:
th = line[:-1].split('\t')
ret.append((int(th[0]), int(th[1])))
self.seeds = ret
# 80%训练集,20%测试集
train_percent = 0.3
train_max_idx = int(train_percent * len(self.seeds))
self.train_seeds = self.seeds[:]
self.test_seeds = self.seeds[:]
self.left_ids = []
self.right_ids = []
for left_entity, right_entity in self.test_seeds:
            self.left_ids.append(left_entity)  # left entity of the aligned pair
            self.right_ids.append(right_entity)  # right entity of the aligned pair
def XRA(self, entity_embedding_file_path):
self.linkEmbedding = []
with open(entity_embedding_file_path, 'r', encoding='utf-8') as f:
lines = f.readlines()
for i in range(len(lines)):
aline = lines[i].strip()
aline_list = aline.split()
self.linkEmbedding.append(aline_list)
@staticmethod
def get_vec(entities_embedding, id_list: List[int], device="cuda"):
tensor = torch.LongTensor(id_list).view(-1, 1).to(device)
return entities_embedding(tensor).view(-1, 200).cpu().detach().numpy()
@staticmethod
def get_vec2(entities_embedding, id_list: List[int], device="cuda"):
all_entity_ids = torch.LongTensor(id_list).view(-1).to(device)
all_entity_vec = torch.index_select(
entities_embedding,
dim=0,
index=all_entity_ids
).view(-1, 200).cpu().detach().numpy()
return all_entity_vec
def calculate(self, top_k=(1, 10, 50, 100)):
Lvec = np.array([self.linkEmbedding[e1] for e1, e2 in self.test_seeds])
Rvec = np.array([self.linkEmbedding[e2] for e1, e2 in self.test_seeds])
        return self.get_hits2(Lvec, Rvec, top_k)
def get_hits2(self, Lvec, Rvec, top_k=(1, 10, 50, 100)):
sim = spatial.distance.cdist(Lvec, Rvec, metric='cityblock')
return self.get_hits(Lvec, Rvec, sim, top_k)
def get_hits(self, Lvec, Rvec, sim, top_k=(1, 10, 50, 100)):
        # Lvec (m, d), Rvec (m, d)
        # Lvec and Rvec are the embeddings of the aligned left/right entities;
        # d is the embedding dimension and m the number of entities.
        # sim = distance(Lvec, Rvec), shape (m, m)
        # sim[i, j] is the distance from entity i in Lvec to entity j in Rvec.
        top_lr = [0] * len(top_k)
        for i in range(Lvec.shape[0]):  # for every KG1 entity
            rank = sim[i, :].argsort()
            # sim[i, :] is a row vector of distances from entity i in Lvec to
            # every entity in Rvec. argsort sorts the distances and returns the
            # sorted indices: e.g. [6, 3, 5] (indices [0, 1, 2]) sorts to
            # [3, 5, 6] and returns [1, 2, 0].
            rank_index = np.where(rank == i)[0][0]
            # For a 1-D array, np.where(rank == i) is equivalent to
            # list(rank).index(i), i.e. the position of element i in rank.
            # Here i indexes the entity in Rvec aligned with entity i in Lvec,
            # not the entity i in Lvec itself.
            for j in range(len(top_k)):
                if rank_index < top_k[j]:  # index starts at 0, hence '<'
                    top_lr[j] += 1
top_rl = [0] * len(top_k)
for i in range(Rvec.shape[0]):
rank = sim[:, i].argsort()
rank_index = np.where(rank == i)[0][0]
for j in range(len(top_k)):
if rank_index < top_k[j]:
top_rl[j] += 1
logger.info('For each left:')
left = []
for i in range(len(top_lr)):
hits = top_k[i]
hits_value = top_lr[i] / len(self.test_seeds) * 100
left.append((hits, hits_value))
logger.info('Hits@%d: %.2f%%' % (hits, hits_value))
logger.info('For each right:')
right = []
for i in range(len(top_rl)):
hits = top_k[i]
hits_value = top_rl[i] / len(self.test_seeds) * 100
right.append((hits, hits_value))
logger.info('Hits@%d: %.2f%%' % (hits, hits_value))
return {
"left": left,
"right": right,
}
# endregion
# region saving/loading the model and restoring training state
_MODEL_STATE_DICT = "model_state_dict"
_OPTIMIZER_STATE_DICT = "optimizer_state_dict"
_EPOCH = "epoch"
_STEP = "step"
_BEST_SCORE = "best_score"
_LOSS = "loss"
def load_checkpoint(model: nn.Module, optim: optimizer.Optimizer,
checkpoint_path="./result/fr_en/checkpoint.tar") -> Tuple[int, int, float, float]:
"""Loads training checkpoint.
:param checkpoint_path: path to checkpoint
:param model: model to update state
:param optim: optimizer to update state
:return tuple of starting epoch id, starting step id, best checkpoint score
"""
checkpoint = torch.load(checkpoint_path)
model.load_state_dict(checkpoint[_MODEL_STATE_DICT])
optim.load_state_dict(checkpoint[_OPTIMIZER_STATE_DICT])
start_epoch_id = checkpoint[_EPOCH] + 1
step = checkpoint[_STEP] + 1
best_score = checkpoint[_BEST_SCORE]
loss = checkpoint[_LOSS]
return start_epoch_id, step, best_score, loss
def save_checkpoint(model: nn.Module, optim: optimizer.Optimizer,
epoch_id: int, step: int, best_score: float, loss: float,
save_path="./result/fr_en/checkpoint.tar"):
torch.save({
_MODEL_STATE_DICT: model.state_dict(),
_OPTIMIZER_STATE_DICT: optim.state_dict(),
_EPOCH: epoch_id,
_STEP: step,
_BEST_SCORE: best_score,
_LOSS: loss,
}, save_path)
def save_entity_embedding_list(model, embedding_path="./result/fr_en/ATentsembed.txt"):
with open(embedding_path, 'w') as f:
d = model.entity_embedding.data.detach().cpu().numpy()
for i in range(len(d)):
f.write(" ".join([str(j) for j in d[i].tolist()]))
f.write("\n")
# endregion
# region dataset
def read_ids_and_names(dir_path, sp="\t"):
ids = []
names = []
with open(dir_path, encoding="utf-8") as file:
lines = file.readlines()
for line in lines:
id_to_name = line.strip().split(sp)
ids.append(int(id_to_name[0]))
names.append(id_to_name[1])
return ids, names
def read_triple(triple_path):
with open(triple_path, 'r') as fr:
triple = set()
for line in fr:
line_split = line.split()
head = int(line_split[0])
tail = int(line_split[1])
rel = int(line_split[2])
triple.add((head, rel, tail))
return list(triple)
def append_align_triple(triple: List[Tuple[int, int, int]], entity_align_list: List[Tuple[int, int]]):
    # Replace the head entity with its aligned counterpart to build extra
    # attribute triples, so the aligned-entity data gets exploited.
align_set = {}
for i in entity_align_list:
align_set[i[0]] = i[1]
align_set[i[1]] = i[0]
triple_replace_with_align = []
bar = Progbar(max_step=len(triple))
count = 0
for entity, attr, value in triple:
if entity in align_set:
triple_replace_with_align.append((align_set[entity], attr, value))
count += 1
bar.update(count, [("step", count)])
return triple + triple_replace_with_align
# endregion
class TransE:
def __init__(self,
# input paths
entity_align_file="data/fr_en/ref_ent_ids",
all_entity_file="data/fr_en/ent_ids_all",
all_attr_file="data/fr_en/att2id_all",
all_value_file="data/fr_en/att_value2id_all",
all_triple_file="data/fr_en/att_triple_all",
# output paths
checkpoint_path="./result/TransE/fr_en/checkpoint.tar",
embedding_path="./result/TransE/fr_en/ATentsembed.txt",
tensorboard_log_dir="./result/TransE/fr_en/log/",
device="cuda",
learning_rate=0.001,
visualize=False
):
self.entity_align_file = entity_align_file
self.all_entity_file = all_entity_file
self.all_attr_file = all_attr_file
self.all_value_file = all_value_file
self.all_triple_file = all_triple_file
self.device = device
self.visualize = visualize
self.tensorboard_log_dir = tensorboard_log_dir
self.checkpoint_path = checkpoint_path
self.embedding_path = embedding_path
self.learning_rate = learning_rate
def init_data(self):
self.t = Tester()
        self.t.read_entity_align_list(self.entity_align_file)  # load the known aligned entity pairs
self.entity_list, self.entity_name_list = read_ids_and_names(self.all_entity_file)
self.attr_list, _ = read_ids_and_names(self.all_attr_file)
self.value_list, _ = read_ids_and_names(self.all_value_file)
self.train_triples = read_triple(self.all_triple_file)
self.entity_count = len(self.entity_list)
self.attr_count = len(self.attr_list)
self.value_count = len(self.value_list)
logger.info("entity: " + str(self.entity_count)
+ " attr: " + str(self.attr_count)
+ " value: " + str(self.value_count))
def append_align_triple(self):
self.train_triples = append_align_triple(self.train_triples, self.t.train_seeds)
def init_dataset(self):
train_dataloader_head = DataLoader(
TrainDataset(self.train_triples, self.entity_count, self.attr_count, self.value_count, 512, 'head-batch'),
batch_size=1024,
shuffle=False,
num_workers=4,
collate_fn=TrainDataset.collate_fn
)
train_dataloader_tail = DataLoader(
TrainDataset(self.train_triples, self.entity_count, self.attr_count, self.value_count, 512, 'tail-batch'),
batch_size=1024,
shuffle=False,
num_workers=4,
collate_fn=TrainDataset.collate_fn
)
self.train_iterator = BidirectionalOneShotIterator(train_dataloader_head, train_dataloader_tail)
def init_model(self):
self.model = KGEModel(
            self.t.seeds,  # all seeds
nentity=self.entity_count,
nrelation=self.attr_count,
nvalue=self.value_count,
hidden_dim=200,
gamma=24.0,
).to(self.device)
def init_optimizer(self):
self.optim = torch.optim.Adam(
filter(lambda p: p.requires_grad, self.model.parameters()),
lr=self.learning_rate
)
def init_soft_align(self):
        self.combination_threshold = 3  # pairs closer than this are treated as aligned by the model
        self.combination_restriction = 5000  # max number of entity pairs the model may treat as aligned
self.distance2entitiesPair: List[Tuple[int, Tuple[int, int]]] = []
self.combinationProbability: List[float] = [0] * self.entity_count # [0, 1)
self.correspondingEntity = {}
self.model_think_align_entities = []
self.model_is_able_to_predict_align_entities = False
def soft_align(self, positive_sample, mode='single'):
batch_size = positive_sample.size()[0]
        # positive_sample: (batch_size, 3)
        #   batch_size triples of (entity, attr, value)
        # negative_sample: (batch_size, negative_sample_size)
        #   batch_size rows of replacement ids (neg_id1, neg_id2, ...)
        # Let e be a positive entity, e' a negative entity, and e* the entity
        # the model believes e is aligned with.
        # 1. head-batch
        #    (e, a, v) + (e'1, e'2, ..., e'n) ->
        #    ((e, a, v), (e'1, a, v))
        #    ((e, a, v), (e'2, a, v))
        #    ...
        #    ((e, a, v), (e'n, a, v))
        # 2. tail-batch
        #    (e, a, v) + (v'1, v'2, ..., v'n) ->
        #    ((e, a, v), (e, a, v'1))
        #    ((e, a, v), (e, a, v'2))
        #    ...
        #    ((e, a, v), (e, a, v'n))
soft_positive_sample = positive_sample.clone()
if mode == "head-batch":
# 负例是随机替换头部
# (neg_id1, neg_id2, ...) 是实体id
# ((e, a, v), (e'1, a, v))
# 已有 (e, a, v) + (e'1, e'2, ..., e'n)
for i in range(batch_size):
# 1. 模型认为头部是对齐的
h1 = soft_positive_sample[i][0].item()
if self.combinationProbability[h1] >= 0.5 and h1 in self.correspondingEntity: # 如果可信
# 希望 (e, a, v) (e', a, v) -> (e*, a, v) (e', a, v)
h1_cor = self.correspondingEntity[h1] # 获取模型认为的对齐实体
soft_positive_sample[i][0] = h1_cor # 替换为模型认为的对齐实体
elif mode == "tail-batch":
# 负例是随机替换尾部
# (neg_id1, neg_id2, ...) 是属性值id
# ((e, a, v), (e, a, v'2))
# 已有 (e, a, v) + (v'1, v'2, ..., v'n)
for i in range(batch_size):
# 1. 模型认为头部是对齐的
h1 = soft_positive_sample[i][0].item()
if self.combinationProbability[h1] >= 0.5 and h1 in self.correspondingEntity: # 如果可信
# 希望 (e, a, v) (e', a, v) -> (e*, a, v) (e', a, v)
h1_cor = self.correspondingEntity[h1] # 获取模型认为的对齐实体
soft_positive_sample[i][0] = h1_cor # 替换为模型认为的对齐实体
return soft_positive_sample
def do_combine(self, thread_name, sim):
        # sim[i, j] is the distance from entity i in Lvec to entity j in Rvec
        logger.info(thread_name + " " + "model-side alignment in progress")
        computing_time = time.time()
        # 1. collect candidate pairs ordered by distance
        self.distance2entitiesPair: List[Tuple[int, Tuple[int, int]]] = []
        filtered = np.where(sim <= self.combination_threshold)
        for i, j in zip(filtered[0], filtered[1]):
            self.distance2entitiesPair.append((sim[i, j], (self.t.left_ids[i], self.t.right_ids[j])))
        filter_time = time.time()
        logger.info(thread_name + " " + "found "
                    + str(len(self.distance2entitiesPair)) + " entity pairs closer than "
                    + str(self.combination_threshold))
        logger.info(thread_name + " " + "flattening took " + str(int(filter_time - computing_time)) + " seconds")
        # 2. initialize the confidence that "the model believes two entities are aligned"
        combinationProbability: List[float] = [0] * self.entity_count  # [0, 1)
        # 3. entity pairs the model believes are aligned
        correspondingEntity = {}
        self.model_think_align_entities = []
        occupied: Set[int] = set()
        combination_counter = 0
        sigmoid = lambda x: 1.0 / (1.0 + exp(-x))
        for dis, (ent1, ent2) in self.distance2entitiesPair:
            if dis > self.combination_threshold:
                # beyond the trusted range: not credible
                continue
            # the distance is within the trusted range
            if ent1 in occupied or ent2 in occupied:
                continue
            if combination_counter >= self.combination_restriction:
                break
            combination_counter += 1
            correspondingEntity[ent1] = ent2
            correspondingEntity[ent2] = ent1
            self.model_think_align_entities.append((ent1, ent2))
            occupied.add(ent1)
            occupied.add(ent2)
            combinationProbability[ent1] = sigmoid(self.combination_threshold - dis)  # guarantees p > 0.5
            combinationProbability[ent2] = sigmoid(self.combination_threshold - dis)
        logger.info(thread_name + " " + "aligned " + str(len(self.model_think_align_entities)) + " entities")
        self.combination_restriction += 1000
        self.model_is_able_to_predict_align_entities = False  # lock
        self.combinationProbability = combinationProbability
        self.correspondingEntity = correspondingEntity
        self.model_is_able_to_predict_align_entities = True  # unlock
        align_time = time.time()
        logger.info(thread_name + " " + "model-side alignment finished in " + str(int(align_time - filter_time)) + " seconds")
def run_train(self, need_to_load_checkpoint=True):
logger.info("start training")
init_step = 1
total_steps = 20001
test_steps = 5000
last_loss = 100
score = 0
last_score = score
if need_to_load_checkpoint:
_, init_step, score, last_loss = load_checkpoint(self.model, self.optim, self.checkpoint_path)
last_score = score
summary_writer = tensorboard.SummaryWriter(log_dir=self.tensorboard_log_dir)
progbar = Progbar(max_step=total_steps - init_step)
start_time = time.time()
for step in range(init_step, total_steps):
positive_sample, negative_sample, subsampling_weight, mode = next(self.train_iterator)
loss = self.model.train_step(self.model, self.optim,
positive_sample, negative_sample,
subsampling_weight, mode, self.device)
            # soft alignment: rewrite positive_sample using the entities the
            # model currently believes are aligned, then train another round
if self.model_is_able_to_predict_align_entities:
soft_positive_sample = self.soft_align(positive_sample, mode)
loss2 = self.model.train_step(self.model, self.optim,
soft_positive_sample, negative_sample,
subsampling_weight, mode, self.device)
loss = (loss + loss2) / 2
progbar.update(step - init_step + 1, [
("loss", loss),
("cost", round((time.time() - start_time))),
("aligned", len(self.model_think_align_entities))
])
if self.visualize:
summary_writer.add_scalar(tag='Loss/train', scalar_value=loss, global_step=step)
if step == 12000 or step == 13000 or step == 14000:
logger.info("\n计算距离中")
computing_time = time.time()
left_vec = self.t.get_vec2(self.model.entity_embedding, self.t.left_ids)
right_vec = self.t.get_vec2(self.model.entity_embedding, self.t.right_ids)
sim = spatial.distance.cdist(left_vec, right_vec, metric='euclidean')
logger.info("计算距离完成,用时 " + str(int(time.time() - computing_time)) + " 秒")
# self.do_combine("Thread-" + str(step), sim)
# try:
# logger.info("启动线程,获取模型认为的对齐实体")
# _thread.start_new_thread(self.do_combine, ("Thread of step-" + str(step), sim,))
# except SystemExit:
# logger.error("Error: 无法启动线程")
logger.info("属性消融实验")
hits = self.t.get_hits(left_vec, right_vec, sim)
hits_left = hits["left"]
hits_right = hits["right"]
left_hits_10 = hits_left[2][1]
right_hits_10 = hits_right[2][1]
score = (left_hits_10 + right_hits_10) / 2
logger.info("score = " + str(score))
if self.visualize:
summary_writer.add_embedding(tag='Embedding',
mat=self.model.entity_embedding,
metadata=self.entity_name_list,
global_step=step)
summary_writer.add_scalar(tag='Hits@1/left', scalar_value=hits_left[0][1], global_step=step)
summary_writer.add_scalar(tag='Hits@10/left', scalar_value=hits_left[1][1], global_step=step)
summary_writer.add_scalar(tag='Hits@50/left', scalar_value=hits_left[2][1], global_step=step)
summary_writer.add_scalar(tag='Hits@100/left', scalar_value=hits_left[3][1], global_step=step)
summary_writer.add_scalar(tag='Hits@1/right', scalar_value=hits_right[0][1], global_step=step)
summary_writer.add_scalar(tag='Hits@10/right', scalar_value=hits_right[1][1], global_step=step)
summary_writer.add_scalar(tag='Hits@50/right', scalar_value=hits_right[2][1], global_step=step)
summary_writer.add_scalar(tag='Hits@100/right', scalar_value=hits_right[3][1], global_step=step)
if score > last_score:
last_score = score
save_checkpoint(self.model, self.optim, 1, step, score, loss, self.checkpoint_path)
save_entity_embedding_list(self.model, self.embedding_path)
def run_test(self):
load_checkpoint(self.model, self.optim, self.checkpoint_path)
logger.info("\n属性消融实验")
left_vec = self.t.get_vec2(self.model.entity_embedding, self.t.left_ids)
right_vec = self.t.get_vec2(self.model.entity_embedding, self.t.right_ids)
hits = self.t.get_hits(left_vec, right_vec)
hits_left = hits["left"]
hits_right = hits["right"]
left_hits_10 = hits_left[2][1]
right_hits_10 = hits_right[2][1]
score = (left_hits_10 + right_hits_10) / 2
logger.info("score = " + str(score))
def train_model_for_fr_en():
m = TransE(
checkpoint_path="./result/TransE2/fr_en/checkpoint.tar",
embedding_path="./result/TransE2/fr_en/ATentsembed.txt",
tensorboard_log_dir="./result/TransE2/fr_en/log/"
)
m.init_data()
# m.append_align_triple()
m.init_soft_align()
m.init_dataset()
m.init_model()
m.init_optimizer()
m.run_train(need_to_load_checkpoint=False)
def train_model_for_ja_en():
m = TransE(entity_align_file="data/ja_en/ref_ent_ids",
all_entity_file="data/ja_en/ent_ids_all",
all_attr_file="data/ja_en/att2id_all",
all_value_file="data/ja_en/att_value2id_all",
all_triple_file="data/ja_en/att_triple_all",
checkpoint_path="./result/TransE2/ja_en/checkpoint.tar",
embedding_path="./result/TransE2/ja_en/ATentsembed.txt",
tensorboard_log_dir="./result/TransE2/ja_en/log/")
m.init_data()
# m.append_align_triple()
m.init_soft_align()
m.init_dataset()
m.init_model()
m.init_optimizer()
m.run_train(need_to_load_checkpoint=False)
def train_model_for_zh_en():
m = TransE(entity_align_file="data/zh_en/ref_ent_ids",
all_entity_file="data/zh_en/ent_ids_all",
all_attr_file="data/zh_en/att2id_all",
all_value_file="data/zh_en/att_value2id_all",
all_triple_file="data/zh_en/att_triple_all",
checkpoint_path="./result/TransE2/zh_en/checkpoint.tar",
embedding_path="./result/TransE2/zh_en/ATentsembed.txt",
tensorboard_log_dir="./result/TransE2/zh_en/log/")
m.init_data()
# m.append_align_triple()
m.init_soft_align()
m.init_dataset()
m.init_model()
m.init_optimizer()
m.run_train(need_to_load_checkpoint=False)
def test_model():
m = TransE()
m.init_data()
m.init_model()
m.init_optimizer()
m.run_test()
# train_model_for_fr_en()
# train_model_for_ja_en()
train_model_for_zh_en()
| 37.327113
| 118
| 0.589002
| 28,756
| 0.813811
| 0
| 0
| 1,554
| 0.043979
| 0
| 0
| 6,393
| 0.180925
|
83deb844d22e41b2c14e852a19602c5b2980d2b2
| 25,395
|
py
|
Python
|
cogs/profiles.py
|
Greenfoot5/BattleBot
|
f4318124bb85786c3d0ff562132121c382445c36
|
[
"MIT"
] | 2
|
2020-01-13T22:58:22.000Z
|
2020-02-19T16:47:17.000Z
|
cogs/profiles.py
|
Greenfoot5/BattleBot
|
f4318124bb85786c3d0ff562132121c382445c36
|
[
"MIT"
] | 29
|
2020-01-13T23:30:03.000Z
|
2020-06-26T18:08:01.000Z
|
cogs/profiles.py
|
Greenfoot5/BattleBot
|
f4318124bb85786c3d0ff562132121c382445c36
|
[
"MIT"
] | 2
|
2020-01-15T00:20:10.000Z
|
2020-02-18T00:02:55.000Z
|
import discord
import time
import random
import datetime
import asyncio
import json
import config
from discord.ext import commands
from data.data_handler import data_handler
from itertools import chain
from collections import OrderedDict
def gainedRP(player, gained_rp):
if player['Level']['timeOfNextEarn'] > time.time():
return True, False, player['Level']['rank']
rank = get_rank_from(player['Level']['rp'] + gained_rp)
if rank > player['Level']['rank']:
return False, True, rank
return False, False, rank
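# gainedRP returns an (on_cooldown, ranked_up, rank) triple; it is consumed in
# Profiles.on_message below to decide whether to award rp and announce a rank up.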
# Function to get a user's rank and remaining rp to next rank.
# Takes current rp as parameter
def get_rank_from(rp):
# Sets the starting value to be our remaining rp
rem_rp = int(rp)
# Starts the rank at 0
rank = 0
    # Loops through the ranks and checks if the user had enough rp to rank up
# If so, take that rp away from rem_rp and add one to their rank
while rem_rp >= config.rp_ranks[rank]:
rem_rp -= config.rp_ranks[rank]
rank += 1
    # Returns the final rank; rem_rp is only used inside the loop.
return rank
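# Worked example (hypothetical rank table): with config.rp_ranks = [100, 200, ...],
# get_rank_from(250) consumes 100 for rank 1, leaving 150 < 200, so it returns 1.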
# Function to get profile pages (1 - 3)
async def get_page(self, ctx, number, userid):
clans = data_handler.load("clans")
profiles = data_handler.load("profiles")
user = await self.bot.fetch_user(userid)
player = profiles[str(userid)]
rank = player['Level']['rank']
title = config.rp_ranktitles[rank]
page = discord.Embed(title = f"{user.display_name}'s profile",
colour = int(player['Settings']['colours'][player['Settings']['colour']], 16),
description = f"{user.name}#{user.discriminator}")
page.set_thumbnail(url = user.avatar_url_as(static_format = 'png'))
page.set_footer(text = f"Requested by {ctx.author.display_name}",
icon_url = ctx.author.avatar_url_as(static_format='png'))
if number == 1:
# Page 1
try:
clan = clans[player['Base']['clanID']]['Base']['name']
except:
clan = "None"
page.add_field(name = "Base Info",
value = f"Account Name: {player['Base']['username']} \nClan: {clan} \nCountry: {player['Base']['country']}",
inline = False)
page.add_field(name = "Level Info",
value = f"Level: {player['Level']['rank']} \nTotal Experience: {player['Level']['rp']} \nTitle: {title}",
inline = False)
if number == 2:
# Page 2
page.add_field(name = "Achievements",
value = f"Amount of Lord titles: {player['Achievements']['lords']} \nAmount of Squire titles: {player['Achievements']['squires']} \nBest :trophy: rating: {player['Achievements']['rating']}",
inline = False)
page.add_field(name = "Fun Favourites",
value = f"Favourite unit: {player['Favourites']['unit']} \nFavourite Tactic: {player['Favourites']['tactic']} \nFavourite Tome: {player['Favourites']['tome']} \nFavourite Skin: {player['Favourites']['skin']}",
inline = False)
if number == 3 and userid is not None:
# Page 3
member = discord.utils.find(lambda g: g.get_member(userid), self.bot.guilds)
if member is not None:
member = member.get_member(userid)
days = int(int(time.time() - (member.created_at - datetime.datetime.utcfromtimestamp(0)).total_seconds())/ 86400)
discord_date = f"{member.created_at.ctime()} ({days} days ago)"
page.add_field(name = "Discord Info",
value = f"Joined Discord on: {discord_date} \nStatus: {member.status} \nid: `{member.id}` \nAvatar Link: {member.avatar_url_as(format='png')}")
return page
# get reaction with number + vice versa
def get_reaction(number, reaction = None):
reactions = {
1: "1\u20e3",
2: "2\u20e3",
3: "3\u20e3",
4: "4\u20e3",
5: "5\u20e3",
6: "6\u20e3",
7: "7\u20e3",
8: "8\u20e3",
9: "9\u20e3",
10: "10\u20e3"
}
if reaction is None:
return reactions.get(number, 0)
else:
return list(reactions.keys())[list(reactions.values()).index(reaction)]
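# e.g. get_reaction(3) -> "3\u20e3" (the keycap-3 emoji), while
# get_reaction(0, "3\u20e3") -> 3 maps a reaction back to its number
# (the number argument is ignored when a reaction is supplied).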
# async handling of user reactions
async def handle_reactions(self, ctx, userid, pages, page1, message):
profiles = data_handler.load("profiles")
page = 0
while True:
def check(reaction, user):
if user.bot == True:
return False
if reaction.message.id != message.id:
return False
reactions = ['⏪', '◀', '⏺️', '▶', '⏩']
return user.id == ctx.author.id and str(reaction) in reactions
try:
reaction, user = await self.bot.wait_for('reaction_add', timeout=60.0, check=check)
except asyncio.TimeoutError:
break
reaction = str(reaction)
if reaction == '⏺️':
playerid = int(random.choice(list(profiles)))
while playerid == userid:
playerid = int(random.choice(list(profiles)))
page1 = await get_page(self, ctx, 1, playerid)
page2 = await get_page(self, ctx, 2, playerid)
page3 = await get_page(self, ctx, 3, playerid)
pages = [page1, page2, page3]
await message.edit(embed=pages[0])
await handle_reactions(self, ctx, playerid, pages, page1, message)
return
elif reaction == '⏪':
page = 0
elif reaction == '◀':
page -= 1
if page < 0:
page = 0
elif reaction == '▶':
page += 1
if page >= 3:
page = 2
elif reaction == '⏩':
page = 2
await message.edit(embed=pages[page])
class Profiles(commands.Cog):
# Initialises the variables and sets the bot.
def __init__(self, bot):
self.bot = bot
    # Our base level command. With invoke_without_command=True, this command
    # only runs when no sub-command is invoked. Makes it a command group with a name.
@commands.group(name='profile', invoke_without_command = True, aliases = ['p', 'P', 'Profile'])
# Defines it as a function.
async def profile(self, ctx, *, userName:str = None):
"""
Check your profile or that of another member.
You no longer need to mention the user to check their profile!
"""
profiles = data_handler.load("profiles")
userids = list()
if userName is None:
            # if the user wants to display their own profile, show only their own
userid = ctx.message.author.id
else:
for user in list(filter(lambda u: userName in u.name, self.bot.users)):
userids.append(user.id)
for guild in self.bot.guilds:
for member in list(filter(lambda m: userName in m.display_name, guild.members)):
userids.append(member.id)
for profile in profiles:
if userName.casefold() in profiles[profile]['Base']['username'].casefold():
userids.append(int(profile))
# distinct result list
userids = list(OrderedDict.fromkeys(userids))
# filter out userids without existing user profile
tempUserids = list()
for userid in userids:
try:
player = profiles[str(userid)]
if config.rp_showHistoricProfiles == False:
member = discord.utils.find(lambda g: g.get_member(userid), self.bot.guilds).get_member(userid)
tempUserids.append(userid)
except:
continue
userids = tempUserids
if len(userids) <= 0:
await ctx.send("I don't know that Discord User/profile")
return
if len(userids) > 10:
await ctx.send("I found more than 10 matching profiles. Please be more specific.")
return
if len(userids) > 1:
            # more than one possible profile found, let the user decide which should be shown
selectionpage = discord.Embed(title = "I found more than one matching profile. Please select the correct one:", description = "")
selectionpage.set_footer(text = f"Requested by {ctx.author.display_name}", icon_url = ctx.author.avatar_url_as(static_format='png'))
selection = await ctx.send(embed=selectionpage)
foundUser = list()
i = 1
for userid in userids:
player = profiles[str(userid)]
user = await self.bot.fetch_user(userid)
reactionString = str(get_reaction(i))
selectionpage.add_field(name = f"{reactionString}", value = f"{user.name}#{user.discriminator} - Account Name: {player['Base']['username']}", inline = False)
foundUser.append(userid)
await selection.add_reaction(reactionString)
i += 1
await selection.edit(embed=selectionpage)
try:
reaction, user = await self.bot.wait_for('reaction_add', timeout=30.0, check=lambda r, u: u.id == ctx.author.id and u.bot == False)
except asyncio.TimeoutError:
return
# show the profile of this id:
userid = foundUser[int(get_reaction(0, str(reaction))) - 1]
else:
userid = userids[0]
# display profile of found user
page1 = await get_page(self, ctx, 1, userid)
page2 = await get_page(self, ctx, 2, userid)
page3 = await get_page(self, ctx, 3, userid)
pages = [page1, page2, page3]
message = await ctx.send(embed=page1)
await message.add_reaction("⏪")
await message.add_reaction("◀")
await message.add_reaction("⏺️")
await message.add_reaction("▶")
await message.add_reaction("⏩")
await handle_reactions(self, ctx, userid, pages, page1, message)
@profile.command(name="set")
async def setProfile(self, ctx, attribute, *, value):
        """
        Change values on your profile. You can change:
        `username`, `clan`, `country`, `lords`, `squires`, `rating`, `unit`, `tactic`, `tome`, `skin`, `colour`.
        """
        try:
            profiles = data_handler.load("profiles")
player = profiles[str(ctx.author.id)]
attribute = attribute.lower()
if attribute in ['colour', 'color', 'colours', 'colors']:
await self.changeProfileColour(ctx, int(value))
return
if attribute in ["lords", "lord"]:
if str(value)[0] == "+":
player['Achievements']['lords'] += int(value)
else:
player['Achievements']['lords'] = int(value)
elif attribute in ["clans" "clan"]:
player["Base"]["clan"] = value
elif attribute in ["squires", "squire"]:
if str(value)[0] == "+":
player['Achievements']['squires'] += int(value)
else:
player['Achievements']['squires'] = int(value)
elif attribute in ["rating"]:
player['Achievements']['rating'] = int(value)
elif attribute in ["unit", "units", "troop"]:
player['Favourites']['unit'] = value
elif attribute in ["tactic", "strategy", "layout"]:
player['Favourites']['tactic'] = value
elif attribute in ["tome", "masteryskill", "book"]:
player['Favourites']['tome'] = value
elif attribute in ["skin", "look"]:
player['Favourites']['skin'] = value
elif attribute in ["country", "location"]:
player['Base']['country'] = value
elif attribute in ["name", "accountname", "account", "username"]:
player['Base']['username'] = value
else:
await ctx.send("This is not a valid setting. You can change: " +
"`username`, `clan`, `country`, `lords`, `squires`, `rating`, `unit`, `tactic`, `tome`, `skin`, `colour`.")
return
except ValueError:
await ctx.send("Invalid Value. Please choose a number.")
else:
await ctx.send("Profile updated.")
data_handler.dump(profiles, "profiles")
@profile.command(name='colour', aliases = ['color', 'colours', 'colors'])
async def changeProfileColour(self, ctx, colour:int = None):
"""
Allows you to change the colour of all your profile based information!
"""
profiles = data_handler.load("profiles")
try:
player = profiles[str(ctx.author.id)]
        except KeyError:
            await ctx.send("An error occurred. Please try again.")
return
colourList = list(player['Settings']['colours'])
if colour is None or colour >= len(colourList) or colour < 0:
description = "Unlocked Colours:"
for colourIndex in range(len(colourList)):
description = description + f"\n{colourIndex}. {colourList[colourIndex]} - `#{player['Settings']['colours'][colourList[colourIndex]]}`"
embed = discord.Embed(title = "Please select a valid colour.",
colour = int(player['Settings']['colours'][player['Settings']['colour']], 16),
description = description)
Color = str(colourList.index(player['Settings']['colour'])) + ". " + player['Settings']['colour'] + " - `#" + player['Settings']['colours'][f"{player['Settings']['colour']}"] + "`"
embed.add_field(name = "Current Colour:",
value = Color)
embed.set_footer(text = f"Requested by {ctx.author.display_name}",
icon_url = ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
return
player['Settings']['colour'] = colourList[colour]
        profiles[str(ctx.author.id)] = player
data_handler.dump(profiles, "profiles")
await ctx.send("Updated your colour.")
@commands.Cog.listener()
async def on_message(self, ctx):
"""
Gives you rank points per message on a one minute cooldown.
"""
if ctx.author.bot:
return
profiles = data_handler.load("profiles")
try:
player = profiles[str(ctx.author.id)]
except KeyError:
profiles[f"{ctx.author.id}"] = {
"Base": {
"username": f"{ctx.author.display_name}", "clanID": "None", "country": "Earth"},
"Level": {
"rp": 0, "rank": 0, "timeOfNextEarn": 0},
"Achievements": {
"lords": 0, "squires": 0, "rating": 1000},
"Favourites": {
"unit": "None", "tactic": "None", "tome": "None", "skin": "None"},
"Settings": {
"rankUpMessage": "chat", "colour": "Default", "colours": {"Default": "000000"},"permissions": []}}
player = profiles[str(ctx.author.id)]
gained_rp = int(random.randint(config.rp_min, config.rp_max) * config.rp_mult)
cooldown, rankedUp, rank = gainedRP(player, gained_rp)
if cooldown:
return
player['Level']['rank'] = rank
player['Level']['timeOfNextEarn'] = time.time() + config.rp_cooldown
player['Level']['rp'] += gained_rp
pRUM = player['Settings']['rankUpMessage']
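        # Message routing: the server-side "channel" setting wins; otherwise a
        # server setting of "any" with a player setting of "chat"/"any" posts in
        # the invoking channel, and a player setting of "dm"/"any" falls back to
        # a DM (also the fallback when the channel send raises Forbidden).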
if rankedUp and pRUM in ['any','dm','chat']:
servers = data_handler.load("servers")
try:
sRUM = servers[str(ctx.guild.id)]['Messages']['rankUpMessages']
except KeyError:
sRUM = "any"
if sRUM == "channel":
destination = ctx.guild.get_channel(servers[str(ctx.guild.id)]['Messages']['rankUpChannel'])
elif sRUM == "any" and pRUM in ["chat", "any"]:
destination = ctx.channel
elif pRUM in ["dm", "any"]:
destination = ctx.author
try:
await destination.send(f"Congrats {ctx.author.mention}! You've earned enough rank points to rank up to Rank {rank}!")
except discord.Forbidden:
if pRUM == "any":
destination = ctx.author
await destination.send(f"Congrats {ctx.author.mention}! You've earned enough rank points to rank up to Rank {rank}!")
if rank == 1:
await destination.send("You've also unlocked a new colour: Rank 1!")
player['Settings']['colours']['Rank 1'] = "fefefe"
elif rank == 5:
await destination.send("You've also unlocked a new colour: Rank 5!")
player['Settings']['colours']['Rank 5'] = "7af8d3"
elif rank == 10:
await destination.send("You've also unlocked a new colour: Level 10!")
player['Settings']['colours']['Rank 10'] = "327c31"
data_handler.dump(profiles, "profiles")
@profile.group(name="leaderboard", aliases=["lb"], invoke_without_command = True)
async def levelLB(self, ctx, page: int = 1):
"""
Check where people are relative to each other! Not specifying a page will select the first page.
"""
if page < 1:
await ctx.send("That isn't a valid page.")
return
# Sort the dictionary into a list.
profiles = data_handler.load("profiles")
rankings = []
description = ""
for player in profiles:
try:
rankings.append({'id': player, 'rp': profiles[player]['Level']['rp']})
except KeyError:
pass
if page > ((len(rankings) // 10) + 1):
await ctx.send("That page is too large.")
return
def getKey(item):
return item['rp']
rankings = sorted(rankings, reverse = True, key = getKey)
# Add the top 10
end = 10 * page
if len(rankings) < (10 * page):
end = len(rankings)
for i in range((page * 10) - 10, end):
user = await ctx.bot.fetch_user(rankings[i]['id'])
description += f"**{i + 1}.** {user.name}#{user.discriminator} - {rankings[i]['rp']} rank points.\n"
# Add member
index = -1
print(rankings)
for i in range(len(rankings)):
if int(rankings[i]['id']) == ctx.author.id:
index = i
if index <= (end) and index >= (end - 10):
embed = discord.Embed(title="Global rank point leaderboard",
colour=discord.Colour(0xa72693),
description=description,
inline=True)
embed.set_footer(text=f"Requested by {ctx.author.display_name}",
icon_url=ctx.author.avatar_url_as(static_format="png"))
await ctx.send(content="Here you go!", embed=embed)
return
description += "--==ME==--"
for i in [index - 2, index - 1, index, index + 1, index + 2]:
if i != len(rankings):
user = await ctx.bot.fetch_user(rankings[i]['id'])
description += f"\n**{i + 1}.** {user.name}#{user.discriminator} - {rankings[i]['rp']} rank points."
embed = discord.Embed(title="Rank leaderboard",
colour=discord.Colour(0xa72693),
description=description,
inline=True)
embed.set_footer(text=f"Requested by {ctx.author.display_name}",
icon_url=ctx.author.avatar_url_as(static_format="png"))
# Send embed
await ctx.send(content="Here you go!", embed=embed)
@profile.group(name = 'options', aliases = ['option', 'o', 'O'])
async def pOptions(self, ctx, option:str = None, value:str = None):
"""
Checks or change profile options.
To check options, don't specify an option or values.
        To change an option, specify the option and its new value.
Leave the value blank to see possible settings.
"""
profiles = data_handler.load("profiles")
try:
player = profiles[str(ctx.author.id)]
except KeyError:
await ctx.send("An error occured. Please try again.")
if option is None:
embed = discord.Embed(title = "Personal Settings",
description = "To change an option, specify the option and it's new value.\nLeave the value blank to see possible settings.",
colour = int(player["Settings"]["colours"][player["Settings"]["colour"]], 16))
# rankUpMessage setting
if player["Settings"]["rankUpMessage"] == "any":
embed.add_field(name = "`RankUpMessage` **:** `any`",
value = "This means the bot will try to tell you in chat when you level up, or in the server's level up channel. If it can't do either, it will DM you.")
elif player["Settings"]["rankUpMessage"] == "chat":
embed.add_field(name = "`RankUpMessage` **:** `chat`",
value = "This means the bot will try to tell you in chat when you level up, or in the server's level up channel. If it can't do either, it will **not** DM you.")
elif player["Settings"]["rankUpMessage"] == "dm":
embed.add_field(name = "`RankUpMessage` **:** `dm`",
value = "This means the bot shall try to DM you with the rank up message. If that's not possible, you won't be informed.")
elif player["Settings"]["rankUpMessage"] == "none":
embed.add_field(name = "`RankUpMessage` **:** `none`",
value = "This means you will not be told when you rank up.")
# Not sure if I want to use this feature...
# permissions = "None"
# if "*" in player["Settings"]["permissions"]:
# permissions = "*"
# embed.add_field(name = "Permissions",
# value = permissions)
embed.set_footer(text = f"Requested by {ctx.author.display_name}",
icon_url = ctx.author.avatar_url_as(static_format='png'))
embed.set_thumbnail(url = ctx.author.avatar_url_as(static_format = 'png'))
await ctx.send(content = "", embed=embed)
elif option.lower() in ["rum", "rankupmessage", "rankup"]:
if value is None:
embed = discord.Embed(title = "Rank Up Message",
description = "Specify where rank up messages should be allowed.",
colour = int(player["Settings"]["colours"][player["Settings"]["colour"]], 16))
embed.add_field(name = "`any`",
value = "This means the bot will try to tell you in chat when you level up, or in the server's level up channel. If it can't do either, it will DM you.")
embed.add_field(name = "`chat`",
value = "This means the bot will try to tell you in chat when you level up, or in the server's level up channel. If it can't do either, it will **not** DM you.")
embed.add_field(name = "`dm`",
value = "This means the bot shall try to DM you with the rank up message. If that's not possible, you won't be informed.")
embed.add_field(name = "`none`",
value = "This means you will not be told when you rank up.")
embed.set_footer(text = f"Requested by {ctx.author.display_name}",
icon_url = ctx.author.avatar_url_as(static_format='png'))
await ctx.send(content = "", embed=embed)
elif value.lower() == "any":
player["Settings"]["rankUpMessage"] = "any"
await ctx.send(f"{option} updated.")
elif value.lower() == "chat":
player["Settings"]["rankUpMessage"] = "chat"
await ctx.send(f"{option} updated.")
elif value.lower() == "dm":
player["Settings"]["rankUpMessage"] = "dm"
await ctx.send(f"{option} updated.")
elif value.lower() == "none":
player["Settings"]["rankUpMessage"] = "none"
await ctx.send(f"{option} updated.")
profiles[str(ctx.author.id)] = player
data_handler.dump(profiles, "profiles")
@commands.is_owner()
@profile.command(name = 'reset', hidden = True)
async def resetLevel(self, ctx):
"""
Resets All rp. Used when testing rate of earn
"""
profiles = {}
data_handler.dump(profiles, "profiles")
await ctx.send("Reset all profiles.")
def setup(bot):
bot.add_cog(Profiles(bot))
| 42.680672
| 233
| 0.549478
| 19,490
| 0.766387
| 0
| 0
| 19,126
| 0.752074
| 22,843
| 0.898234
| 8,468
| 0.332979
|
83df200991f24e112dfb55e0124bf7a8c642cf9c
| 7,985
|
py
|
Python
|
blender/.blender/scripts/uvcalc_follow_active_coords.py
|
visnz/sketchfab_download
|
976f667d5c2c2864b2bad65aceac0dab5ce51b74
|
[
"Apache-2.0"
] | 41
|
2021-02-18T05:56:26.000Z
|
2021-12-06T07:58:15.000Z
|
blender/.blender/scripts/uvcalc_follow_active_coords.py
|
visnz/sketchfab_download
|
976f667d5c2c2864b2bad65aceac0dab5ce51b74
|
[
"Apache-2.0"
] | 19
|
2021-02-18T05:59:03.000Z
|
2022-01-13T01:00:52.000Z
|
blender/.blender/scripts/uvcalc_follow_active_coords.py
|
visnz/sketchfab_download
|
976f667d5c2c2864b2bad65aceac0dab5ce51b74
|
[
"Apache-2.0"
] | 18
|
2021-02-22T13:32:56.000Z
|
2022-01-22T12:38:29.000Z
|
#!BPY
"""
Name: 'Follow Active (quads)'
Blender: 242
Group: 'UVCalculation'
Tooltip: 'Follow from active quads.'
"""
__author__ = "Campbell Barton"
__url__ = ("blender", "blenderartists.org")
__version__ = "1.0 2006/02/07"
__bpydoc__ = """\
This script sets the UV mapping and image of selected faces from adjacent unselected faces.
for full docs see...
http://mediawiki.blender.org/index.php/Scripts/Manual/UV_Calculate/Follow_active_quads
"""
# ***** BEGIN GPL LICENSE BLOCK *****
#
# Script copyright (C) Campbell J Barton
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# ***** END GPL LICENCE BLOCK *****
# --------------------------------------------------------------------------
from Blender import *
import bpy
import BPyMesh
def extend(EXTEND_MODE,ob):
if EXTEND_MODE == -1:
return
me = ob.getData(mesh=1)
me_verts = me.verts
# Toggle Edit mode
is_editmode = Window.EditMode()
if is_editmode:
Window.EditMode(0)
Window.WaitCursor(1)
t = sys.time()
edge_average_lengths = {}
OTHER_INDEX = 2,3,0,1
FAST_INDICIES = 0,2,1,3 # order is faster
def extend_uvs(face_source, face_target, edge_key):
        '''
        Takes 2 faces and
        projects/extends the UV coords of the source face onto the face next to it.
        Both faces must share an edge.
        '''
def face_edge_vs(vi):
            # assume a quad
return [(vi[0], vi[1]), (vi[1], vi[2]), (vi[2], vi[3]), (vi[3], vi[0])]
uvs_source = face_source.uv
uvs_target = face_target.uv
vidx_source = [v.index for v in face_source]
vidx_target = [v.index for v in face_target]
# vertex index is the key, uv is the value
uvs_vhash_source = dict( [ (vindex, uvs_source[i]) for i, vindex in enumerate(vidx_source)] )
uvs_vhash_target = dict( [ (vindex, uvs_target[i]) for i, vindex in enumerate(vidx_target)] )
edge_idxs_source = face_edge_vs(vidx_source)
edge_idxs_target = face_edge_vs(vidx_target)
source_matching_edge = -1
target_matching_edge = -1
edge_key_swap = edge_key[1], edge_key[0]
try: source_matching_edge = edge_idxs_source.index(edge_key)
except: source_matching_edge = edge_idxs_source.index(edge_key_swap)
try: target_matching_edge = edge_idxs_target.index(edge_key)
except: target_matching_edge = edge_idxs_target.index(edge_key_swap)
edgepair_inner_source = edge_idxs_source[source_matching_edge]
edgepair_inner_target = edge_idxs_target[target_matching_edge]
edgepair_outer_source = edge_idxs_source[OTHER_INDEX[source_matching_edge]]
edgepair_outer_target = edge_idxs_target[OTHER_INDEX[target_matching_edge]]
if edge_idxs_source[source_matching_edge] == edge_idxs_target[target_matching_edge]:
iA= 0; iB= 1 # Flipped, most common
else: # The normals of these faces must be different
iA= 1; iB= 0
# Set the target UV's touching source face, no tricky calc needed,
uvs_vhash_target[edgepair_inner_target[0]][:] = uvs_vhash_source[edgepair_inner_source[iA]]
uvs_vhash_target[edgepair_inner_target[1]][:] = uvs_vhash_source[edgepair_inner_source[iB]]
        # Set the 2 UV's on the target face that are not touching;
        # for this we need to do basic expanding of the source face's UV's
if EXTEND_MODE == 2:
try: # divide by zero is possible
                '''
                measure the length of each face from the middle of each edge to the opposite
                along the axis we are copying, use this
                '''
i1a= edgepair_outer_target[iB]
i2a= edgepair_inner_target[iA]
if i1a>i2a: i1a, i2a = i2a, i1a
i1b= edgepair_outer_source[iB]
i2b= edgepair_inner_source[iA]
if i1b>i2b: i1b, i2b = i2b, i1b
# print edge_average_lengths
factor = edge_average_lengths[i1a, i2a][0] / edge_average_lengths[i1b, i2b][0]
except:
# Div By Zero?
factor = 1.0
uvs_vhash_target[edgepair_outer_target[iB]][:] = uvs_vhash_source[edgepair_inner_source[0]] +factor * (uvs_vhash_source[edgepair_inner_source[0]] - uvs_vhash_source[edgepair_outer_source[1]])
uvs_vhash_target[edgepair_outer_target[iA]][:] = uvs_vhash_source[edgepair_inner_source[1]] +factor * (uvs_vhash_source[edgepair_inner_source[1]] - uvs_vhash_source[edgepair_outer_source[0]])
else:
# same as above but with no factor
uvs_vhash_target[edgepair_outer_target[iB]][:] = uvs_vhash_source[edgepair_inner_source[0]] + (uvs_vhash_source[edgepair_inner_source[0]] - uvs_vhash_source[edgepair_outer_source[1]])
uvs_vhash_target[edgepair_outer_target[iA]][:] = uvs_vhash_source[edgepair_inner_source[1]] + (uvs_vhash_source[edgepair_inner_source[1]] - uvs_vhash_source[edgepair_outer_source[0]])
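            # In both branches the target's outer UVs are the source's inner UVs
            # pushed across the shared edge by the source face's outer-to-inner
            # UV vector, scaled by the edge-loop length ratio when
            # EXTEND_MODE == 2 and by 1 otherwise.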
if not me.faceUV:
me.faceUV= True
face_act = me.activeFace
if face_act == -1:
Draw.PupMenu('ERROR: No active face')
return
face_sel= [f for f in me.faces if len(f) == 4 and f.sel]
face_act_local_index = -1
for i, f in enumerate(face_sel):
if f.index == face_act:
face_act_local_index = i
break
if face_act_local_index == -1:
Draw.PupMenu('ERROR: Active face not selected')
return
# Modes
# 0 unsearched
# 1:mapped, use search from this face. - removed!!
    # 2:all siblings have been searched. don't search again.
face_modes = [0] * len(face_sel)
face_modes[face_act_local_index] = 1 # extend UV's from this face.
    # Edge connectivity
edge_faces = {}
for i, f in enumerate(face_sel):
for edkey in f.edge_keys:
try: edge_faces[edkey].append(i)
except: edge_faces[edkey] = [i]
SEAM = Mesh.EdgeFlags.SEAM
if EXTEND_MODE == 2:
edge_loops = BPyMesh.getFaceLoopEdges(face_sel, [ed.key for ed in me.edges if ed.flag & SEAM] )
me_verts = me.verts
for loop in edge_loops:
looplen = [0.0]
for ed in loop:
edge_average_lengths[ed] = looplen
looplen[0] += (me_verts[ed[0]].co - me_verts[ed[1]].co).length
looplen[0] = looplen[0] / len(loop)
    # remove seams, so we don't map across seams.
for ed in me.edges:
if ed.flag & SEAM:
# remove the edge pair if we can
try: del edge_faces[ed.key]
except: pass
# Done finding seams
# face connectivity - faces around each face
    # only store a list of indices for each face.
face_faces = [[] for i in xrange(len(face_sel))]
for edge_key, faces in edge_faces.iteritems():
if len(faces) == 2: # Only do edges with 2 face users for now
face_faces[faces[0]].append((faces[1], edge_key))
face_faces[faces[1]].append((faces[0], edge_key))
# Now we know what face is connected to what other face, map them by connectivity
ok = True
while ok:
ok = False
for i in xrange(len(face_sel)):
if face_modes[i] == 1: # searchable
for f_sibling, edge_key in face_faces[i]:
if face_modes[f_sibling] == 0:
face_modes[f_sibling] = 1 # mapped and search from.
extend_uvs(face_sel[i], face_sel[f_sibling], edge_key)
face_modes[i] = 1 # we can map from this one now.
ok= True # keep searching
face_modes[i] = 2 # dont search again
print sys.time() - t
if is_editmode:
Window.EditMode(1)
else:
me.update()
Window.RedrawAll()
Window.WaitCursor(0)
def main():
sce = bpy.data.scenes.active
ob = sce.objects.active
# print ob, ob.type
if ob == None or ob.type != 'Mesh':
Draw.PupMenu('ERROR: No mesh object.')
return
# 0:normal extend, 1:edge length
EXTEND_MODE = Draw.PupMenu("Use Face Area%t|Loop Average%x2|None%x0")
extend(EXTEND_MODE,ob)
if __name__ == '__main__':
main()
| 31.313725
| 195
| 0.707201
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,750
| 0.344396
|
83df6ece272b6dd9b07c901d59a3ab3e529c228e
| 1,196
|
py
|
Python
|
bloom/editor/ror_constants.py
|
thomasrogers03/bloom
|
5d49c18a241216aca354aa79971940691e6f33b4
|
[
"Apache-2.0"
] | 9
|
2020-11-22T03:04:52.000Z
|
2022-01-17T15:36:25.000Z
|
bloom/editor/ror_constants.py
|
thomasrogers03/bloom
|
5d49c18a241216aca354aa79971940691e6f33b4
|
[
"Apache-2.0"
] | null | null | null |
bloom/editor/ror_constants.py
|
thomasrogers03/bloom
|
5d49c18a241216aca354aa79971940691e6f33b4
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Thomas Rogers
# SPDX-License-Identifier: Apache-2.0
LOWER_LINK_TAG = 6
UPPER_LINK_TAG = 7
UPPER_WATER_TAG = 9
LOWER_WATER_TAG = 10
UPPER_STACK_TAG = 11
LOWER_STACK_TAG = 12
UPPER_GOO_TAG = 13
LOWER_GOO_TAG = 14
LOWER_LINK_TYPES = {LOWER_LINK_TAG, LOWER_WATER_TAG, LOWER_STACK_TAG, LOWER_GOO_TAG}
UPPER_LINK_TYPES = {UPPER_LINK_TAG, UPPER_WATER_TAG, UPPER_STACK_TAG, UPPER_GOO_TAG}
ROR_TYPE_LINK = "Link"
ROR_TYPE_STACK = "Stack"
ROR_TYPE_WATER = "Water"
ROR_TYPE_GOO = "Goo"
UPPER_TAG_MAPPING = {
ROR_TYPE_LINK: UPPER_LINK_TAG,
ROR_TYPE_STACK: UPPER_STACK_TAG,
ROR_TYPE_WATER: UPPER_WATER_TAG,
ROR_TYPE_GOO: UPPER_GOO_TAG,
}
UPPER_TAG_REVERSE_MAPPING = {
UPPER_LINK_TAG: ROR_TYPE_LINK,
UPPER_STACK_TAG: ROR_TYPE_STACK,
UPPER_WATER_TAG: ROR_TYPE_WATER,
UPPER_GOO_TAG: ROR_TYPE_GOO,
}
LOWER_TAG_MAPPING = {
ROR_TYPE_LINK: LOWER_LINK_TAG,
ROR_TYPE_STACK: LOWER_STACK_TAG,
ROR_TYPE_WATER: LOWER_WATER_TAG,
ROR_TYPE_GOO: LOWER_GOO_TAG,
}
ROR_TILE_MAPPING = {
ROR_TYPE_LINK: 504,
ROR_TYPE_STACK: 504,
ROR_TYPE_WATER: 2915,
ROR_TYPE_GOO: 1120,
}
ROR_TYPES_WITH_WATER = {
ROR_TYPE_WATER,
ROR_TYPE_GOO,
}
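# Usage sketch (assuming Build-engine style tagged sectors): a water
# upper/lower sector pair would carry UPPER_TAG_MAPPING[ROR_TYPE_WATER] == 9
# on top and LOWER_TAG_MAPPING[ROR_TYPE_WATER] == 10 below, and the editor
# would preview the link with tile ROR_TILE_MAPPING[ROR_TYPE_WATER] == 2915.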
| 21.357143
| 84
| 0.76505
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 92
| 0.076923
|
83e3a8eb149951bf1ec4846a449c1ac8b36faf3a
| 6,107
|
py
|
Python
|
tests/validation/tests/v3_api/test_sbx_custom_filter.py
|
sambabox/rancher
|
ccb6b40e5c8bb183dbe20f5a099513eb623ed806
|
[
"Apache-2.0"
] | null | null | null |
tests/validation/tests/v3_api/test_sbx_custom_filter.py
|
sambabox/rancher
|
ccb6b40e5c8bb183dbe20f5a099513eb623ed806
|
[
"Apache-2.0"
] | null | null | null |
tests/validation/tests/v3_api/test_sbx_custom_filter.py
|
sambabox/rancher
|
ccb6b40e5c8bb183dbe20f5a099513eb623ed806
|
[
"Apache-2.0"
] | null | null | null |
from .common import * # NOQA
import requests
AUTH_PROVIDER = os.environ.get('RANCHER_AUTH_PROVIDER', "")
'''
Prerequisite:
Enable SBX without TLS, and using testuser1 as admin user.
Description:
In this test, we are testing the customized user and group search filter
functionalities.
1) For customized user search filter:
The filter looks like:
(&(objectClass=person)(|(sAMAccountName=test*)(sn=test*)(givenName=test*))
[user customized filter])
Here, after we add
userSearchFilter = (memberOf=CN=testgroup5,CN=Users,DC=tad,DC=rancher,DC=io)
we will get only testuser40 and testuser41; otherwise, all users whose name
starts with the search keyword "testuser" would be listed.
2) For customized group search filter:
The filter looks like:
(&(objectClass=group)(sAMAccountName=test)[group customized filter])
Here, after we add groupSearchFilter = (cn=testgroup2)
we will get only testgroup2; otherwise, all groups matching the search
keyword "testgroup" would be listed.
'''
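# Illustrative composed filters (derived from the description above, with the
# hypothetical search keyword "test"):
#   user:  (&(objectClass=person)(|(sAMAccountName=test*)(sn=test*)(givenName=test*))
#           (memberOf=CN=testgroup5,CN=Users,DC=tad,DC=rancher,DC=io))
#   group: (&(objectClass=group)(sAMAccountName=test)(cn=testgroup2))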
# Config Fields
HOSTNAME_OR_IP_ADDRESS = os.environ.get("RANCHER_HOSTNAME_OR_IP_ADDRESS")
PORT = os.environ.get("RANCHER_PORT")
CONNECTION_TIMEOUT = os.environ.get("RANCHER_CONNECTION_TIMEOUT")
SERVICE_ACCOUNT_NAME = os.environ.get("RANCHER_SERVICE_ACCOUNT_NAME")
SERVICE_ACCOUNT_PASSWORD = os.environ.get("RANCHER_SERVICE_ACCOUNT_PASSWORD")
DEFAULT_LOGIN_DOMAIN = os.environ.get("RANCHER_DEFAULT_LOGIN_DOMAIN")
USER_SEARCH_BASE = os.environ.get("RANCHER_USER_SEARCH_BASE")
GROUP_SEARCH_BASE = os.environ.get("RANCHER_GROUP_SEARCH_BASE")
PASSWORD = os.environ.get('RANCHER_USER_PASSWORD', "")
CATTLE_AUTH_URL = \
CATTLE_TEST_URL + \
"/v3-public/"+AUTH_PROVIDER+"Providers/" + \
AUTH_PROVIDER.lower()+"?action=login"
CATTLE_AUTH_PROVIDER_URL = \
CATTLE_TEST_URL + "/v3/"+AUTH_PROVIDER+"Configs/"+AUTH_PROVIDER.lower()
CATTLE_AUTH_PRINCIPAL_URL = CATTLE_TEST_URL + "/v3/principals?action=search"
CATTLE_AUTH_ENABLE_URL = CATTLE_AUTH_PROVIDER_URL + "?action=testAndApply"
CATTLE_AUTH_DISABLE_URL = CATTLE_AUTH_PROVIDER_URL + "?action=disable"
def test_custom_user_and_group_filter_for_SBX():
disable_sbx("testuser1", ADMIN_TOKEN)
enable_sbx_with_customized_filter(
"testuser1",
"(memberOf=CN=testgroup5,CN=Users,DC=tad,DC=rancher,DC=io)",
"", ADMIN_TOKEN)
search_sbx_users("testuser", ADMIN_TOKEN)
disable_sbx("testuser1", ADMIN_TOKEN)
enable_sbx_with_customized_filter(
"testuser1", "", "(cn=testgroup2)", ADMIN_TOKEN)
search_sbx_groups("testgroup", ADMIN_TOKEN)
def disable_sbx(username, token, expected_status=200):
headers = {'Authorization': 'Bearer ' + token}
r = requests.post(CATTLE_AUTH_DISABLE_URL, json={
"enabled": False,
"username": username,
"password": PASSWORD
}, verify=False, headers=headers)
assert r.status_code == expected_status
print("Disable SambaBox request for " +
username + " " + str(expected_status))
def enable_sbx_with_customized_filter(username, usersearchfilter,
groupsearchfilter, token,
expected_status=200):
headers = {'Authorization': 'Bearer ' + token}
sambaBoxConfig = {
"accessMode": "unrestricted",
"userSearchFilter": usersearchfilter,
"groupSearchFilter": groupsearchfilter,
"connectionTimeout": CONNECTION_TIMEOUT,
"defaultLoginDomain": DEFAULT_LOGIN_DOMAIN,
"groupDNAttribute": "distinguishedName",
"groupMemberMappingAttribute": "member",
"groupMemberUserAttribute": "distinguishedName",
"groupNameAttribute": "name",
"groupObjectClass": "group",
"groupSearchAttribute": "sAMAccountName",
"nestedGroupMembershipEnabled": False,
"port": PORT,
"servers": [
HOSTNAME_OR_IP_ADDRESS
],
"serviceAccountUsername": SERVICE_ACCOUNT_NAME,
"userDisabledBitMask": 2,
"userEnabledAttribute": "userAccountControl",
"userLoginAttribute": "sAMAccountName",
"userNameAttribute": "name",
"userObjectClass": "person",
"userSearchAttribute": "sAMAccountName|sn|givenName",
"userSearchBase": USER_SEARCH_BASE,
"serviceAccountPassword": SERVICE_ACCOUNT_PASSWORD
}
r = requests.post(CATTLE_AUTH_ENABLE_URL, json={
"sambaBoxConfig": sambaBoxConfig,
"enabled": True,
"username": username,
"password": PASSWORD
}, verify=False, headers=headers)
assert r.status_code == expected_status
print("Enable SambaBox request for " +
username + " " + str(expected_status))
def search_sbx_users(searchkey, token, expected_status=200):
headers = {'Authorization': 'Bearer ' + token}
r = requests.post(CATTLE_AUTH_PRINCIPAL_URL,
json={'name': searchkey, 'principalType': 'user',
'responseType': 'json'},
verify=False, headers=headers)
assert r.status_code == expected_status
if r.status_code == 200:
print(r.json())
data = r.json()['data']
print(data)
assert len(data) == 2
print(data)
assert \
data[0].get('id') == \
"sambabox_user://CN=test user40," \
"CN=Users,DC=tad,DC=rancher,DC=io"
assert \
data[1].get('id') == \
"sambabox_user://CN=test user41," \
"CN=Users,DC=tad,DC=rancher,DC=io"
def search_sbx_groups(searchkey, token, expected_status=200):
headers = {'Authorization': 'Bearer ' + token}
r = requests.post(CATTLE_AUTH_PRINCIPAL_URL,
json={'name': searchkey, 'principalType': 'group',
'responseType': 'json'},
verify=False, headers=headers)
assert r.status_code == expected_status
if r.status_code == 200:
data = r.json()['data']
assert len(data) == 1
assert \
data[0].get('id') == \
"sambabox_group://CN=testgroup2," \
"CN=Users,DC=tad,DC=rancher,DC=io"
| 37.012121
| 77
| 0.667267
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,604
| 0.426396
|
83e3b262a987de45abbd2e106414db47c397b8e3
| 7,738
|
py
|
Python
|
CIM14/ENTSOE/Dynamics/IEC61970/Dynamics/DynamicsMetaBlock.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | 58
|
2015-04-22T10:41:03.000Z
|
2022-03-29T16:04:34.000Z
|
CIM14/ENTSOE/Dynamics/IEC61970/Dynamics/DynamicsMetaBlock.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | 12
|
2015-08-26T03:57:23.000Z
|
2020-12-11T20:14:42.000Z
|
CIM14/ENTSOE/Dynamics/IEC61970/Dynamics/DynamicsMetaBlock.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | 35
|
2015-01-10T12:21:03.000Z
|
2020-09-09T08:18:16.000Z
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.ENTSOE.Dynamics.IEC61970.Core.CoreIdentifiedObject import CoreIdentifiedObject
class DynamicsMetaBlock(CoreIdentifiedObject):
def __init__(self, blockKind="", proprietary=False, Block=None, MetaBlockSignal=None, MetaBlockReference=None, MetaBlockParameter=None, MetaBlockOutput=None, MetaBlockInput=None, MetaBlockState=None, *args, **kw_args):
"""Initialises a new 'DynamicsMetaBlock' instance.
@param blockKind:
@param proprietary:
@param Block:
@param MetaBlockSignal:
@param MetaBlockReference:
@param MetaBlockParameter:
@param MetaBlockOutput:
@param MetaBlockInput:
@param MetaBlockState:
"""
self.blockKind = blockKind
self.proprietary = proprietary
self._Block = []
self.Block = [] if Block is None else Block
self._MetaBlockSignal = []
self.MetaBlockSignal = [] if MetaBlockSignal is None else MetaBlockSignal
self._MetaBlockReference = []
self.MetaBlockReference = [] if MetaBlockReference is None else MetaBlockReference
self._MetaBlockParameter = []
self.MetaBlockParameter = [] if MetaBlockParameter is None else MetaBlockParameter
self._MetaBlockOutput = []
self.MetaBlockOutput = [] if MetaBlockOutput is None else MetaBlockOutput
self._MetaBlockInput = []
self.MetaBlockInput = [] if MetaBlockInput is None else MetaBlockInput
self._MetaBlockState = []
self.MetaBlockState = [] if MetaBlockState is None else MetaBlockState
super(DynamicsMetaBlock, self).__init__(*args, **kw_args)
_attrs = ["blockKind", "proprietary"]
_attr_types = {"blockKind": str, "proprietary": bool}
_defaults = {"blockKind": "", "proprietary": False}
_enums = {"blockKind": "DynamicsBlockKind"}
_refs = ["Block", "MetaBlockSignal", "MetaBlockReference", "MetaBlockParameter", "MetaBlockOutput", "MetaBlockInput", "MetaBlockState"]
_many_refs = ["Block", "MetaBlockSignal", "MetaBlockReference", "MetaBlockParameter", "MetaBlockOutput", "MetaBlockInput", "MetaBlockState"]
def getBlock(self):
"""
"""
return self._Block
def setBlock(self, value):
for x in self._Block:
x.MetaBlock = None
for y in value:
y._MetaBlock = self
self._Block = value
Block = property(getBlock, setBlock)
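    # Usage sketch (hypothetical blocks b1, b2): `mb.Block = [b1, b2]` first
    # detaches the previous children (x.MetaBlock = None), then points each new
    # child's back-reference at this meta block, keeping both ends of the
    # bidirectional association in sync. The same pattern is repeated for every
    # MetaBlock* collection below.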
def addBlock(self, *Block):
for obj in Block:
obj.MetaBlock = self
def removeBlock(self, *Block):
for obj in Block:
obj.MetaBlock = None
def getMetaBlockSignal(self):
"""
"""
return self._MetaBlockSignal
def setMetaBlockSignal(self, value):
for x in self._MetaBlockSignal:
x.MemberOf_MetaBlock = None
for y in value:
y._MemberOf_MetaBlock = self
self._MetaBlockSignal = value
MetaBlockSignal = property(getMetaBlockSignal, setMetaBlockSignal)
def addMetaBlockSignal(self, *MetaBlockSignal):
for obj in MetaBlockSignal:
obj.MemberOf_MetaBlock = self
def removeMetaBlockSignal(self, *MetaBlockSignal):
for obj in MetaBlockSignal:
obj.MemberOf_MetaBlock = None
def getMetaBlockReference(self):
"""
"""
return self._MetaBlockReference
def setMetaBlockReference(self, value):
for x in self._MetaBlockReference:
x.MemberOf_MetaBlock = None
for y in value:
y._MemberOf_MetaBlock = self
self._MetaBlockReference = value
MetaBlockReference = property(getMetaBlockReference, setMetaBlockReference)
def addMetaBlockReference(self, *MetaBlockReference):
for obj in MetaBlockReference:
obj.MemberOf_MetaBlock = self
def removeMetaBlockReference(self, *MetaBlockReference):
for obj in MetaBlockReference:
obj.MemberOf_MetaBlock = None
def getMetaBlockParameter(self):
"""
"""
return self._MetaBlockParameter
def setMetaBlockParameter(self, value):
for x in self._MetaBlockParameter:
x.MemberOf_MetaBlock = None
for y in value:
y._MemberOf_MetaBlock = self
self._MetaBlockParameter = value
MetaBlockParameter = property(getMetaBlockParameter, setMetaBlockParameter)
def addMetaBlockParameter(self, *MetaBlockParameter):
for obj in MetaBlockParameter:
obj.MemberOf_MetaBlock = self
def removeMetaBlockParameter(self, *MetaBlockParameter):
for obj in MetaBlockParameter:
obj.MemberOf_MetaBlock = None
def getMetaBlockOutput(self):
"""
"""
return self._MetaBlockOutput
def setMetaBlockOutput(self, value):
for x in self._MetaBlockOutput:
x.MemberOf_MetaBlock = None
for y in value:
y._MemberOf_MetaBlock = self
self._MetaBlockOutput = value
MetaBlockOutput = property(getMetaBlockOutput, setMetaBlockOutput)
def addMetaBlockOutput(self, *MetaBlockOutput):
for obj in MetaBlockOutput:
obj.MemberOf_MetaBlock = self
def removeMetaBlockOutput(self, *MetaBlockOutput):
for obj in MetaBlockOutput:
obj.MemberOf_MetaBlock = None
def getMetaBlockInput(self):
"""
"""
return self._MetaBlockInput
def setMetaBlockInput(self, value):
for x in self._MetaBlockInput:
x.MemberOf_MetaBlock = None
for y in value:
y._MemberOf_MetaBlock = self
self._MetaBlockInput = value
MetaBlockInput = property(getMetaBlockInput, setMetaBlockInput)
def addMetaBlockInput(self, *MetaBlockInput):
for obj in MetaBlockInput:
obj.MemberOf_MetaBlock = self
def removeMetaBlockInput(self, *MetaBlockInput):
for obj in MetaBlockInput:
obj.MemberOf_MetaBlock = None
def getMetaBlockState(self):
"""
"""
return self._MetaBlockState
def setMetaBlockState(self, value):
for x in self._MetaBlockState:
x.MemberOf_MetaBlock = None
for y in value:
y._MemberOf_MetaBlock = self
self._MetaBlockState = value
MetaBlockState = property(getMetaBlockState, setMetaBlockState)
def addMetaBlockState(self, *MetaBlockState):
for obj in MetaBlockState:
obj.MemberOf_MetaBlock = self
def removeMetaBlockState(self, *MetaBlockState):
for obj in MetaBlockState:
obj.MemberOf_MetaBlock = None
| 33.938596
| 222
| 0.674334
| 6,545
| 0.845826
| 0
| 0
| 0
| 0
| 0
| 0
| 1,861
| 0.240501
|
83e3deec67e89aa7e42ab0f38a20a3246b563ad9
| 1,551
|
py
|
Python
|
official/cv/ADNet/export_model.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77
|
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
official/cv/ADNet/export_model.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3
|
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
official/cv/ADNet/export_model.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24
|
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import argparse
import numpy as np
from src.options.general import opts
from src.models.ADNet import adnet
from mindspore import Tensor, export, context
parser = argparse.ArgumentParser(
description='ADNet test')
parser.add_argument('--weight_file', default='', type=str, help='The pretrained weight file')
parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU', 'CPU'])
parser.add_argument('--target_device', type=int, default=0)
args = parser.parse_args()
context.set_context(device_target=args.device_target, mode=context.PYNATIVE_MODE, device_id=args.target_device)
opts['num_videos'] = 1
net, domain_specific_nets = adnet(opts, trained_file=args.weight_file)
input_ = np.random.uniform(0.0, 1.0, size=[128, 3, 112, 112]).astype(np.float32)
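# The random batch (128 crops of 3x112x112) only pins down the exported graph's
# input signature; the actual values are irrelevant to the MINDIR artifact.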
export(net, Tensor(input_), file_name='ADNet', file_format='MINDIR')
print('export finished')
| 43.083333
| 111
| 0.728562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 814
| 0.524823
|
83e465c1f4e10369e60b79f24679537b6a23af68
| 189
|
py
|
Python
|
pyradex/tests/setup_package_data.py
|
SpacialTree/pyradex
|
722f9fdc45ff080cdcb151e37aa7075fab548f68
|
[
"BSD-3-Clause"
] | 12
|
2016-01-26T13:39:56.000Z
|
2021-09-01T07:38:04.000Z
|
pyradex/tests/setup_package_data.py
|
SpacialTree/pyradex
|
722f9fdc45ff080cdcb151e37aa7075fab548f68
|
[
"BSD-3-Clause"
] | 27
|
2015-05-29T16:01:31.000Z
|
2022-01-31T23:41:36.000Z
|
pyradex/tests/setup_package_data.py
|
SpacialTree/pyradex
|
722f9fdc45ff080cdcb151e37aa7075fab548f68
|
[
"BSD-3-Clause"
] | 13
|
2015-01-13T10:40:50.000Z
|
2022-01-25T22:24:46.000Z
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
def get_package_data():
paths_test = [os.path.join('data', '*.out')]
return {'pyradex.tests': paths_test}
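# Usage sketch (assuming the astropy-helpers packaging convention): the build
# machinery collects this mapping, e.g. package_data.update(get_package_data()),
# so the tests' data/*.out fixtures ship with the pyradex.tests package.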
| 27
| 63
| 0.693122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 91
| 0.481481
|
83e5340e1845145c339f0d7b935ed161bcb52088
| 566
|
py
|
Python
|
ipaqe_provision_hosts/backend/loader.py
|
apophys/idm-prepare-hosts
|
8075600cab44a1b0c4dbe6fe14a8235725eb06d1
|
[
"MIT"
] | 1
|
2017-04-04T14:35:57.000Z
|
2017-04-04T14:35:57.000Z
|
ipaqe_provision_hosts/backend/loader.py
|
apophys/idm-prepare-hosts
|
8075600cab44a1b0c4dbe6fe14a8235725eb06d1
|
[
"MIT"
] | null | null | null |
ipaqe_provision_hosts/backend/loader.py
|
apophys/idm-prepare-hosts
|
8075600cab44a1b0c4dbe6fe14a8235725eb06d1
|
[
"MIT"
] | null | null | null |
# Author: Milan Kubik, 2017
"""Backend entry point manipulation"""
import logging
from pkg_resources import iter_entry_points
RESOURCE_GROUP = "ipaqe_provision_hosts.backends"
log = logging.getLogger(__name__)
def load_backends(exclude=()):
"""Load all registered modules"""
log.debug("Loading entry points from %s.", RESOURCE_GROUP)
entry_points = {
ep.name: ep.load() for ep in iter_entry_points(RESOURCE_GROUP)
if ep.name not in exclude
}
log.debug("Loaded entry points: %s", entry_points.keys())
return entry_points
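# Usage sketch (backend names are hypothetical; they come from whatever entry
# points are registered under "ipaqe_provision_hosts.backends"):
#   backends = load_backends(exclude=("dummy",))
#   backend_cls = backends["openstack"]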
| 25.727273
| 70
| 0.717314
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 186
| 0.328622
|
83e5b68657474d465d5e1fcc4797976830c9d62f
| 100
|
py
|
Python
|
cfpland_bot/exceptions/__init__.py
|
jonatasbaldin/cfpland-telegram-bot
|
fdd846240705ff6ce7705413336f6d7169a2e7fc
|
[
"MIT"
] | 3
|
2019-04-23T14:16:11.000Z
|
2019-04-24T06:21:10.000Z
|
cfpland_bot/exceptions/__init__.py
|
jonatasbaldin/cfpland-telegram-bot
|
fdd846240705ff6ce7705413336f6d7169a2e7fc
|
[
"MIT"
] | 2
|
2020-07-17T14:53:16.000Z
|
2021-05-09T21:42:43.000Z
|
cfpland_bot/exceptions/__init__.py
|
jonatasbaldin/cfpland-telegram-bot
|
fdd846240705ff6ce7705413336f6d7169a2e7fc
|
[
"MIT"
] | null | null | null |
from .exceptions import ( # noqa: F401
MissingCFPAttributes,
MissingEnvironmentVariable,
)
| 20
| 39
| 0.74
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 12
| 0.12
|
83e738fd60db75ae5d34cea420004504804a6032
| 8,309
|
py
|
Python
|
main_tmp.py
|
tiffanydho/chip2probe
|
2c7e00796e048d39ad4da85b90bf76d021c6be1c
|
[
"MIT"
] | null | null | null |
main_tmp.py
|
tiffanydho/chip2probe
|
2c7e00796e048d39ad4da85b90bf76d021c6be1c
|
[
"MIT"
] | null | null | null |
main_tmp.py
|
tiffanydho/chip2probe
|
2c7e00796e048d39ad4da85b90bf76d021c6be1c
|
[
"MIT"
] | null | null | null |
import urllib.request
import os
import subprocess
import pandas as pd
from tqdm import tqdm
import sys
sys.path.append("probefilter")
sys.path.append("probefilter/libsvm-3.23/python")
from sitesfinder.imads import iMADS
from sitesfinder.imadsmodel import iMADSModel
from sitesfinder.plotcombiner import PlotCombiner
from sitesfinder.pbmescore import PBMEscore
from sitesfinder.sequence import Sequence
from sitesfinder.prediction.basepred import BasePrediction
from cooperative import coopfilter
'''
Summarize
lab-archive -> note the result
information about the data in the plot
'''
chipname = "ets1_GM12878"
chipurls = {
"r1":"https://www.encodeproject.org/files/ENCFF477EHC/@@download/ENCFF477EHC.bam",
"r2":"https://www.encodeproject.org/files/ENCFF371ZBY/@@download/ENCFF371ZBY.bam",
"c1":"https://www.encodeproject.org/files/ENCFF963CVB/@@download/ENCFF963CVB.bam",
"c2":""
}
tagsize = 36
#bedpath = "/data/gordanlab/vincentius/cooperative_probe/hg19_0005_Ets1.bed"
bedpath = "/Users/vincentiusmartin/Research/chip2gcPBM/resources/imads_preds/predictions/hg19_0005_Ets1_filtered.bed"
# Analysis directory
escore_short_path = "/Users/vincentiusmartin/Research/chip2gcPBM/resources/escores/ets1_escores.txt"
escore_map_path = "/Users/vincentiusmartin/Research/chip2gcPBM/resources/escores/index_short_to_long.csv"
# for iMADS, must specify cores and model files
modelcores = ["GGAA", "GGAT"]
modelpaths = ["/Users/vincentiusmartin/Research/chip2gcPBM/resources/imads_preds/models/ets1/ETS1_100nM_Bound_filtered_normalized_transformed_20bp_GGAA_1a2a3mer_format.model",
"/Users/vincentiusmartin/Research/chip2gcPBM/resources/imads_preds/models/ets1/ETS1_100nM_Bound_filtered_normalized_transformed_20bp_GGAT_1a2a3mer_format.model"]
modelwidth = 20 # TODO: confirm if we can get length without manually specifying it
imads_cutoff = 0.2128
model_kmers = [1,2,3]
escore_cutoff = 0.4
# ============================
outdir = "../result/%s" % chipname
# From https://stackoverflow.com/questions/15644964/python-progress-bar-and-downloads
class DownloadProgressBar(tqdm):
def update_to(self, b=1, bsize=1, tsize=None):
if tsize is not None:
self.total = tsize
self.update(b * bsize - self.n)
def download_url(url, output_path):
with DownloadProgressBar(unit='B', unit_scale=True,
miniters=1, desc=url.split('/')[-1]) as t:
urllib.request.urlretrieve(url, filename=output_path, reporthook=t.update_to)
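# Usage sketch: download_url(chipurls["r1"], saveto) streams the BAM file while
# update_to translates urlretrieve's (block_count, block_size, total_size)
# reporthook callbacks into tqdm progress updates.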
if __name__=="__main__":
if not os.path.exists(outdir):
os.makedirs(outdir)
chipdata_path = "%s/chipseq_data" % (outdir)
if not os.path.exists(chipdata_path):
os.makedirs(chipdata_path)
chipdata = {}
chip_info = "ChIP-seq data for %s:\n" % chipname
# ===== Download ChIP-seq data =====
for key in chipurls:
fname = os.path.basename(chipurls[key])
saveto = os.path.join(chipdata_path, fname)
chipdata[key] = saveto
chip_info += "%s: %s\n" % (key,fname)
print("Downloading %s to %s:" % (key,saveto))
#download_url(chipurls[key], saveto)
with open("%s/chipinfo.txt" % (outdir), 'w') as f:
f.write(chip_info)
macs_result_path = "%s/macs_result" % (outdir)
if not os.path.exists(macs_result_path):
os.makedirs(macs_result_path)
print("Running macs...")
subprocess.call(["./macs2.sh",chipdata["r1"],chipdata["r2"],chipdata["c1"],chipdata["c2"],"%s/%s" % (macs_result_path,chipname), str(tagsize)],shell=False)
print("Finished running macs, results are saved in %s" % macs_result_path)
idr_result_path = "%s/idr_result" % (outdir)
if not os.path.exists(idr_result_path):
os.makedirs(idr_result_path)
print("Running idrs...")
subprocess.call(["./idr.sh","%s/%s" % (macs_result_path,chipname),idr_result_path],shell=False)
analysis_result_path = "%s/analysis_result" % (outdir)
if not os.path.exists(analysis_result_path):
os.makedirs(analysis_result_path)
print("Running analysis...")
pwd = os.path.dirname(os.path.realpath(__file__))
pu1_path = "%s/%s%s" % (macs_result_path,chipname,"_r1_treat_pileup.bdg")
pu2_path = "%s/%s%s" % (macs_result_path,chipname,"_r2_treat_pileup.bdg")
pu_both_path = "%s/%s%s" % (macs_result_path,chipname,"_bothrs_treat_pileup.bdg")
nrwp_preidr_path = "%s/%s%s" % (macs_result_path,chipname,"_bothrs_peaks.narrowPeak")
nrwp_postidr_path = "%s/%s" % (idr_result_path,"idr_001p_wlist.005i")
args_rscript = [pu1_path, pu2_path, pu_both_path, nrwp_preidr_path, nrwp_postidr_path, bedpath, analysis_result_path, chipname]
#print(["R_analysis/main.R",pwd] + args_rscript)
#subprocess.call(["srun","Rscript","R_analysis/main.R",pwd] + args_rscript,shell=False)
subprocess.call(["Rscript","R_analysis/main.R",pwd] + args_rscript,shell=False)
# ============== PLOT AND FILTERING PART ==============
# First, we can just load the models to avoid having to reload this on every iteration
models = [iMADSModel(modelpath, modelcore, modelwidth, model_kmers) for modelpath, modelcore in zip(modelpaths, modelcores)]
imads = iMADS(models, imads_cutoff) # 0.2128 is for the ETS1 cutoff
escore = PBMEscore(escore_short_path, escore_map_path)
sitelist_path = "%s/%s" % (analysis_result_path, "sitefiles_list.txt")
with open(sitelist_path, 'r') as f:
sitelist = [line.strip() for line in f.readlines()]
for sitepath in sitelist:
print(sitepath)
filename = os.path.basename(os.path.splitext(sitepath)[0])
print("Making sites plot for %s" % filename)
seqdf = pd.read_csv(sitepath, sep='\t')
# Make Escore object
es_preds = escore.predict_sequences(seqdf)
eplots = escore.plot(es_preds)
# Make iMADS plot
imads_preds = imads.predict_sequences(seqdf)
imadsplots = imads.plot(imads_preds)
plots = [imadsplots, eplots]
pc = PlotCombiner() # can do this just once but not a big deal
plotpath = "%s/sitesplot_%s.pdf" % (analysis_result_path, filename)
pc.plot_seq_combine(plots, filepath=plotpath)
filtered_sites = {}
print("Site filtering...")
for key in es_preds:
bs = Sequence(es_preds[key],imads_preds[key])
if bs.site_count() == 2:
filtered_sites[key] = bs
#site_list = [{**{"key":site, "sequence":es_preds[site].sequence},**filtered_sites[site].get_sites_dict()} for site in filtered_sites]
#columns = ["key", "site_start_1", "site_start_2", "site_end_1", "site_end_2", "site_pos_1", "site_pos_2", "imads_score_1", "imads_score_2", "sequence"]
#pd.DataFrame(site_list).to_csv("%s/sitelist_%s.pdf" % (analysis_result_path), index=False, columns=columns, float_format='%.4f')
seqdict = {}
funcdict = {}
filtered_probes = []
# TODO: tmr look at 110,271
for key in filtered_sites:
#for key in ["sequence11"]:
# Visualization part
seqdict["%s-wt" % key] = filtered_sites[key].sequence
for idx,mut in enumerate([[0],[1],[0,1]]):
mutseq = filtered_sites[key].abolish_sites(mut,escore)
seqdict["%s-m%d" % (key,idx + 1)] = mutseq.sequence
funcdict["%s-m%d" % (key,idx + 1)] = mutseq.plot_functions
if coopfilter.filter_coopseq(seqdict["%s-wt"%key], seqdict["%s-m1"%key],
seqdict["%s-m2"%key], seqdict["%s-m3"%key],
filtered_sites[key].get_sites_dict(), escore):
filtered_probes.append({"key":key, "wt":seqdict["%s-wt"%key], "m1":seqdict["%s-m1"%key],
"m2":seqdict["%s-m2"%key], "m3":seqdict["%s-m3"%key]})
pp = escore.plot(escore.predict_sequences(seqdict),additional_functions=funcdict)
pc.plot_seq_combine([pp], filepath="%s/plot_mut_%s.pdf" % (analysis_result_path,filename))
        # Skip writing when no probe passed the filter; to_csv with explicit columns fails on an empty frame
        if filtered_probes:
            pd.DataFrame(filtered_probes).to_csv("%s/mutated_probes_%s.tsv" % (analysis_result_path,filename),sep="\t",index=False,columns=["key","wt","m1","m2","m3"])
#print(fname,header)
| 46.161111
| 175
| 0.672764
| 184
| 0.022145
| 0
| 0
| 0
| 0
| 0
| 0
| 3,136
| 0.377422
|
83e8b5d26b60139b83dc6ffd9717b442165e6180
| 145
|
py
|
Python
|
7 kyu/Complete The Pattern 2.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 6
|
2020-09-03T09:32:25.000Z
|
2020-12-07T04:10:01.000Z
|
7 kyu/Complete The Pattern 2.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 1
|
2021-12-13T15:30:21.000Z
|
2021-12-13T15:30:21.000Z
|
7 kyu/Complete The Pattern 2.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | null | null | null |
def pattern(n):
    res = ""
    for i in range(n, 0, -1):
        for j in range(i):
            res += str(n - j)
        res += "\n"
    return res[:-1]
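
# Illustrative example (derived from the code above, not part of the kata statement):
# print(pattern(4)) gives
# 4321
# 432
# 43
# 4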
| 20.714286
| 27
| 0.427586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| 0.041379
|
83e959ba13c92777006cc78510ef1401b37ed85b
| 633
|
py
|
Python
|
src/dagos/platform/__init__.py
|
DAG-OS/dagos
|
ac663ecf1cb9abe12669136e2b2e22b936ec88b5
|
[
"MIT"
] | null | null | null |
src/dagos/platform/__init__.py
|
DAG-OS/dagos
|
ac663ecf1cb9abe12669136e2b2e22b936ec88b5
|
[
"MIT"
] | 8
|
2022-02-20T15:43:03.000Z
|
2022-03-27T19:04:16.000Z
|
src/dagos/platform/__init__.py
|
DAG-OS/dagos
|
ac663ecf1cb9abe12669136e2b2e22b936ec88b5
|
[
"MIT"
] | null | null | null |
import dagos.platform.platform_utils as platform_utils
from .command_runner import CommandRunner
from .command_runner import ContainerCommandRunner
from .command_runner import LocalCommandRunner
from .platform_domain import CommandNotAvailableIssue
from .platform_domain import OperatingSystem
from .platform_domain import PlatformIssue
from .platform_domain import PlatformScope
from .platform_domain import UnsupportedOperatingSystemIssue
from .platform_exceptions import UnsupportedOperatingSystemException
from .platform_exceptions import UnsupportedPlatformException
from .platform_support_checker import PlatformSupportChecker
| 48.692308
| 68
| 0.903633
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
83eb304b78bbd24868418bb775b73ade9aefef43
| 1,593
|
py
|
Python
|
scripts/find_guids_without_referents.py
|
DanielSBrown/osf.io
|
98dda2ac237377197acacce78274bc0a4ce8f303
|
[
"Apache-2.0"
] | 1
|
2015-10-02T18:35:53.000Z
|
2015-10-02T18:35:53.000Z
|
scripts/find_guids_without_referents.py
|
DanielSBrown/osf.io
|
98dda2ac237377197acacce78274bc0a4ce8f303
|
[
"Apache-2.0"
] | 13
|
2020-03-24T15:29:41.000Z
|
2022-03-11T23:15:28.000Z
|
scripts/find_guids_without_referents.py
|
DanielSBrown/osf.io
|
98dda2ac237377197acacce78274bc0a4ce8f303
|
[
"Apache-2.0"
] | null | null | null |
"""Finds Guids that do not have referents or that point to referents that no longer exist.
E.g. a node was created and given a guid but an error caused the node to
get deleted, leaving behind a guid that points to nothing.
"""
import sys
from modularodm import Q
from framework.guid.model import Guid
from website.app import init_app
from scripts import utils as scripts_utils
import logging
logger = logging.getLogger(__name__)
def main():
if 'dry' not in sys.argv:
scripts_utils.add_file_logger(logger, __file__)
# Set up storage backends
init_app(routes=False)
logger.info('{n} invalid GUID objects found'.format(n=len(get_targets())))
logger.info('Finished.')
def get_targets():
"""Find GUIDs with no referents and GUIDs with referents that no longer exist."""
# Use a loop because querying MODM with Guid.find(Q('referent', 'eq', None))
# only catches the first case.
ret = []
# NodeFiles were once a GuidStored object and are no longer used any more.
# However, they still exist in the production database. We just skip over them
    # for now, but they can probably be removed in the future.
# There were also 10 osfguidfile objects that lived in a corrupt repo that
# were not migrated to OSF storage, so we skip those as well. /sloria /jmcarp
for each in Guid.find(Q('referent.1', 'nin', ['nodefile', 'osfguidfile'])):
if each.referent is None:
logger.info('GUID {} has no referent.'.format(each._id))
ret.append(each)
return ret
if __name__ == '__main__':
main()
| 36.204545
| 90
| 0.702448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 932
| 0.58506
|
83eb4550225e76cac1d76f96f09f214fbc122c76
| 13,836
|
py
|
Python
|
test/test_app.py
|
IoT-Partners/Platform
|
ecb17ca5e3e5cf447ecb48c22bfab36b102f01b0
|
[
"MIT"
] | null | null | null |
test/test_app.py
|
IoT-Partners/Platform
|
ecb17ca5e3e5cf447ecb48c22bfab36b102f01b0
|
[
"MIT"
] | null | null | null |
test/test_app.py
|
IoT-Partners/Platform
|
ecb17ca5e3e5cf447ecb48c22bfab36b102f01b0
|
[
"MIT"
] | null | null | null |
"""
This script is for testing/calling in several different ways
functions from QRColorChecker modules.
@author: Eduard Cespedes Borràs
@mail: eduard@iot-partners.com
"""
import unittest
import hashlib
import dateutil.parser
from chalicelib.server import Server
import sys
import json
from datetime import datetime
sys.path.append('../chalicelib')
class AppTest(unittest.TestCase):
def setUp(self):
self.sns_client = TestSNS()
self.log = TestLog()
self.dynamodb_device_data = TestDynamoDB()
self.dynamodb_device = TestDynamoDB()
self.str_data = '{"DevEUI_uplink": {"Time": "2017-03-11T11:52:50.412+01:00","DevEUI": "0004A30B001C3306",' \
'"FPort": "7","FCntUp": "1","MType": "2","FCntDn": "2","payload_hex": "10bb17f18198100734",' \
'"mic_hex": "c00c1cfa","Lrcid": "00000127","LrrRSSI": "-64.000000","LrrSNR": "9.000000",' \
'"SpFact": "11","SubBand": "G1","Channel": "LC2","DevLrrCnt": "1","Lrrid": "08060412","Late":' \
' "0","LrrLAT": "41.550377","LrrLON": "2.241691","Lrrs": {"Lrr": {"Lrrid": "08060412",' \
'"Chain": "0","LrrRSSI": "-64.000000","LrrSNR": "9.000000","LrrESP": "-64.514969"}},' \
'"CustomerID": "100001774",' \
'"CustomerData": {"alr":{"pro":"LORA/Generic","ver":"1"}},' \
'"ModelCfg": "0","DevAddr": "260113E2","AckRequested": "0",' \
'"rawMacCommands": "0703070307030703"}}'
def test_parse_lora_json(self):
jsonbody = json.loads(self.str_data)
parsed_json = Server.parse_lora_json(self.str_data)
time = jsonbody["DevEUI_uplink"]["Time"]
payload = jsonbody["DevEUI_uplink"]["payload_hex"]
device_id = jsonbody["DevEUI_uplink"]["DevAddr"]
virtual_tx = device_id + "-" + time
hash_object = hashlib.sha256(virtual_tx.encode())
hex_dig = hash_object.hexdigest()
dt = dateutil.parser.parse(time)
        strftime = dt.strftime("%s")  # "%s" (epoch seconds) is a platform-specific strftime extension
        time_millis = int(strftime) * 1000
self.assertEqual(parsed_json["time_json"], time)
self.assertEqual(parsed_json["timeStamp"], int(time_millis))
self.assertEqual(parsed_json["payload"], payload)
self.assertEqual(parsed_json["DevEUI"], device_id)
self.assertEqual(parsed_json["type"], "LORA")
self.assertEqual(parsed_json["extra"], json.dumps(jsonbody))
self.assertEqual(parsed_json["virtual_tx"], hex_dig)
# http "https://d8dsx2bkn9.execute-api.eu-west-1.amazonaws.com/api/sigfox?time=1515360218&id=IDTest&data=02180AE4"
def test_parse_sigfox(self):
data_dic = {
"context": {
"httpMethod": "GET",
"identity": {
"sourceIp": "127.0.0.1"
},
"resourcePath": "/sigfox"
},
"headers": {
"accept": "*/*",
"accept-encoding": "gzip, deflate",
"connection": "keep-alive",
"host": "localhost:8000",
"user-agent": "HTTPie/0.9.8"
},
"method": "GET",
"query_params": {
"data": "10bb17f18198100734",
"id": "260113E2",
"time": "1515360218"
},
"stage_vars": {},
"uri_params": {}
}
parsed_dic = Server.parse_sigfox_dic(data_dic)
d = datetime.utcfromtimestamp(int("1515360218") * 1000 / 1e3)
json_date = str(d.isoformat()) + "Z"
virtual_tx = "260113E2" + "-" + json_date
hash_object = hashlib.sha256(virtual_tx.encode())
hex_dig = hash_object.hexdigest()
self.assertEqual(parsed_dic["time_json"], json_date)
self.assertEqual(parsed_dic["timeStamp"], int("1515360218"))
self.assertEqual(parsed_dic["payload"], "10bb17f18198100734")
self.assertEqual(parsed_dic["DevEUI"], "260113E2")
self.assertEqual(parsed_dic["type"], "SIGFOX")
self.assertEqual(parsed_dic["virtual_tx"], hex_dig)
# http "https://d8dsx2bkn9.execute-api.eu-west-1.amazonaws.com/api/sigfox?time=1515360218&id=IDTest&data=02180AE4&test=test"
def test_parse_sigfox_with_test_data(self):
data_dic = {
"method": "GET",
"query_params": {
"data": "10bb17f18198100734",
"id": "260113E2",
"time": "1515360218",
"test": "test"
},
"stage_vars": {},
"uri_params": {}
}
parsed_dic = Server.parse_sigfox_dic(data_dic)
self.assertEqual(parsed_dic["timeStamp"], int("1515360218"))
self.assertEqual(parsed_dic["payload"], "10bb17f18198100734")
self.assertEqual(parsed_dic["DevEUI"], "260113E2")
self.assertEqual(parsed_dic["type"], "SIGFOX")
self.assertEqual(parsed_dic["test"], "test")
def test_publishing_data_to_SNS(self):
data_to_publish = {
"DevEUI": "260113E3",
"extra": {
"DevEUI_uplink": {
"CustomerID": "100001774",
"DevAddr": "260113E3"
}
},
"payload": "010000beef",
"timeStamp": 1499366509000,
"time_json": "2017-07-06T18:41:49.51+02:00",
"type": "LORA",
"virtual_tx": "2dd66154468fa5d433420f5bad5d3f580f3dab46fa33e127ef69c511f641ae2f"
}
server = Server(None, None, self.sns_client, self.log)
expected_message = json.dumps(data_to_publish)
server.publish_data_store_device(data_to_publish)
self.assertEqual(1, self.sns_client.return_published_times())
self.assertEqual(expected_message, self.sns_client.return_message())
self.assertEqual("arn:aws:sns:eu-west-1:488643450383:StoreDeviceData", self.sns_client.return_topicarn())
def test_persist_data_to_DynamoDB(self):
server = Server(self.dynamodb_device_data, None, None, self.log)
expected_item = {
'title': "The Big New Movie",
'year': 2015,
'info': {
'plot': "Nothing happens at all.",
'rating': "0"
}
}
server.persist_data(expected_item)
self.assertEqual(1, self.dynamodb_device_data.return_persisted_times())
self.assertEqual(expected_item, self.dynamodb_device_data.return_persisted_item())
def test_parsing_none_known_payload(self):
expected_item = {"virtual_tx": "A001", "time_json": "2017-01-21T12:12:12.001Z", "timeStamp": 1499366509000,
"payload": "A1bb17f18198100734",
"DevEUI": "260113E3", "type": "LORA", "extra": "{}"}
geolocation = Server.parse_payload(expected_item)
self.assertIsNone(geolocation)
def test_parsing_geolocation_payload(self):
expected_item = {"virtual_tx": "A001", "time_json": "2017-01-21T12:12:12.001Z", "timeStamp": 1499366509000,
"payload": "10bb17f18198100734",
"DevEUI": "260113E3", "type": "LORA", "extra": "{}"}
geolocation = Server.parse_payload(expected_item)
self.assertIsNotNone(geolocation)
payload = expected_item["payload"]
lat_hex = payload[2:8]
lat_str = int(lat_hex, 16)
lat = (lat_str * 180 / 16777215) - 90
lng_hex = payload[8:14]
lng_str = int(lng_hex, 16)
lng = (lng_str * 360 / 16777215) - 180
self.assertEqual(1499366509000, geolocation["timeStamp"])
self.assertIsNotNone(geolocation["GEO"])
# AppTest.printGeoLocation(lat, lat_hex, lat_str, lng_hex, lng_str, payload, lng)
self.assertEqual(str(lat), geolocation["GEO"]["lat"])
self.assertEqual(str(lng), geolocation["GEO"]["lng"])
# Example query:
# http "https://d8dsx2bkn9.execute-api.eu-west-1.amazonaws.com/api/sigfox?time=1510098998&id=260113E3&data=02180AE4"
def test_parsing_keep_alive_payload(self):
expected_item = {"virtual_tx": "A001", "time_json": "2017-01-21T12:12:12.001Z", "timeStamp": 1499366509000,
"payload": "02180AE4",
"DevEUI": "260113E3", "type": "LORA", "extra": "{}"}
keep_alive = Server.parse_payload(expected_item)
self.assertIsNotNone(keep_alive)
payload = expected_item["payload"]
interval = payload[2:4]
interval_int = int(interval, 16)
voltatge_hex = payload[4:8]
voltatge_hex_dec = int(voltatge_hex, 16) / 1000
self.assertEqual(1499366509000, keep_alive["timeStamp"])
self.assertIsNotNone(keep_alive["KA"])
self.assertEqual(str(interval_int), keep_alive["KA"]["interval"])
self.assertEqual(str(voltatge_hex_dec), keep_alive["KA"]["voltage"])
def test_dispatch_alarm_Keep_Alive_low_value(self):
server = Server(None, None, self.sns_client, self.log)
virtual_tx = "AE1234567"
data = {"timeStamp": "1499366509000",
"DevEUI": "260113E3",
"KA":
{"interval": "24",
"voltage": "2.456"}}
server.dispatch_alarm(virtual_tx, data)
data.update({"virtual_tx": virtual_tx})
expected_message = json.dumps(data)
self.assertEqual(1, self.sns_client.return_published_times())
self.assertEqual("arn:aws:sns:eu-west-1:488643450383:NotifySNS", self.sns_client.return_topicarn())
self.assertEqual(expected_message, self.sns_client.return_message())
self.assertEqual("Triggered Alarm 260113E3", self.sns_client.return_subject())
def test_no_dispatch_alarm_for_Keep_Alive_high_value(self):
server = Server(None, None, self.sns_client, self.log)
data = {"timeStamp": "1499366509000",
"DevEUI": "260113E3",
"KA":
{"interval": "24",
"voltage": "2.856"}}
server.dispatch_alarm("AE1234567", data)
self.assertEqual(0, self.sns_client.return_published_times())
def test_not_update_data_in_DynamoDB_if_None(self):
server = Server(self.dynamodb_device_data, None, None, self.log)
expected_item = None
server.update_data(expected_item)
self.assertEqual(0, self.dynamodb_device_data.return_updated_times())
def test_update_data_in_DynamoDB(self):
server = Server(self.dynamodb_device_data, None, None, self.log)
expected_item = {
"timeStamp": 1499366509000,
"DevEUI": "260113E3",
"GEO": {"lat": "12.5", "lng": "1.4"}
}
server.update_data(expected_item)
self.assertEqual(1, self.dynamodb_device_data.return_updated_times())
self.assertEqual(
{"timeStamp": 1499366509000, "DevEUI": "260113E3"},
self.dynamodb_device_data.return_updated_item()["Key"])
self.assertEqual(
'SET geo = :val',
self.dynamodb_device_data.return_updated_item()["UpdateExpression"])
self.assertEqual(
{':val': {"lat": "12.5", "lng": "1.4"}},
self.dynamodb_device_data.return_updated_item()["ExpressionAttributeValues"])
@staticmethod
def printGeoLocation(lat, lat_hex, lat_str, lng_hex, lng_str, payload, lng):
        str_packet_id = payload[:2]
print("payload:\t" + payload)
print("packed_id:\t" + str_packet_id)
print("lat_hex:\t" + lat_hex)
print("lat_str\t" + str(lat_str))
print("lat\t" + str(lat))
print("lng_hex:\t" + lng_hex)
print("lng_str:\t" + str(lng_str))
print("lat: " + str(lat) + ", lng: " + str(lng))
class TestLog:
def __init__(self):
self.message = ''
self.logged = 0
def debug(self, message):
self.message = message
self.logged += 1
return message
def return_message(self):
return self.message
def return_logging_times(self):
return self.logged
class TestSNS:
def __init__(self):
self.Message = ''
self.TopicArn = ''
self.Subject = ''
self.published = 0
def publish(self, TopicArn, Subject, Message):
self.Message = Message
self.TopicArn = TopicArn
self.Subject = Subject
self.published += 1
def return_topicarn(self):
return self.TopicArn
def return_message(self):
return self.Message
def return_published_times(self):
return self.published
def return_subject(self):
return self.Subject
class TestDynamoDB:
def __init__(self):
self.Item = ''
self.persisted = 0
self.updated = 0
self.Key = ''
self.UpdateExpression = ''
self.ExpressionAttributeValues = ''
self.ReturnValues = ''
def put_item(self, Item):
self.Item = Item
self.persisted += 1
def update_item(self, Key, UpdateExpression, ExpressionAttributeValues, ReturnValues):
self.Key = Key
self.UpdateExpression = UpdateExpression
self.ExpressionAttributeValues = ExpressionAttributeValues
self.ReturnValues = ReturnValues
self.updated += 1
def return_persisted_item(self):
return self.Item
def return_persisted_times(self):
return self.persisted
def return_updated_item(self):
return {"Key": self.Key,
"UpdateExpression": self.UpdateExpression,
"ExpressionAttributeValues": self.ExpressionAttributeValues,
"ReturnValues": self.ReturnValues}
def return_updated_times(self):
return self.updated
| 37.700272
| 128
| 0.594681
| 13,478
| 0.974055
| 0
| 0
| 482
| 0.034834
| 0
| 0
| 3,668
| 0.265086
|
83ecbdee9bb1d4607592c7d48726a571593fde4f
| 3,497
|
py
|
Python
|
test/test_config.py
|
beremaran/spdown
|
59e5ea6996be51ad015f9da6758e2ce556b9fb94
|
[
"MIT"
] | 2
|
2019-08-13T15:13:58.000Z
|
2019-10-04T09:09:24.000Z
|
test/test_config.py
|
beremaran/spdown
|
59e5ea6996be51ad015f9da6758e2ce556b9fb94
|
[
"MIT"
] | 4
|
2021-02-08T20:23:42.000Z
|
2022-03-11T23:27:07.000Z
|
test/test_config.py
|
beremaran/spdown
|
59e5ea6996be51ad015f9da6758e2ce556b9fb94
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import json
import unittest
from collections import OrderedDict
from spdown.config import Config
TEST_CONFIG_PATHS = OrderedDict([
('local', 'config.json'),
('home', os.path.join(
os.path.expanduser('~'), '.config',
'spdown', 'config'
))
])
TEST_CONFIG = {
'download_directory': '~/TestMusic'
}
class TestConfig(unittest.TestCase):
@staticmethod
def get_backup_path(config_location):
return '{}.bak'.format(
TEST_CONFIG_PATHS[config_location]
)
@staticmethod
def backup_configuration(config_location):
backup_path = TestConfig.get_backup_path(config_location)
if os.path.exists(TEST_CONFIG_PATHS[config_location]):
os.rename(
TEST_CONFIG_PATHS[config_location],
backup_path
)
@staticmethod
def restore_configuration(config_location):
backup_path = TestConfig.get_backup_path(config_location)
if os.path.exists(backup_path):
os.rename(
backup_path,
TEST_CONFIG_PATHS[config_location]
)
@staticmethod
def create_test_config(config_location):
TestConfig.backup_configuration(config_location)
with open(TEST_CONFIG_PATHS[config_location], 'w') as f:
json.dump(TEST_CONFIG, f)
def test_find_configuration_file(self):
config = Config()
for config_path in TEST_CONFIG_PATHS.keys():
TestConfig.backup_configuration(config_path)
for config_path in TEST_CONFIG_PATHS.keys():
config.set_config_path(None)
TestConfig.create_test_config(config_path)
config._load(exit_on_error=False)
TestConfig.restore_configuration(config_path)
self.assertEqual(TEST_CONFIG, config._configuration)
def test_get(self):
config = Config()
for config_path in TEST_CONFIG_PATHS.keys():
TestConfig.backup_configuration(config_path)
for config_path in TEST_CONFIG_PATHS.keys():
config.set_config_path(None)
TestConfig.create_test_config(config_path)
download_directory = config.get('download_directory')
TestConfig.restore_configuration(config_path)
self.assertEqual(download_directory, TEST_CONFIG['download_directory'])
def test_set(self):
config = Config()
for config_path in TEST_CONFIG_PATHS.keys():
TestConfig.backup_configuration(config_path)
for config_path in TEST_CONFIG_PATHS.keys():
config.set_config_path(None)
TestConfig.create_test_config(config_path)
config.set('download_directory', 'test')
TestConfig.restore_configuration(config_path)
self.assertEqual(config.get('download_directory'), 'test')
def test_fix_path_errors(self):
config = Config()
for config_path in TEST_CONFIG_PATHS.keys():
TestConfig.backup_configuration(config_path)
for config_path in TEST_CONFIG_PATHS.keys():
config.set_config_path(None)
TestConfig.create_test_config(config_path)
config.set('download_directory', '~/Music/')
config._configuration = None
self.assertEqual(config.get('download_directory'), '~/Music')
TestConfig.restore_configuration(config_path)
if __name__ == "__main__":
unittest.main()
| 31.223214
| 83
| 0.659994
| 3,083
| 0.881613
| 0
| 0
| 955
| 0.273091
| 0
| 0
| 280
| 0.080069
|
83ed5076917201fcac6f1e8e51002b51c7395c85
| 2,167
|
py
|
Python
|
external/emulation/tests/test_config.py
|
ai2cm/fv3net
|
e62038aee0a97d6207e66baabd8938467838cf51
|
[
"MIT"
] | 1
|
2021-12-14T23:43:35.000Z
|
2021-12-14T23:43:35.000Z
|
external/emulation/tests/test_config.py
|
ai2cm/fv3net
|
e62038aee0a97d6207e66baabd8938467838cf51
|
[
"MIT"
] | 195
|
2021-09-16T05:47:18.000Z
|
2022-03-31T22:03:15.000Z
|
external/emulation/tests/test_config.py
|
ai2cm/fv3net
|
e62038aee0a97d6207e66baabd8938467838cf51
|
[
"MIT"
] | null | null | null |
from emulation._emulate.microphysics import TimeMask
from emulation.config import (
EmulationConfig,
ModelConfig,
StorageConfig,
_load_nml,
_get_timestep,
_get_storage_hook,
get_hooks,
)
import emulation.zhao_carr
import datetime
def test_EmulationConfig_from_dict():
seconds = 60
month = 2
config = EmulationConfig.from_dict(
{
"model": {
"path": "some-path",
"online_schedule": {
"period": seconds,
"initial_time": datetime.datetime(2000, month, 1),
},
}
}
)
assert config.model.online_schedule.period == datetime.timedelta(seconds=seconds)
assert config.model.online_schedule.initial_time.month == month
def test_ModelConfig_no_interval():
config = ModelConfig(path="")
assert len(list(config._build_masks())) == 0
def test_ModelConfig_with_interval():
def schedule(time):
return 1.0
config = ModelConfig(path="", online_schedule=schedule)
time_schedule = [
mask for mask in config._build_masks() if isinstance(mask, TimeMask)
][0]
assert time_schedule.schedule == schedule
def test__get_timestep(dummy_rundir):
namelist = _load_nml()
timestep = _get_timestep(namelist)
assert timestep == 900
def test__load_nml(dummy_rundir):
namelist = _load_nml()
assert namelist["coupler_nml"]["hours"] == 1
def test__get_storage_hook(dummy_rundir):
config = StorageConfig()
hook = _get_storage_hook(config)
assert hook
def test_get_hooks(dummy_rundir):
gscond, model, storage = get_hooks()
assert storage
assert model
assert gscond
def test_ModelConfig_mask_where_fortran_cloud_identical():
config = ModelConfig(path="", mask_gscond_identical_cloud=True)
(a,) = config._build_masks()
assert a == emulation.zhao_carr.mask_where_fortran_cloud_identical
def test_ModelConfig_mask_gscond_zero_cloud():
config = ModelConfig(path="", mask_gscond_zero_cloud=True)
(a,) = config._build_masks()
assert a == emulation.zhao_carr.mask_where_fortran_cloud_vanishes_gscond
| 25.494118
| 85
| 0.684818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 91
| 0.041994
|
83ed572ee1b1140fe9364cb212822f09bee7de36
| 323
|
py
|
Python
|
sorting/insertion_sort.py
|
src24/algos
|
b1ac1049be6adaafedaa0572f009668e2c8d3809
|
[
"MIT"
] | null | null | null |
sorting/insertion_sort.py
|
src24/algos
|
b1ac1049be6adaafedaa0572f009668e2c8d3809
|
[
"MIT"
] | null | null | null |
sorting/insertion_sort.py
|
src24/algos
|
b1ac1049be6adaafedaa0572f009668e2c8d3809
|
[
"MIT"
] | null | null | null |
from typing import List
# O(n^2)
def insertion_sort(arr: List[int], desc: bool = False) -> None:
for i, item in enumerate(arr):
if i == 0:
continue
j: int = i - 1
        # XOR with `desc` flips the comparison, so one loop handles both sort orders
        while j >= 0 and (arr[j] > item) ^ desc:
arr[j + 1] = arr[j]
j -= 1
arr[j + 1] = item
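
# Usage sketch (sorts in place):
#   data = [5, 2, 9, 1]
#   insertion_sort(data)             # data == [1, 2, 5, 9]
#   insertion_sort(data, desc=True)  # data == [9, 5, 2, 1]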
| 23.071429
| 63
| 0.4613
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 0.024768
|
83ee40ca37d52089325ca67f4f809d3e842c7b0b
| 8,939
|
py
|
Python
|
tests/test_client.py
|
ocefpaf/pystac-client
|
ddf0e0566b2b1783a4d32d3d77f9f51b80270df3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_client.py
|
ocefpaf/pystac-client
|
ddf0e0566b2b1783a4d32d3d77f9f51b80270df3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_client.py
|
ocefpaf/pystac-client
|
ddf0e0566b2b1783a4d32d3d77f9f51b80270df3
|
[
"Apache-2.0"
] | null | null | null |
from datetime import datetime
from urllib.parse import urlsplit, parse_qs
from dateutil.tz import tzutc
import pystac
import pytest
from pystac_client import Client
from pystac_client.conformance import ConformanceClasses
from .helpers import STAC_URLS, TEST_DATA, read_data_file
class TestAPI:
@pytest.mark.vcr
def test_instance(self):
api = Client.open(STAC_URLS['PLANETARY-COMPUTER'])
# An API instance is also a Catalog instance
assert isinstance(api, pystac.Catalog)
assert str(api) == '<Client id=microsoft-pc>'
@pytest.mark.vcr
def test_links(self):
api = Client.open(STAC_URLS['PLANETARY-COMPUTER'])
# Should be able to get collections via links as with a typical PySTAC Catalog
collection_links = api.get_links('child')
assert len(collection_links) > 0
collections = list(api.get_collections())
assert len(collection_links) == len(collections)
first_collection = api.get_single_link('child').resolve_stac_object(root=api).target
assert isinstance(first_collection, pystac.Collection)
def test_spec_conformance(self):
"""Testing conformance against a ConformanceClass should allow APIs using legacy URIs to pass."""
client = Client.from_file(str(TEST_DATA / 'planetary-computer-root.json'))
# Set conformsTo URIs to conform with STAC API - Core using official URI
client._stac_io._conformance = ['https://api.stacspec.org/v1.0.0-beta.1/core']
assert client._stac_io.conforms_to(ConformanceClasses.CORE)
@pytest.mark.vcr
def test_no_conformance(self):
"""Should raise a NotImplementedError if no conformance info can be found. Luckily, the test API doesn't publish
a "conformance" link so we can just remove the "conformsTo" attribute to test this."""
client = Client.from_file(str(TEST_DATA / 'planetary-computer-root.json'))
client._stac_io._conformance = []
with pytest.raises(NotImplementedError):
client._stac_io.assert_conforms_to(ConformanceClasses.CORE)
with pytest.raises(NotImplementedError):
client._stac_io.assert_conforms_to(ConformanceClasses.ITEM_SEARCH)
@pytest.mark.vcr
def test_no_stac_core_conformance(self):
"""Should raise a NotImplementedError if the API does not conform to the STAC API - Core spec."""
client = Client.from_file(str(TEST_DATA / 'planetary-computer-root.json'))
client._stac_io._conformance = client._stac_io._conformance[1:]
with pytest.raises(NotImplementedError):
client._stac_io.assert_conforms_to(ConformanceClasses.CORE)
assert client._stac_io.conforms_to(ConformanceClasses.ITEM_SEARCH)
@pytest.mark.vcr
def test_from_file(self):
api = Client.from_file(STAC_URLS['PLANETARY-COMPUTER'])
assert api.title == 'Microsoft Planetary Computer STAC API'
def test_invalid_url(self):
with pytest.raises(TypeError):
Client.open()
def test_get_collections_with_conformance(self, requests_mock):
"""Checks that the "data" endpoint is used if the API published the collections conformance class."""
pc_root_text = read_data_file("planetary-computer-root.json")
pc_collection_dict = read_data_file("planetary-computer-aster-l1t-collection.json",
parse_json=True)
# Mock the root catalog
requests_mock.get(STAC_URLS["PLANETARY-COMPUTER"], status_code=200, text=pc_root_text)
api = Client.open(STAC_URLS["PLANETARY-COMPUTER"])
assert api._stac_io.conforms_to(ConformanceClasses.COLLECTIONS)
# Get & mock the collections (rel type "data") link
collections_link = api.get_single_link("data")
requests_mock.get(collections_link.href,
status_code=200,
json={
"collections": [pc_collection_dict],
"links": []
})
_ = next(api.get_collections())
history = requests_mock.request_history
assert len(history) == 2
assert history[1].url == collections_link.href
def test_custom_request_parameters(self, requests_mock):
pc_root_text = read_data_file("planetary-computer-root.json")
pc_collection_dict = read_data_file("planetary-computer-collection.json", parse_json=True)
requests_mock.get(STAC_URLS["PLANETARY-COMPUTER"], status_code=200, text=pc_root_text)
init_qp_name = "my-param"
init_qp_value = "some-value"
api = Client.open(STAC_URLS['PLANETARY-COMPUTER'], parameters={init_qp_name: init_qp_value})
        # Ensure that the Client will use the /collections endpoint and not fall back
        # to traversing child links.
assert api._stac_io.conforms_to(ConformanceClasses.COLLECTIONS)
# Get the /collections endpoint
collections_link = api.get_single_link("data")
# Mock the request
requests_mock.get(collections_link.href,
status_code=200,
json={
"collections": [pc_collection_dict],
"links": []
})
# Make the collections request
_ = next(api.get_collections())
history = requests_mock.request_history
assert len(history) == 2
actual_qs = urlsplit(history[1].url).query
actual_qp = parse_qs(actual_qs)
# Check that the param from the init method is present
assert init_qp_name in actual_qp
assert len(actual_qp[init_qp_name]) == 1
assert actual_qp[init_qp_name][0] == init_qp_value
def test_get_collections_without_conformance(self, requests_mock):
"""Checks that the "data" endpoint is used if the API published the collections conformance class."""
pc_root_dict = read_data_file("planetary-computer-root.json", parse_json=True)
pc_collection_dict = read_data_file("planetary-computer-aster-l1t-collection.json",
parse_json=True)
# Remove the collections conformance class
pc_root_dict["conformsTo"].remove(
"http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30")
# Remove all child links except for the collection that we are mocking
pc_collection_href = next(link["href"] for link in pc_collection_dict["links"]
if link["rel"] == "self")
pc_root_dict["links"] = [
link for link in pc_root_dict["links"]
if link["rel"] != "child" or link["href"] == pc_collection_href
]
# Mock the root catalog
requests_mock.get(STAC_URLS["PLANETARY-COMPUTER"], status_code=200, json=pc_root_dict)
api = Client.open(STAC_URLS["PLANETARY-COMPUTER"])
assert not api._stac_io.conforms_to(ConformanceClasses.COLLECTIONS)
# Mock the collection
requests_mock.get(pc_collection_href, status_code=200, json=pc_collection_dict)
_ = next(api.get_collections())
history = requests_mock.request_history
assert len(history) == 2
assert history[1].url == pc_collection_href
class TestAPISearch:
@pytest.fixture(scope='function')
def api(self):
return Client.from_file(str(TEST_DATA / 'planetary-computer-root.json'))
def test_search_conformance_error(self, api):
"""Should raise a NotImplementedError if the API doesn't conform to the Item Search spec. Message should
include information about the spec that was not conformed to."""
# Set the conformance to only STAC API - Core
api._stac_io._conformance = [api._stac_io._conformance[0]]
with pytest.raises(NotImplementedError) as excinfo:
api.search(limit=10, max_items=10, collections='mr-peebles')
assert str(ConformanceClasses.ITEM_SEARCH) in str(excinfo.value)
def test_no_search_link(self, api):
# Remove the search link
api.remove_links('search')
with pytest.raises(NotImplementedError) as excinfo:
api.search(limit=10, max_items=10, collections='naip')
assert 'No link with "rel" type of "search"' in str(excinfo.value)
def test_search(self, api):
results = api.search(bbox=[-73.21, 43.99, -73.12, 44.05],
collections='naip',
limit=10,
max_items=20,
datetime=[datetime(2020, 1, 1, 0, 0, 0, tzinfo=tzutc()), None])
assert results._parameters == {
'bbox': (-73.21, 43.99, -73.12, 44.05),
'collections': ('naip', ),
'limit': 10,
'datetime': '2020-01-01T00:00:00Z/..'
}
| 41.193548
| 120
| 0.650632
| 8,650
| 0.96767
| 0
| 0
| 2,280
| 0.255062
| 0
| 0
| 2,507
| 0.280456
|
83ee7b4543ab79bc0395dcd6db36fd4ba26a265c
| 378
|
py
|
Python
|
setup.py
|
mattpatey/text2qrcode
|
f0cbb006241ba20c76b16d67815836fd44890315
|
[
"Xnet",
"X11"
] | 1
|
2020-11-13T20:59:08.000Z
|
2020-11-13T20:59:08.000Z
|
setup.py
|
mattpatey/text2qrcode
|
f0cbb006241ba20c76b16d67815836fd44890315
|
[
"Xnet",
"X11"
] | null | null | null |
setup.py
|
mattpatey/text2qrcode
|
f0cbb006241ba20c76b16d67815836fd44890315
|
[
"Xnet",
"X11"
] | null | null | null |
from setuptools import (
find_packages,
setup,
)
setup(
name="text2qrcode",
version="1.0-a1",
description="Render a QR code image from input text",
author="Matt Patey",
packages=find_packages(),
install_requires=["qrcode", "pillow"],
entry_points={
"console_scripts": [
"t2qr=text2qrcode.main:main"
]
}
)
| 19.894737
| 57
| 0.595238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 134
| 0.354497
|
83ef28442d472afe61e0a90f60e0718bf2a46056
| 363
|
py
|
Python
|
test/test_image.py
|
arkagogoldey/cloud_coverage_image_analysis
|
dde9954a27f70e77f9760455d12eeb6e458f8dba
|
[
"MIT"
] | 1
|
2021-10-16T09:26:53.000Z
|
2021-10-16T09:26:53.000Z
|
test/test_image.py
|
arkagogoldey/cloud_coverage_image_analysis
|
dde9954a27f70e77f9760455d12eeb6e458f8dba
|
[
"MIT"
] | null | null | null |
test/test_image.py
|
arkagogoldey/cloud_coverage_image_analysis
|
dde9954a27f70e77f9760455d12eeb6e458f8dba
|
[
"MIT"
] | null | null | null |
import numpy as np
import random
from proyecto2.image import Image
class TestImage:
def test_pixels(self):
for _ in range(5):
x = random.randrange(1920, 4368, 1)
y = random.randrange(1080, 2912, 1)
matrix = np.random.rand(y, x)
image = Image(matrix)
assert (matrix == image.pixels).all()
| 24.2
| 49
| 0.584022
| 293
| 0.807163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
83effd89a13b4f1b810c9a266a94d6710b5a3afc
| 2,236
|
py
|
Python
|
test_autolens/unit/pipeline/phase/point_source/test_phase_point_source.py
|
agarwalutkarsh554/PyAutoLens
|
72d2f5c39834446e72879fd119b591e52b36cac4
|
[
"MIT"
] | null | null | null |
test_autolens/unit/pipeline/phase/point_source/test_phase_point_source.py
|
agarwalutkarsh554/PyAutoLens
|
72d2f5c39834446e72879fd119b591e52b36cac4
|
[
"MIT"
] | null | null | null |
test_autolens/unit/pipeline/phase/point_source/test_phase_point_source.py
|
agarwalutkarsh554/PyAutoLens
|
72d2f5c39834446e72879fd119b591e52b36cac4
|
[
"MIT"
] | null | null | null |
from os import path
import numpy as np
import pytest
import autofit as af
import autolens as al
from autolens.mock import mock
pytestmark = pytest.mark.filterwarnings(
"ignore:Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of "
"`arr[seq]`. In the future this will be interpreted as an arrays index, `arr[np.arrays(seq)]`, which will result "
"either in an error or a different result."
)
directory = path.dirname(path.realpath(__file__))
class TestMakeAnalysis:
def test__positions__settings_inputs_are_used_in_positions(
self, positions_x2, positions_x2_noise_map
):
phase_positions_x2 = al.PhasePointSource(
settings=al.SettingsPhasePositions(),
search=mock.MockSearch(),
positions_solver=mock.MockPositionsSolver(model_positions=positions_x2),
)
assert isinstance(phase_positions_x2.settings, al.SettingsPhasePositions)
analysis = phase_positions_x2.make_analysis(
positions=positions_x2, positions_noise_map=positions_x2_noise_map
)
assert analysis.positions.in_grouped_list == positions_x2.in_grouped_list
assert (
analysis.noise_map.in_grouped_list == positions_x2_noise_map.in_grouped_list
)
def test___phase_info_is_made(
self, phase_positions_x2, positions_x2, positions_x2_noise_map
):
phase_positions_x2.make_analysis(
positions=positions_x2,
positions_noise_map=positions_x2_noise_map,
results=mock.MockResults(),
)
file_phase_info = path.join(
phase_positions_x2.search.paths.output_path, "phase.info"
)
phase_info = open(file_phase_info, "r")
search = phase_info.readline()
cosmology = phase_info.readline()
phase_info.close()
assert search == "Optimizer = MockSearch \n"
assert (
cosmology
== 'Cosmology = FlatLambdaCDM(name="Planck15", H0=67.7 km / (Mpc s), Om0=0.307, Tcmb0=2.725 K, '
"Neff=3.05, m_nu=[0. 0. 0.06] eV, Ob0=0.0486) \n"
)
| 33.373134
| 119
| 0.654293
| 1,704
| 0.762075
| 0
| 0
| 0
| 0
| 0
| 0
| 459
| 0.205277
|
83f051af9726ef346dde4699fd1ff70473f62a92
| 1,737
|
py
|
Python
|
convert.py
|
lfe999/xenforo-scraper
|
a06dd9412658941b269889932534d071ad30367e
|
[
"MIT"
] | 2
|
2021-07-30T03:11:06.000Z
|
2022-03-07T15:40:30.000Z
|
convert.py
|
lfe999/xenforo-scraper
|
a06dd9412658941b269889932534d071ad30367e
|
[
"MIT"
] | null | null | null |
convert.py
|
lfe999/xenforo-scraper
|
a06dd9412658941b269889932534d071ad30367e
|
[
"MIT"
] | 1
|
2021-07-07T16:05:07.000Z
|
2021-07-07T16:05:07.000Z
|
formats = {"KiB": 1024, "KB": 1000,
"MiB": 1024**2, "MB": 1000**2,
"GiB": 1024**3, "GB": 1000**3,
"TiB": 1024**4, "TB": 1000**4}
# Converts shorthand into number of bytes, ex. 1KiB = 1024
def shortToBytes(short):
if short is not None:
try:
for format, multiplier in formats.items():
if format.lower() in short.lower():
return int(float(short.lower().replace(format.lower(), ""))*multiplier)
raise Exception("No match found for unit multipliers ex. KiB, MB.")
except AttributeError:
raise Exception("Shorthand must be a string, not integer.")
else:
return None
# Converts the number of bytes into shorthand expression, ex. 2500 = 2.5KB
def bytesToShort(bytes):
reverse = dict(reversed(list(formats.items()))).items()
for format, multiplier in reverse:
try:
if bytes/multiplier > 1:
return str(round(bytes/multiplier, 2)) + format
        except TypeError:
            raise Exception("Bytes must be an integer.")
# Run tests only if the file is run as a standalone script.
if __name__ == '__main__':
# Tests
import pytest
assert shortToBytes("103kib") == 105472
assert shortToBytes("103GIB") == 110595407872
assert shortToBytes("0.5TB") == 500000000000
assert bytesToShort(105472) == "105.47KB"
assert bytesToShort(110595407872) == "110.6GB"
assert bytesToShort(500000000000) == "500.0GB"
with pytest.raises(Exception):
print(bytesToShort("k2jfzsk2"))
with pytest.raises(Exception):
print(shortToBytes("ad2wd2"))
with pytest.raises(Exception):
print(shortToBytes(25252))
| 35.44898
| 91
| 0.614853
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 439
| 0.252735
|
83f147a88053ee096c8c450bcf0c3e2aae29aca2
| 12,023
|
py
|
Python
|
old/pro/src/GUI/lofarBFgui.py
|
peijin94/LOFAR-Sun-tools
|
23ace5a5e8c0bdaa0cbb5ab6e37f6527716d16f3
|
[
"MIT"
] | null | null | null |
old/pro/src/GUI/lofarBFgui.py
|
peijin94/LOFAR-Sun-tools
|
23ace5a5e8c0bdaa0cbb5ab6e37f6527716d16f3
|
[
"MIT"
] | null | null | null |
old/pro/src/GUI/lofarBFgui.py
|
peijin94/LOFAR-Sun-tools
|
23ace5a5e8c0bdaa0cbb5ab6e37f6527716d16f3
|
[
"MIT"
] | null | null | null |
# The UI and analysis of the LOFAR solar beam-formed data
import sys
# insert at 1, 0 is the script path (or '' in REPL)
sys.path.insert(1, '..')
from PyQt5.QtWidgets import *
from PyQt5.QtGui import QIcon
from PyQt5.uic import loadUi
from PyQt5.QtCore import Qt
import matplotlib
from matplotlib.backends.backend_qt5agg import (NavigationToolbar2QT as NavigationToolbar)
import matplotlib.pyplot as plt
import numpy as np
from scipy.interpolate import griddata
from skimage import measure
import matplotlib.dates as mdates
import resource_rc
from lofarSun.lofarData import LofarDataBF
from pandas.plotting import register_matplotlib_converters
import platform
import matplotlib as mpl
# try to use the precise epoch
mpl.rcParams['date.epoch']='1970-01-01T00:00:00'
try:
mdates.set_epoch('1970-01-01T00:00:00')
except:
pass
register_matplotlib_converters()
if platform.system() != "Darwin":
matplotlib.use('TkAgg')
else:
print("Detected MacOS, using the default matplotlib backend: " +
matplotlib.get_backend())
class MatplotlibWidget(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
loadUi("layout.ui", self)
self.move(10,30)
self.init_graph()
self.setWindowIcon(QIcon(":/GUI/resource/lofar.png"))
self.addToolBar(NavigationToolbar(self.mplw.canvas, self))
self.dataset = LofarDataBF()
self.asecpix = 20
self.x_select = 0
self.y_select = 0
self.selected = False
self.interpset = 'cubic'
self.keyaval = False
self.extrapolate = True
self.interp_nearest.setChecked(False)
self.interp_linear.setChecked(False)
self.interp_cubic.setChecked(True)
self.show_disk.setChecked(True)
self.show_FWHM.setChecked(True)
self.show_peak.setChecked(True)
self.action_prefix = '<span style=\" font-size:12pt; font-weight:600; color:#18B608;\" >[Action] </span>'
self.info_prefix = '<span style=\" font-size:12pt; font-weight:600; color:#0424AE;\" >[Info] </span>'
# define all the actions
self.button_gen.clicked.connect(self.showBeamForm)
self.pointing.clicked.connect(self.showPointing)
self.loadsav.triggered.connect(self.update_lofarBF)
self.loadcube.triggered.connect(self.update_lofar_cube)
self.loadfits.triggered.connect(self.update_lofar_fits)
self.spectroPlot.clicked.connect(self.spectroPlot_func)
self.t_idx_select = 0
self.f_idx_select = 0
self.interp_nearest.toggled.connect(lambda:self.btnstate(self.interp_nearest))
self.interp_linear.toggled.connect(lambda:self.btnstate(self.interp_linear))
self.interp_cubic.toggled.connect(lambda:self.btnstate(self.interp_cubic))
QShortcut(Qt.Key_Up, self, self.keyUp)
QShortcut(Qt.Key_Down, self, self.keyDown)
QShortcut(Qt.Key_Left, self, self.keyLeft)
QShortcut(Qt.Key_Right, self, self.keyRight)
def btnstate(self,b):
self.interpset = (b.text().lower())
@staticmethod
def move_window(window, dx, dy):
"""Move a matplotlib window to a given x and y offset, independent of backend"""
if matplotlib.get_backend() == "Qt5Agg":
window.move(dx, dy)
else:
window.wm_geometry("+{dx}+{dy}".format(dx=dx, dy=dy))
def init_graph(self):
self.mplw.canvas.axes.clear()
self.mplw.canvas.axes.imshow(plt.imread('resource/login.png'))
self.mplw.canvas.axes.set_axis_off()
self.mplw.canvas.draw()
def update_lofarBF(self):
options = QFileDialog.Options()
options |= QFileDialog.DontUseNativeDialog
self.dataset.fname, _ = QFileDialog.getOpenFileName(self,"QFileDialog.getOpenFileName()",
"","IDL Files (*.sav);;All Files (*)",
options=options)
if self.dataset.fname:
print(self.dataset.fname)
if len(self.dataset.fname):
self.dataset.load_sav(self.dataset.fname)
self.mplw.canvas.axes.clear()
self.draw_ds_after_load()
self.beamSet.clear()
self.beamSet.addItems([str(x).rjust(3,'0') for x in range(self.dataset.xb.shape[0])])
def update_lofar_cube(self):
options = QFileDialog.Options()
options |= QFileDialog.DontUseNativeDialog
self.dataset.fname, _ = QFileDialog.getOpenFileName(self,"QFileDialog.getOpenFileName()",
"","IDL Files (*.sav);;All Files (*)",
options=options)
if self.dataset.fname:
print(self.dataset.fname)
if len(self.dataset.fname):
self.dataset.load_sav_cube(self.dataset.fname)
self.mplw.canvas.axes.clear()
self.draw_ds_after_load()
self.beamSet.clear()
self.beamSet.addItems([str(x).rjust(3,'0') for x in range(self.dataset.xb.shape[0])])
def update_lofar_fits(self):
options = QFileDialog.Options()
options |= QFileDialog.DontUseNativeDialog
self.dataset.fname, _ = QFileDialog.getOpenFileName(self,"QFileDialog.getOpenFileName()",
"","Fits Files (*.fits);;All Files (*)",
options=options)
if self.dataset.fname:
print(self.dataset.fname)
if len(self.dataset.fname):
self.dataset.load_fits(self.dataset.fname)
self.mplw.canvas.axes.clear()
self.draw_ds_after_load()
self.beamSet.clear()
self.beamSet.addItems([str(x).rjust(3,'0') for x in range(self.dataset.xb.shape[0])])
def draw_ds_after_load(self,idx_cur=0,conn_click=True):
data_ds = np.array(self.dataset.data_cube[:, :, idx_cur])
#print(self.dataset.time_ds)
ax_cur = self.mplw.canvas.axes
self.dataset.plot_bf_dyspec(idx_cur,ax_cur)
self.mplw.canvas.draw()
self.mplw.canvas.mpl_connect('button_release_event', self.onclick)
self.log.append(self.action_prefix+'Load : '+self.dataset.fname+' Beam-'+str(idx_cur))
def spectroPlot_func(self):
self.mplw.canvas.axes.clear()
this_idx = self.beamSet.currentIndex()
print(this_idx)
self.draw_ds_after_load(idx_cur=this_idx)
def onclick(self,event):
        if not event.dblclick and event.button == 1:  # single left-click only (note: ~bool is always truthy, so 'not' is required)
print('%s click: button=%d, x=%d, y=%d, xdata=%f, ydata=%f' %
('double' if event.dblclick else 'single', event.button,
event.x, event.y, event.xdata, event.ydata))
self.x_select = event.xdata
self.y_select = event.ydata
self.input_t.setText(mdates.num2date(self.x_select).strftime('%H:%M:%S.%f'))
self.input_f.setText('{:06.3f}'.format(self.y_select))
self.t_idx_select = (np.abs(self.dataset.time_ds - self.x_select)).argmin()
self.f_idx_select = (np.abs(self.dataset.freqs_ds - self.y_select)).argmin()
if self.selected:
if len(self.mplw.canvas.axes.lines)>0:
self.mplw.canvas.axes.lines.remove(self.mplw.canvas.axes.lines[0])
self.mplw.canvas.axes.plot(self.x_select, self.y_select, 'w+', markersize=25, linewidth=2)
self.mplw.canvas.draw()
self.selected = True
self.keyaval = True
if plt.fignum_exists(4):
self.showBeamForm()
def showPointing(self):
if self.dataset.havedata:
plt.figure(5)
self.move_window(plt.get_current_fig_manager().window, 1150, 550)
plt.plot(self.dataset.xb,self.dataset.yb,'b.')
for idx in list(range(self.dataset.xb.shape[0])):
plt.text(self.dataset.xb[idx],self.dataset.yb[idx],str(idx))
ax = plt.gca()
ax.set_xlabel('X (Arcsec)')
ax.set_ylabel('Y (Arcsec)')
ax.set_aspect('equal', 'box')
plt.show()
def keyUp(self):
self.stepNear(1,0)
def keyDown(self):
self.stepNear(-1,0)
def keyLeft(self):
self.stepNear(0,-1)
def keyRight(self):
self.stepNear(0,1)
def stepNear(self,f_move,t_move):
if self.keyaval:
self.t_idx_select = self.t_idx_select + t_move
self.f_idx_select = self.f_idx_select + f_move
self.x_select = self.dataset.time_ds[self.t_idx_select]
self.y_select = self.dataset.freqs_ds[self.f_idx_select]
self.input_t.setText(mdates.num2date(self.x_select).strftime('%H:%M:%S.%f'))
self.input_f.setText('{:06.3f}'.format(self.y_select))
if self.selected:
self.mplw.canvas.axes.lines.remove(self.mplw.canvas.axes.lines[0])
self.mplw.canvas.axes.plot(self.x_select, self.y_select, 'w+', markersize=25, linewidth=2)
self.mplw.canvas.draw()
self.selected = True
self.keyaval = True
if plt.fignum_exists(4):
self.showBeamForm()
def showBeamForm(self):
print(self.selected)
if not self.selected:
QMessageBox.about(self, "Attention", "Select a time-frequency point!")
elif self.dataset.havedata:
X,Y,data_bf,x,y,Ibeam = self.dataset.bf_image_by_idx(self.f_idx_select,\
self.t_idx_select,fov=3000,asecpix=self.asecpix,\
extrap=self.extrapolate,interpm=self.interpset)
            self.log.append(self.action_prefix + ' Beam Form at ' +
                            mdates.num2date(self.x_select).strftime('%H:%M:%S.%f') + ' of ' +
                            '{:06.3f}'.format(self.y_select) + 'MHz')
fig = plt.figure(4)
self.move_window(plt.get_current_fig_manager().window, 1150, 50)
fig.clear()
ax = plt.gca()
im = ax.imshow(data_bf, cmap='gist_heat',
origin='lower',extent=[np.min(X),np.max(X),np.min(Y),np.max(Y)])
ax.set_xlabel('X (Arcsec)')
ax.set_ylabel('Y (Arcsec)')
ax.set_aspect('equal', 'box')
plt.colorbar(im)
print(np.min(data_bf)+(np.max(data_bf)-np.min(data_bf))/2.0)
FWHM_thresh = np.max(data_bf)/2.0 #np.min(data_bf)+(np.max(data_bf)-np.min(data_bf))/2.0
img_bi = data_bf > FWHM_thresh
if self.show_FWHM.isChecked():
ax.contour(X,Y,data_bf,levels=[FWHM_thresh,FWHM_thresh*2*0.9],colors=['deepskyblue','forestgreen'])
bw_lb = measure.label(img_bi)
rg_lb = measure.regionprops(bw_lb)
x_peak = X[np.where(data_bf == np.max(data_bf))]
y_peak = Y[np.where(data_bf == np.max(data_bf))]
rg_id = bw_lb[np.where(data_bf == np.max(data_bf))]
area_peak = rg_lb[int(rg_id)-1].area
self.log.append(self.info_prefix+' [XY_peak:('+
'{:7.1f}'.format(x_peak[0])+','+
'{:7.1f}'.format(y_peak[0])+') asec] '+
'[Area:('+'{:7.1f}'.format(area_peak*(self.asecpix/60)**2)+
') amin2]')
if self.show_FWHM.isChecked():
ax.contour(X,Y,np.abs(bw_lb-rg_id)<0.1,levels=[0.5],colors=['lime'])
if self.show_disk.isChecked():
ax.plot(960*np.sin(np.arange(0,2*np.pi,0.001)),
960*np.cos(np.arange(0,2*np.pi,0.001)),'w')
if self.show_peak.isChecked():
ax.plot(x_peak,y_peak,'k+')
plt.show()
app = QApplication([])
window = MatplotlibWidget()
window.show()
app.exec_()
| 38.909385
| 115
| 0.592198
| 10,890
| 0.905764
| 0
| 0
| 300
| 0.024952
| 0
| 0
| 1,377
| 0.11453
|
83f17a06a8bc16cfd0111230bb492518bce41c73
| 2,169
|
py
|
Python
|
otter/api.py
|
sean-morris/otter-grader
|
72135c78a69836dbbc920e25f737d4382bee0ec1
|
[
"BSD-3-Clause"
] | null | null | null |
otter/api.py
|
sean-morris/otter-grader
|
72135c78a69836dbbc920e25f737d4382bee0ec1
|
[
"BSD-3-Clause"
] | null | null | null |
otter/api.py
|
sean-morris/otter-grader
|
72135c78a69836dbbc920e25f737d4382bee0ec1
|
[
"BSD-3-Clause"
] | null | null | null |
"""
"""
__all__ = ["export_notebook", "grade_submission"]
import os
import sys
import shutil
import tempfile
from contextlib import redirect_stdout
try:
from contextlib import nullcontext
except ImportError:
from .utils import nullcontext # nullcontext is new in Python 3.7
from .argparser import get_parser
from .export import export_notebook
from .run import main as run_grader
PARSER = get_parser()
ARGS_STARTER = ["run"]
def grade_submission(ag_path, submission_path, quiet=False, debug=False):
"""
Runs non-containerized grading on a single submission at ``submission_path`` using the autograder
configuration file at ``ag_path``.
Creates a temporary grading directory using the ``tempfile`` library and grades the submission
by replicating the autograder tree structure in that folder and running the autograder there. Does
not run environment setup files (e.g. ``setup.sh``) or install requirements, so any requirements
should be available in the environment being used for grading.
Print statements executed during grading can be suppressed with ``quiet``.
Args:
ag_path (``str``): path to autograder zip file
submission_path (``str``): path to submission file
quiet (``bool``, optional): whether to suppress print statements during grading; default
``False``
debug (``bool``, optional): whether to run the submission in debug mode (without ignoring
errors)
Returns:
``otter.test_files.GradingResults``: the results object produced during the grading of the
submission.
"""
dp = tempfile.mkdtemp()
args_list = ARGS_STARTER.copy()
args_list.extend([
"-a", ag_path,
"-o", dp,
submission_path,
"--no-logo",
])
if debug:
args_list.append("--debug")
args = PARSER.parse_args(args_list)
if quiet:
f = open(os.devnull, "w")
cm = redirect_stdout(f)
else:
cm = nullcontext()
with cm:
results = run_grader(**vars(args))
if quiet:
f.close()
shutil.rmtree(dp)
return results
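
# Minimal usage sketch (file names below are hypothetical placeholders):
#
#     from otter.api import grade_submission
#     results = grade_submission("autograder.zip", "hw01.ipynb", quiet=True)
#     # `results` is an otter.test_files.GradingResults object (see docstring above)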
| 27.1125
| 102
| 0.664361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,228
| 0.56616
|
83f188e156ec6c7d9f2733735708e0459183598e
| 930
|
py
|
Python
|
taurex/data/profiles/pressure/arraypressure.py
|
ucl-exoplanets/TauREx3_public
|
cf8da465448df44c3c4dcc2cd0002ef34edd3920
|
[
"BSD-3-Clause"
] | 10
|
2019-12-18T09:19:16.000Z
|
2021-06-21T11:02:06.000Z
|
taurex/data/profiles/pressure/arraypressure.py
|
ucl-exoplanets/TauREx3_public
|
cf8da465448df44c3c4dcc2cd0002ef34edd3920
|
[
"BSD-3-Clause"
] | 10
|
2020-03-24T18:02:15.000Z
|
2021-08-23T20:32:09.000Z
|
taurex/data/profiles/pressure/arraypressure.py
|
ucl-exoplanets/TauREx3_public
|
cf8da465448df44c3c4dcc2cd0002ef34edd3920
|
[
"BSD-3-Clause"
] | 8
|
2020-03-26T14:16:42.000Z
|
2021-12-18T22:11:25.000Z
|
from .pressureprofile import PressureProfile
import numpy as np
class ArrayPressureProfile(PressureProfile):
def __init__(self, array, reverse=False):
super().__init__(self.__class__.__name__, array.shape[-1])
if reverse:
self.pressure_profile = array[::-1]
else:
self.pressure_profile = array
def compute_pressure_profile(self):
"""
Sets up the pressure profile for the atmosphere model
"""
logp = np.log10(self.pressure_profile)
gradp = np.gradient(logp)
self.pressure_profile_levels = \
10**np.append(logp-gradp/2, logp[-1]+gradp[-1]/2)
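        # Worked example: layer centres [1e5, 1e4, 1e3] give logp = [5, 4, 3] and
        # gradp = [-1, -1, -1], so levels = 10**[5.5, 4.5, 3.5, 2.5]: half-grid
        # boundaries in log space, one more level than there are layers.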
@property
def profile(self):
return self.pressure_profile
def write(self, output):
pressure = super().write(output)
return pressure
    @classmethod
    def input_keywords(cls):
        return ['array', 'fromarray']
| 24.473684
| 66
| 0.62043
| 863
| 0.927957
| 0
| 0
| 150
| 0.16129
| 0
| 0
| 96
| 0.103226
|
83f1b322463e935e9c59457e936e5b4e88b767fd
| 2,857
|
py
|
Python
|
bin/check_samplesheet.py
|
ggabernet/vcreport
|
fe5d315364c19d7286c5f7419cc5ff4599ed373d
|
[
"MIT"
] | 1
|
2021-08-23T20:15:15.000Z
|
2021-08-23T20:15:15.000Z
|
bin/check_samplesheet.py
|
ggabernet/vcreport
|
fe5d315364c19d7286c5f7419cc5ff4599ed373d
|
[
"MIT"
] | null | null | null |
bin/check_samplesheet.py
|
ggabernet/vcreport
|
fe5d315364c19d7286c5f7419cc5ff4599ed373d
|
[
"MIT"
] | 1
|
2021-09-09T09:40:11.000Z
|
2021-09-09T09:40:11.000Z
|
#!/usr/bin/env python
# This script is based on the example at: https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv
import os
import sys
import errno
import argparse
def parse_args(args=None):
Description = "Reformat nf-core/vcreport samplesheet file and check its contents."
Epilog = "Example usage: python check_samplesheet.py <FILE_IN>"
parser = argparse.ArgumentParser(description=Description, epilog=Epilog)
parser.add_argument("FILE_IN", help="Input samplesheet file.")
return parser.parse_args(args)
def make_dir(path):
if len(path) > 0:
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise exception
def print_error(error, context="Line", context_str=""):
error_str = "ERROR: Please check samplesheet -> {}".format(error)
if context != "" and context_str != "":
error_str = "ERROR: Please check samplesheet -> {}\n{}: '{}'".format(
error, context.strip(), context_str.strip()
)
print(error_str)
sys.exit(1)
# TODO nf-core: Update the check_samplesheet function
def check_samplesheet(file_in):
"""
This function checks that the samplesheet follows the following structure:
sample,vcf
SAMPLE1,sample1.vcf
SAMPLE2,sample2.vcf
For an example see:
https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv
"""
sample_mapping_dict = {}
with open(file_in, "r") as fin:
## Check header
MIN_COLS = 2
# TODO nf-core: Update the column names for the input samplesheet
HEADER = ["sample", "vcf"]
header = [x.strip('"') for x in fin.readline().strip().split(",")]
if header[: len(HEADER)] != HEADER:
print("ERROR: Please check samplesheet header -> {} != {}".format(",".join(header), ",".join(HEADER)))
sys.exit(1)
## Check sample entries
for line in fin:
lspl = [x.strip().strip('"') for x in line.strip().split(",")]
# Check valid number of columns per row
if len(lspl) < len(HEADER):
print_error(
"Invalid number of columns (minimum = {})!".format(len(HEADER)),
"Line",
line,
)
num_cols = len([x for x in lspl if x])
if num_cols < MIN_COLS:
print_error(
"Invalid number of populated columns (minimum = {})!".format(MIN_COLS),
"Line",
line,
)
def main(args=None):
args = parse_args(args)
check_samplesheet(args.FILE_IN)
if __name__ == "__main__":
sys.exit(main())
| 31.395604
| 159
| 0.60273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,136
| 0.39762
|
83f1ef1dcba662400bb9b8d83a966ab6acf3c9c8
| 2,093
|
py
|
Python
|
mltraining.py
|
krumaska/FTIFTC
|
aff8a00a7a4c720801de9b2ac20ce69e9e2c561a
|
[
"MIT"
] | null | null | null |
mltraining.py
|
krumaska/FTIFTC
|
aff8a00a7a4c720801de9b2ac20ce69e9e2c561a
|
[
"MIT"
] | null | null | null |
mltraining.py
|
krumaska/FTIFTC
|
aff8a00a7a4c720801de9b2ac20ce69e9e2c561a
|
[
"MIT"
] | null | null | null |
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
import random
lol = pd.read_csv('./data/sample_SilverKDA.csv')
lol.drop(['Unnamed: 0'],axis=1,inplace=True)
print(lol)
f, ax = plt.subplots(1, 2, figsize=(18, 8))
lol['gameResult'].value_counts().plot.pie(explode= [0, 0.1], autopct='%1.1f%%', ax=ax[0], shadow=True)
ax[0].set_title('Pie plot - Game Result')
ax[0].set_ylabel('')
sns.countplot('gameResult', data=lol, ax=ax[1])
ax[1].set_title('Count plot - Game Result')
pd.crosstab(lol['JUNGLE'], lol['gameResult'], margins=True)
plt.show()
x = range(0,50)
print(x)
randInt = random.randint(0,lol['gameResult'].count()-50)
y0 = lol['gameResult'][randInt:randInt+50]
plt.plot(x, y0, label="gameResult")
y1 = lol['TOP'][randInt:randInt+50]
plt.plot(x, y1, label="TOP")
y2 = lol['JUNGLE'][randInt:randInt+50]
plt.plot(x, y2, label="JUNGLE")
y3 = lol['MIDDLE'][randInt:randInt+50]
plt.plot(x, y3, label="MIDDLE")
y4 = lol['BOTTOM'][randInt:randInt+50]
plt.plot(x, y4, label="BOTTOM")
y5 = lol['SUPPORT'][randInt:randInt+50]
plt.plot(x, y5, label="SUPPORT")
print(randInt)
plt.xlabel('count')
plt.ylabel('data')
plt.legend()
plt.show()
print(lol.head())
print(lol.info())
X = lol[['TOP','JUNGLE','MIDDLE','BOTTOM','SUPPORT']]
y = lol['gameResult']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=13)
lr = LogisticRegression(random_state=13, solver='liblinear')
lr.fit(X_train, y_train)
pred = lr.predict(X_test)
print(accuracy_score(y_test, pred))
thisPic = np.array([[1.43, 1.84, 1.92, 2.50, 3.92]])
winRate = lr.predict_proba(thisPic)[0,1]
if winRate >= 0.5 and winRate <= 0.6:
    print("It's worth a try.")
elif winRate < 0.5 and winRate >= 0.3:
    print("Check your team's condition; if you want, consider surrendering the game early.")
elif winRate < 0.3:
    print("Surrendering the game early is recommended.")
else:
    print("Your team looks very likely to do well.")
print("Our team's win rate: ", lr.predict_proba(thisPic)[0, 1] * 100, "%")
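# A minimal sketch (not part of the original script) that wraps the
# threshold bands above into a reusable helper; the 0.3/0.5/0.6 cutoffs
# mirror the if/elif chain exactly.
def advise(win_rate):
    if 0.5 <= win_rate <= 0.6:
        return "It's worth a try."
    elif 0.3 <= win_rate < 0.5:
        return "Check your team's condition; if you want, consider surrendering the game early."
    elif win_rate < 0.3:
        return "Surrendering the game early is recommended."
    return "Your team looks very likely to do well."
print(advise(winRate))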
| 26.833333
| 102
| 0.698041
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 561
| 0.252362
|
83f254257c334bebe3b34129f3e77014a18affa5
| 1,300
|
py
|
Python
|
timeboard.py
|
jtbarker/hiring-engineers
|
cd00fff1bb2be6374fc462891c3bf629e3c3ccb1
|
[
"Apache-2.0"
] | null | null | null |
timeboard.py
|
jtbarker/hiring-engineers
|
cd00fff1bb2be6374fc462891c3bf629e3c3ccb1
|
[
"Apache-2.0"
] | null | null | null |
timeboard.py
|
jtbarker/hiring-engineers
|
cd00fff1bb2be6374fc462891c3bf629e3c3ccb1
|
[
"Apache-2.0"
] | null | null | null |
from datadog import initialize, api
options = {
'api_key': '16ff05c7af6ed4652a20f5a8d0c609ce',
'app_key': 'e6a169b9b337355eef90002878fbf9a565e9ee77'
}
initialize(**options)
title = "Mymetric timeboard"
description = "Mymetric Timeboard"
graphs = [
{
"definition": {
"events": [],
"requests": [
{"q": "avg:mymetric{host:ubuntu-xenial}"}
],
"viz": "timeseries"
},
"title": "mymetric in timeseries"
},
{
"definition": {
"events": [],
"requests": [
{"q": "anomalies(avg:postgres.connections.current{host:ubuntu-xenial}, 'basic', 2)"}
],
"viz": "timeseries"
},
"title": "PostgreSQL connections"
},
{
"definition": {
"events": [],
"requests": [
{"q": "avg:mymetric{host:ubuntu-xenial}.rollup(sum, 3600)"}
],
"viz": "timeseries"
},
"title": "Rollup function mymetric"
},
]
template_variables = [{
"name": "ubuntu_xenial",
"prefix": "host",
"default": "host:my-host"
}]
read_only = True
api.Timeboard.create(title=title,description=description,graphs=graphs,template_variables=template_variables,read_only=read_only)
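# Optional sanity check (a sketch, not in the original script): datadogpy
# returns the created board, so it can be fetched back by id. The
# resp['dash']['id'] access assumes the documented response shape.
# resp = api.Timeboard.create(title=title, description=description,
#                             graphs=graphs,
#                             template_variables=template_variables,
#                             read_only=read_only)
# print(api.Timeboard.get(resp['dash']['id']))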
| 26
| 109
| 0.529231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 600
| 0.461538
|
83f2cb0d5c42a6b58a56b8a67b072fa321682a58
| 1,104
|
py
|
Python
|
tests/python/rlview/test-run.py
|
JonathanLehner/korali
|
90f97d8e2fed2311f988f39cfe014f23ba7dd6cf
|
[
"MIT"
] | 43
|
2018-07-26T07:20:42.000Z
|
2022-03-02T10:23:12.000Z
|
tests/python/rlview/test-run.py
|
JonathanLehner/korali
|
90f97d8e2fed2311f988f39cfe014f23ba7dd6cf
|
[
"MIT"
] | 212
|
2018-09-21T10:44:07.000Z
|
2022-03-22T14:33:05.000Z
|
tests/python/rlview/test-run.py
|
JonathanLehner/korali
|
90f97d8e2fed2311f988f39cfe014f23ba7dd6cf
|
[
"MIT"
] | 16
|
2018-07-25T15:00:36.000Z
|
2022-03-22T14:19:46.000Z
|
#! /usr/bin/env python3
from subprocess import call
# Each entry is one set of extra arguments for a korali.rlview smoke test;
# every invocation must exit with status 0.
test_args = [
    ["--help"],
    ["--dir", "abf2d_vracer1", "--test"],
    ["--dir", "abf2d_vracer1", "--maxObservations", "10000", "--test"],
    ["--dir", "abf2d_vracer1", "--maxReward", "20.0", "--test"],
    ["--dir", "abf2d_vracer1", "--minReward", "-1.0", "--test"],
    ["--dir", "abf2d_vracer1", "--showCI", "0.2", "--test"],
    ["--dir", "abf2d_vracer1", "--averageDepth", "30", "--test"],
    ["--dir", "abf2d_vracer1", "abf2d_vracer2", "--test"],
    ["--dir", "abf2d_vracer1", "--output", "test.png", "--test"],
]
for args in test_args:
    r = call(["python3", "-m", "korali.rlview"] + args)
    if r != 0:
        exit(r)
exit(0)
| 26.926829
| 110
| 0.548913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 657
| 0.595109
|
83f2f7aa75a2c9e552bb8125bde1278a1b2c932e
| 1,643
|
bzl
|
Python
|
cmake/build_defs.bzl
|
benjaminp/upb
|
901744a97e5170bfdd5b408a26b6603b1fbab9ad
|
[
"BSD-3-Clause"
] | null | null | null |
cmake/build_defs.bzl
|
benjaminp/upb
|
901744a97e5170bfdd5b408a26b6603b1fbab9ad
|
[
"BSD-3-Clause"
] | null | null | null |
cmake/build_defs.bzl
|
benjaminp/upb
|
901744a97e5170bfdd5b408a26b6603b1fbab9ad
|
[
"BSD-3-Clause"
] | null | null | null |
def generated_file_staleness_test(name, outs, generated_pattern):
"""Tests that checked-in file(s) match the contents of generated file(s).
The resulting test will verify that all output files exist and have the
correct contents. If the test fails, it can be invoked with --fix to
bring the checked-in files up to date.
Args:
name: Name of the rule.
outs: the checked-in files that are copied from generated files.
generated_pattern: the pattern for transforming each "out" file into a
generated file. For example, if generated_pattern="generated/%s" then
a file foo.txt will look for generated file generated/foo.txt.
"""
script_name = name + ".py"
script_src = ":staleness_test.py"
# Filter out non-existing rules so Blaze doesn't error out before we even
# run the test.
existing_outs = native.glob(include = outs)
# The file list contains a few extra bits of information at the end.
# These get unpacked by the Config class in staleness_test_lib.py.
file_list = outs + [generated_pattern, native.package_name() or ".", name]
native.genrule(
name = name + "_makescript",
outs = [script_name],
srcs = [script_src],
testonly = 1,
cmd = "cat $(location " + script_src + ") > $@; " +
"sed -i.bak -e 's|INSERT_FILE_LIST_HERE|" + "\\\n ".join(file_list) + "|' $@",
)
native.py_test(
name = name,
srcs = [script_name],
data = existing_outs + [generated_pattern % file for file in outs],
deps = [
":staleness_test_lib",
],
)
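# Hypothetical usage in a BUILD file (target and file names illustrative):
#
#   generated_file_staleness_test(
#       name = "staleness_test",
#       outs = ["CMakeLists.txt"],
#       generated_pattern = "generated/%s",
#   )
#
# `bazel test :staleness_test` then fails if generated/CMakeLists.txt and
# the checked-in CMakeLists.txt differ; running the failing test with
# --fix copies the generated files over the checked-in ones.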
| 36.511111
| 93
| 0.634814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 981
| 0.597079
|
83f32ec218d69fd1e8829338d9a53be2a269009b
| 4,894
|
py
|
Python
|
tests/transform_finding_test.py
|
aws-samples/aws-security-hub-analytic-pipeline
|
3e4242c24297725f656ff8a560ff180604443223
|
[
"MIT-0"
] | 7
|
2021-06-16T00:55:44.000Z
|
2022-02-13T23:00:27.000Z
|
tests/transform_finding_test.py
|
QPC-database/aws-security-hub-analytic-pipeline
|
aff8cd7f5954c285b93fe8f67f8bef2482a1f686
|
[
"MIT-0"
] | null | null | null |
tests/transform_finding_test.py
|
QPC-database/aws-security-hub-analytic-pipeline
|
aff8cd7f5954c285b93fe8f67f8bef2482a1f686
|
[
"MIT-0"
] | 2
|
2021-07-11T02:41:38.000Z
|
2022-03-29T20:34:23.000Z
|
from assets.lambdas.transform_findings.index import TransformFindings
import boto3
from moto import mock_s3
def __make_bucket(bucket_name: str):
bucket = boto3.resource('s3').Bucket(bucket_name)
bucket.create()
return bucket
@mock_s3
def test_fix_dictionary():
bucket = __make_bucket('tester')
transform_findings = TransformFindings(bucket.name)
finding = {
'first/level/test': 'test',
'ProductArn': 'arn:aws:securityhub:us-east-1::product/aws/securityhub',
'Types': ['Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark'],
'Description': 'Security groups provide stateful filtering of ingress/egress network traffic to AWS resources. It is recommended that no security group allows unrestricted ingress access to port 22.',
'SchemaVersion': '2018-10-08',
'Compliance': {'Status': 'PASSED'},
'GeneratorId': 'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/4.1',
'FirstObservedAt': '2021-01-31T04:52:30.123Z',
'CreatedAt': '2021-01-31T04:52:30.123Z',
'RecordState': 'ACTIVE',
'Title': '4.1 Ensure no security groups allow ingress from 0.0.0.0/0 to port 22',
'Workflow': {'Status': 'RESOLVED'},
'LastObservedAt': '2021-05-07T11:05:27.353Z',
'Severity': {'Normalized': 0, 'Label': 'INFORMATIONAL', 'Product': 0, 'Original': 'INFORMATIONAL'},
'UpdatedAt': '2021-05-07T11:05:25.775Z',
'FindingProviderFields': {
'Types': [
'Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark'],
'Severity': {'Normalized': 0, 'Label': 'INFORMATIONAL', 'Product': 0, 'Original': 'INFORMATIONAL'}
},
'WorkflowState': 'NEW',
'ProductFields': {
'StandardsGuideArn': 'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0',
'StandardsGuideSubscriptionArn': 'arn:aws:securityhub:us-east-1:0123456789:subscription/cis-aws-foundations-benchmark/v/1.2.0',
'RuleId': '4.1',
'RecommendationUrl': 'https://docs.aws.amazon.com/console/securityhub/standards-cis-4.1/remediation',
'RelatedAWSResources:0/name': 'securityhub-restricted-ssh-38a80c22',
'RelatedAWSResources:0/type': 'AWS::Config::ConfigRule',
'StandardsControlArn': 'arn:aws:securityhub:us-east-1:0123456789:control/cis-aws-foundations-benchmark/v/1.2.0/4.1',
'aws/securityhub/ProductName': 'Security Hub',
'aws/securityhub/CompanyName': 'AWS',
'aws/securityhub/FindingId': 'arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:0123456789:subscription/cis-aws-foundations-benchmark/v/1.2.0/4.1/finding/2a55570b-74e9-4aa3-9f4e-66f515c7ff03'
},
'AwsAccountId': '0123456789',
'Id': 'arn:aws:securityhub:us-east-1:0123456789:subscription/cis-aws-foundations-benchmark/v/1.2.0/4.1/finding/2a55570b-74e9-4aa3-9f4e-66f515c7ff03',
'Remediation': {
'Recommendation': {
'Text': 'For directions on how to fix this issue, please consult the AWS Security Hub CIS documentation.',
'Url': 'https://docs.aws.amazon.com/console/securityhub/standards-cis-4.1/remediation'}
},
'Resources': [{
'Partition': 'aws',
'Type': 'AwsEc2SecurityGroup',
'Details': {
'AwsEc2SecurityGroup': {
'GroupName': 'default',
'OwnerId': '0123456789',
'VpcId': 'vpc-0123456789',
'IpPermissions': [{'IpProtocol': '-1', 'UserIdGroupPairs': [
{'UserId': '0123456789', 'GroupId': 'sg-0123456789'}]}],
'IpPermissionsEgress': [{'IpProtocol': '-1', 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]}],
'GroupId': 'sg-0123456789'}
},
'Region': 'us-east-1', 'Id': 'arn:aws:ec2:us-east-1:0123456789:security-group/sg-0123456789'
}]
}
result = transform_findings.fix_dictionary(finding)
assert isinstance(result, dict)
assert 'first/level/test' not in result
assert 'first_level_test' in result
assert 'ProductFields' in result
assert 'aws/securityhub/ProductName' not in result['ProductFields']
assert 'aws_securityhub_ProductName' in result['ProductFields']
assert 'aws/securityhub/CompanyName' not in result['ProductFields']
assert 'aws_securityhub_CompanyName' in result['ProductFields']
assert 'aws/securityhub/FindingId' not in result['ProductFields']
assert 'aws_securityhub_FindingId' in result['ProductFields']
assert 'RelatedAWSResources:0/name' not in result['ProductFields']
assert 'RelatedAWSResources_0_name' in result['ProductFields']
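# The assertions above imply a key-sanitisation rule roughly like this
# sketch (an assumption about TransformFindings.fix_dictionary, not its
# actual source): '/' and ':' in keys become '_', applied recursively.
def _fix_keys(obj):
    if isinstance(obj, dict):
        return {k.replace('/', '_').replace(':', '_'): _fix_keys(v)
                for k, v in obj.items()}
    if isinstance(obj, list):
        return [_fix_keys(v) for v in obj]
    return obj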
| 56.252874
| 238
| 0.647323
| 0
| 0
| 0
| 0
| 4,653
| 0.950756
| 0
| 0
| 3,216
| 0.657131
|
83f41e9d7d2619c0ed48dbceaafa749c11834cc5
| 1,431
|
py
|
Python
|
Breeze18/Breeze/migrations/0006_auto_20180110_2205.py
|
Breeze18/Breeze
|
4215776e2f02fab3ce357e67b3b6ca378742049c
|
[
"Apache-2.0"
] | null | null | null |
Breeze18/Breeze/migrations/0006_auto_20180110_2205.py
|
Breeze18/Breeze
|
4215776e2f02fab3ce357e67b3b6ca378742049c
|
[
"Apache-2.0"
] | 1
|
2017-11-09T13:07:24.000Z
|
2018-01-29T04:31:26.000Z
|
Breeze18/Breeze/migrations/0006_auto_20180110_2205.py
|
Breeze18/Breeze
|
4215776e2f02fab3ce357e67b3b6ca378742049c
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-01-10 16:35
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('Breeze', '0005_forgetpass'),
]
operations = [
migrations.RemoveField(
model_name='registration',
name='transaction_id',
),
migrations.AddField(
model_name='registration',
name='college',
field=models.CharField(default='', max_length=200),
),
migrations.AddField(
model_name='registration',
name='payable',
field=models.DecimalField(decimal_places=2, max_digits=8, null=True),
),
migrations.AddField(
model_name='registration',
name='registration_id',
field=models.CharField(default='', max_length=200, unique=True),
),
migrations.AddField(
model_name='registration',
name='transaction_status',
field=models.CharField(default='Unpaid', max_length=200),
),
migrations.AlterField(
model_name='registration',
name='userId',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| 30.446809
| 110
| 0.600978
| 1,207
| 0.843466
| 0
| 0
| 0
| 0
| 0
| 0
| 271
| 0.189378
|
83f4f90a2f2418b0454a8f8ffca04dc4c58e2aca
| 25,414
|
py
|
Python
|
plugin.video.rebirth/resources/lib/modules/libtools.py
|
TheWardoctor/wardoctors-repo
|
893f646d9e27251ffc00ca5f918e4eb859a5c8f0
|
[
"Apache-2.0"
] | 1
|
2019-03-05T09:38:10.000Z
|
2019-03-05T09:38:10.000Z
|
plugin.video.rebirth/resources/lib/modules/libtools.py
|
TheWardoctor/wardoctors-repo
|
893f646d9e27251ffc00ca5f918e4eb859a5c8f0
|
[
"Apache-2.0"
] | null | null | null |
plugin.video.rebirth/resources/lib/modules/libtools.py
|
TheWardoctor/wardoctors-repo
|
893f646d9e27251ffc00ca5f918e4eb859a5c8f0
|
[
"Apache-2.0"
] | 1
|
2021-11-05T20:48:09.000Z
|
2021-11-05T20:48:09.000Z
|
# -*- coding: utf-8 -*-
################################################################################
# | #
# | ______________________________________________________________ #
# | :~8a.`~888a:::::::::::::::88......88:::::::::::::::;a8~".a88::| #
# | ::::~8a.`~888a::::::::::::88......88::::::::::::;a8~".a888~:::| #
# | :::::::~8a.`~888a:::::::::88......88:::::::::;a8~".a888~::::::| #
# | ::::::::::~8a.`~888a::::::88......88::::::;a8~".a888~:::::::::| #
# | :::::::::::::~8a.`~888a:::88......88:::;a8~".a888~::::::::::::| #
# | :::::::::::: :~8a.`~888a:88 .....88;a8~".a888~:::::::::::::::| #
# | :::::::::::::::::::~8a.`~888......88~".a888~::::::::::::::::::| #
# | 8888888888888888888888888888......8888888888888888888888888888| #
# | ..............................................................| #
# | ..............................................................| #
# | 8888888888888888888888888888......8888888888888888888888888888| #
# | ::::::::::::::::::a888~".a88......888a."~8;:::::::::::::::::::| #
# | :::::::::::::::a888~".a8~:88......88~888a."~8;::::::::::::::::| #
# | ::::::::::::a888~".a8~::::88......88:::~888a."~8;:::::::::::::| #
# | :::::::::a888~".a8~:::::::88......88::::::~888a."~8;::::::::::| #
# | ::::::a888~".a8~::::::::::88......88:::::::::~888a."~8;:::::::| #
# | :::a888~".a8~:::::::::::::88......88::::::::::::~888a."~8;::::| #
# | a888~".a8~::::::::::::::::88......88:::::::::::::::~888a."~8;:| #
# | #
# | Rebirth Addon #
# | Copyright (C) 2017 Cypher #
# | #
# | This program is free software: you can redistribute it and/or modify #
# | it under the terms of the GNU General Public License as published by #
# | the Free Software Foundation, either version 3 of the License, or #
# | (at your option) any later version. #
# | #
# | This program is distributed in the hope that it will be useful, #
# | but WITHOUT ANY WARRANTY; without even the implied warranty of #
# | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# | GNU General Public License for more details. #
# | #
################################################################################
try:
from sqlite3 import dbapi2 as database
except:
from pysqlite2 import dbapi2 as database
import datetime
import json
import os
import re
import sys
import urllib
import urlparse
import xbmc
from resources.lib.modules import control
from resources.lib.modules import cleantitle
class lib_tools:
@staticmethod
def create_folder(folder):
try:
folder = xbmc.makeLegalFilename(folder)
control.makeFile(folder)
try:
if not 'ftp://' in folder: raise Exception()
from ftplib import FTP
ftparg = re.compile('ftp://(.+?):(.+?)@(.+?):?(\d+)?/(.+/?)').findall(folder)
ftp = FTP(ftparg[0][2], ftparg[0][0], ftparg[0][1])
try:
ftp.cwd(ftparg[0][4])
except:
ftp.mkd(ftparg[0][4])
ftp.quit()
except:
pass
except:
pass
@staticmethod
def write_file(path, content):
try:
path = xbmc.makeLegalFilename(path)
if not isinstance(content, basestring):
content = str(content)
file = control.openFile(path, 'w')
file.write(str(content))
file.close()
except Exception as e:
pass
@staticmethod
def nfo_url(media_string, ids):
tvdb_url = 'http://thetvdb.com/?tab=series&id=%s'
tmdb_url = 'https://www.themoviedb.org/%s/%s'
imdb_url = 'http://www.imdb.com/title/%s/'
if 'tvdb' in ids:
return tvdb_url % (str(ids['tvdb']))
elif 'tmdb' in ids:
return tmdb_url % (media_string, str(ids['tmdb']))
elif 'imdb' in ids:
return imdb_url % (str(ids['imdb']))
else:
return ''
@staticmethod
def check_sources(title, year, imdb, tvdb=None, season=None, episode=None, tvshowtitle=None, premiered=None):
try:
from resources.lib.modules import sources
src = sources.sources().getSources(title, year, imdb, tvdb, season, episode, tvshowtitle, premiered)
return src and len(src) > 5
except:
return False
@staticmethod
def legal_filename(filename):
try:
filename = filename.strip()
filename = re.sub(r'(?!%s)[^\w\-_\.]', '.', filename)
filename = re.sub('\.+', '.', filename)
filename = re.sub(re.compile('(CON|PRN|AUX|NUL|COM\d|LPT\d)\.', re.I), '\\1_', filename)
            filename = xbmc.makeLegalFilename(filename)
return filename
except:
return filename
@staticmethod
def make_path(base_path, title, year='', season=''):
show_folder = re.sub(r'[^\w\-_\. ]', '_', title)
show_folder = '%s (%s)' % (show_folder, year) if year else show_folder
path = os.path.join(base_path, show_folder)
if season:
path = os.path.join(path, 'Season %s' % season)
return path
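    # e.g. make_path('/library/tv/', 'Mr. Robot', '2015', '1') returns
    # '/library/tv/Mr. Robot (2015)/Season 1'; characters outside
    # [A-Za-z0-9_\-. ] in the title are replaced with '_'.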
class libmovies:
def __init__(self):
self.library_folder = os.path.join(control.transPath(control.setting('library.movie')), '')
self.check_setting = control.setting('library.check_movie') or 'false'
self.library_setting = control.setting('library.update') or 'true'
self.dupe_setting = control.setting('library.check') or 'true'
self.infoDialog = False
def add(self, name, title, year, imdb, tmdb, range=False):
if not control.condVisibility('Window.IsVisible(infodialog)') and not control.condVisibility('Player.HasVideo'):
control.infoDialog(control.lang(32552).encode('utf-8'), time=10000000)
self.infoDialog = True
try:
if not self.dupe_setting == 'true': raise Exception()
id = [imdb, tmdb] if not tmdb == '0' else [imdb]
lib = control.jsonrpc('{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovies", "params": {"filter":{"or": [{"field": "year", "operator": "is", "value": "%s"}, {"field": "year", "operator": "is", "value": "%s"}, {"field": "year", "operator": "is", "value": "%s"}]}, "properties" : ["imdbnumber", "originaltitle", "year"]}, "id": 1}' % (year, str(int(year)+1), str(int(year)-1)))
lib = unicode(lib, 'utf-8', errors='ignore')
lib = json.loads(lib)['result']['movies']
lib = [i for i in lib if str(i['imdbnumber']) in id or (i['originaltitle'].encode('utf-8') == title and str(i['year']) == year)][0]
except:
lib = []
files_added = 0
try:
if not lib == []: raise Exception()
if self.check_setting == 'true':
src = lib_tools.check_sources(title, year, imdb, None, None, None, None, None)
if not src: raise Exception()
self.strmFile({'name': name, 'title': title, 'year': year, 'imdb': imdb, 'tmdb': tmdb})
files_added += 1
except:
pass
if range == True: return
if self.infoDialog == True:
control.infoDialog(control.lang(32554).encode('utf-8'), time=1)
if self.library_setting == 'true' and not control.condVisibility('Library.IsScanningVideo') and files_added > 0:
control.execute('UpdateLibrary(video)')
def range(self, url):
control.idle()
yes = control.yesnoDialog(control.lang(32555).encode('utf-8'), '', '')
if not yes: return
if not control.condVisibility('Window.IsVisible(infodialog)') and not control.condVisibility('Player.HasVideo'):
control.infoDialog(control.lang(32552).encode('utf-8'), time=10000000)
self.infoDialog = True
from resources.lib.indexers import movies
items = movies.movies().get(url, idx=False)
if items == None: items = []
for i in items:
try:
if xbmc.abortRequested == True: return sys.exit()
self.add('%s (%s)' % (i['title'], i['year']), i['title'], i['year'], i['imdb'], i['tmdb'], range=True)
except:
pass
if self.infoDialog == True:
control.infoDialog(control.lang(32554).encode('utf-8'), time=1)
if self.library_setting == 'true' and not control.condVisibility('Library.IsScanningVideo'):
control.execute('UpdateLibrary(video)')
def strmFile(self, i):
try:
name, title, year, imdb, tmdb = i['name'], i['title'], i['year'], i['imdb'], i['tmdb']
sysname, systitle = urllib.quote_plus(name), urllib.quote_plus(title)
transtitle = cleantitle.normalize(title.translate(None, '\/:*?"<>|'))
content = '%s?action=play&name=%s&title=%s&year=%s&imdb=%s&tmdb=%s' % (sys.argv[0], sysname, systitle, year, imdb, tmdb)
folder = lib_tools.make_path(self.library_folder, transtitle, year)
lib_tools.create_folder(folder)
lib_tools.write_file(os.path.join(folder, lib_tools.legal_filename(transtitle) + '.strm'), content)
lib_tools.write_file(os.path.join(folder, 'movie.nfo'), lib_tools.nfo_url('movie', i))
except:
pass
class libtvshows:
def __init__(self):
self.library_folder = os.path.join(control.transPath(control.setting('library.tv')),'')
self.version = control.version()
self.check_setting = control.setting('library.check_episode') or 'false'
self.include_unknown = control.setting('library.include_unknown') or 'true'
self.library_setting = control.setting('library.update') or 'true'
self.dupe_setting = control.setting('library.check') or 'true'
self.datetime = (datetime.datetime.utcnow() - datetime.timedelta(hours = 5))
self.date = (self.datetime - datetime.timedelta(hours = 24)).strftime('%Y%m%d')
self.infoDialog = False
self.block = False
def add(self, tvshowtitle, year, imdb, tvdb, range=False):
if not control.condVisibility('Window.IsVisible(infodialog)') and not control.condVisibility('Player.HasVideo'):
control.infoDialog(control.lang(32552).encode('utf-8'), time=10000000)
self.infoDialog = True
from resources.lib.indexers import episodes
items = episodes.episodes().get(tvshowtitle, year, imdb, tvdb, idx=False)
try: items = [{'title': i['title'], 'year': i['year'], 'imdb': i['imdb'], 'tvdb': i['tvdb'], 'season': i['season'], 'episode': i['episode'], 'tvshowtitle': i['tvshowtitle'], 'premiered': i['premiered']} for i in items]
except: items = []
try:
if not self.dupe_setting == 'true': raise Exception()
if items == []: raise Exception()
id = [items[0]['imdb'], items[0]['tvdb']]
lib = control.jsonrpc('{"jsonrpc": "2.0", "method": "VideoLibrary.GetTVShows", "params": {"properties" : ["imdbnumber", "title", "year"]}, "id": 1}')
lib = unicode(lib, 'utf-8', errors='ignore')
lib = json.loads(lib)['result']['tvshows']
lib = [i['title'].encode('utf-8') for i in lib if str(i['imdbnumber']) in id or (i['title'].encode('utf-8') == items[0]['tvshowtitle'] and str(i['year']) == items[0]['year'])][0]
lib = control.jsonrpc('{"jsonrpc": "2.0", "method": "VideoLibrary.GetEpisodes", "params": {"filter":{"and": [{"field": "tvshow", "operator": "is", "value": "%s"}]}, "properties": ["season", "episode"]}, "id": 1}' % lib)
lib = unicode(lib, 'utf-8', errors='ignore')
lib = json.loads(lib)['result']['episodes']
lib = ['S%02dE%02d' % (int(i['season']), int(i['episode'])) for i in lib]
items = [i for i in items if not 'S%02dE%02d' % (int(i['season']), int(i['episode'])) in lib]
except:
pass
files_added = 0
for i in items:
try:
if xbmc.abortRequested == True: return sys.exit()
if self.check_setting == 'true':
if i['episode'] == '1':
self.block = True
src = lib_tools.check_sources(i['title'], i['year'], i['imdb'], i['tvdb'], i['season'], i['episode'], i['tvshowtitle'], i['premiered'])
if src: self.block = False
if self.block == True: raise Exception()
premiered = i.get('premiered', '0')
if (premiered != '0' and int(re.sub('[^0-9]', '', str(premiered))) > int(self.date)) or (premiered == '0' and not self.include_unknown):
continue
self.strmFile(i)
files_added += 1
except:
pass
if range == True: return
if self.infoDialog == True:
control.infoDialog(control.lang(32554).encode('utf-8'), time=1)
if self.library_setting == 'true' and not control.condVisibility('Library.IsScanningVideo') and files_added > 0:
control.execute('UpdateLibrary(video)')
def range(self, url):
control.idle()
yes = control.yesnoDialog(control.lang(32555).encode('utf-8'), '', '')
if not yes: return
if not control.condVisibility('Window.IsVisible(infodialog)') and not control.condVisibility('Player.HasVideo'):
control.infoDialog(control.lang(32552).encode('utf-8'), time=10000000)
self.infoDialog = True
from resources.lib.indexers import tvshows
items = tvshows.tvshows().get(url, idx=False)
if items == None: items = []
for i in items:
try:
if xbmc.abortRequested == True: return sys.exit()
self.add(i['title'], i['year'], i['imdb'], i['tvdb'], range=True)
except:
pass
if self.infoDialog == True:
control.infoDialog(control.lang(32554).encode('utf-8'), time=1)
if self.library_setting == 'true' and not control.condVisibility('Library.IsScanningVideo'):
control.execute('UpdateLibrary(video)')
def strmFile(self, i):
try:
title, year, imdb, tvdb, season, episode, tvshowtitle, premiered = i['title'], i['year'], i['imdb'], i['tvdb'], i['season'], i['episode'], i['tvshowtitle'], i['premiered']
episodetitle = urllib.quote_plus(title)
systitle, syspremiered = urllib.quote_plus(tvshowtitle), urllib.quote_plus(premiered)
transtitle = cleantitle.normalize(tvshowtitle.translate(None, '\/:*?"<>|'))
content = '%s?action=play&title=%s&year=%s&imdb=%s&tvdb=%s&season=%s&episode=%s&tvshowtitle=%s&date=%s' % (sys.argv[0], episodetitle, year, imdb, tvdb, season, episode, systitle, syspremiered)
folder = lib_tools.make_path(self.library_folder, transtitle, year)
lib_tools.create_folder(folder)
lib_tools.write_file(os.path.join(folder, 'tvshow.nfo'), lib_tools.nfo_url('tv', i))
folder = lib_tools.make_path(self.library_folder, transtitle, year, season)
lib_tools.create_folder(folder)
lib_tools.write_file(os.path.join(folder, lib_tools.legal_filename('%s S%02dE%02d' % (transtitle, int(season), int(episode))) + '.strm'), content)
except:
pass
class libepisodes:
def __init__(self):
self.library_folder = os.path.join(control.transPath(control.setting('library.tv')),'')
self.library_setting = control.setting('library.update') or 'true'
self.include_unknown = control.setting('library.include_unknown') or 'true'
self.property = '%s_service_property' % control.addonInfo('name').lower()
self.datetime = (datetime.datetime.utcnow() - datetime.timedelta(hours = 5))
self.date = (self.datetime - datetime.timedelta(hours = 24)).strftime('%Y%m%d')
self.infoDialog = False
def update(self, query=None, info='true'):
if not query == None: control.idle()
try:
items = []
season, episode = [], []
show = [os.path.join(self.library_folder, i) for i in control.listDir(self.library_folder)[0]]
for s in show:
try: season += [os.path.join(s, i) for i in control.listDir(s)[0]]
except: pass
for s in season:
try: episode.append([os.path.join(s, i) for i in control.listDir(s)[1] if i.endswith('.strm')][-1])
except: pass
for file in episode:
try:
file = control.openFile(file)
read = file.read()
read = read.encode('utf-8')
file.close()
if not read.startswith(sys.argv[0]): raise Exception()
params = dict(urlparse.parse_qsl(read.replace('?','')))
try: tvshowtitle = params['tvshowtitle']
except: tvshowtitle = None
try: tvshowtitle = params['show']
except: pass
if tvshowtitle == None or tvshowtitle == '': raise Exception()
year, imdb, tvdb = params['year'], params['imdb'], params['tvdb']
imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
try: tmdb = params['tmdb']
except: tmdb = '0'
items.append({'tvshowtitle': tvshowtitle, 'year': year, 'imdb': imdb, 'tmdb': tmdb, 'tvdb': tvdb})
except:
pass
items = [i for x, i in enumerate(items) if i not in items[x + 1:]]
if len(items) == 0: raise Exception()
except:
return
try:
lib = control.jsonrpc('{"jsonrpc": "2.0", "method": "VideoLibrary.GetTVShows", "params": {"properties" : ["imdbnumber", "title", "year"]}, "id": 1}')
lib = unicode(lib, 'utf-8', errors='ignore')
lib = json.loads(lib)['result']['tvshows']
except:
return
if info == 'true' and not control.condVisibility('Window.IsVisible(infodialog)') and not control.condVisibility('Player.HasVideo'):
control.infoDialog(control.lang(32553).encode('utf-8'), time=10000000)
self.infoDialog = True
try:
control.makeFile(control.dataPath)
dbcon = database.connect(control.libcacheFile)
dbcur = dbcon.cursor()
dbcur.execute("CREATE TABLE IF NOT EXISTS tvshows (""id TEXT, ""items TEXT, ""UNIQUE(id)"");")
except:
return
try:
from resources.lib.indexers import episodes
except:
return
files_added = 0
for item in items:
it = None
if xbmc.abortRequested == True: return sys.exit()
try:
dbcur.execute("SELECT * FROM tvshows WHERE id = '%s'" % item['tvdb'])
fetch = dbcur.fetchone()
it = eval(fetch[1].encode('utf-8'))
except:
pass
try:
if not it == None: raise Exception()
it = episodes.episodes().get(item['tvshowtitle'], item['year'], item['imdb'], item['tvdb'], idx=False)
status = it[0]['status'].lower()
it = [{'title': i['title'], 'year': i['year'], 'imdb': i['imdb'], 'tvdb': i['tvdb'], 'season': i['season'], 'episode': i['episode'], 'tvshowtitle': i['tvshowtitle'], 'premiered': i['premiered']} for i in it]
if status == 'continuing': raise Exception()
dbcur.execute("INSERT INTO tvshows Values (?, ?)", (item['tvdb'], repr(it)))
dbcon.commit()
except:
pass
try:
id = [item['imdb'], item['tvdb']]
if not item['tmdb'] == '0': id += [item['tmdb']]
ep = [x['title'].encode('utf-8') for x in lib if str(x['imdbnumber']) in id or (x['title'].encode('utf-8') == item['tvshowtitle'] and str(x['year']) == item['year'])][0]
ep = control.jsonrpc('{"jsonrpc": "2.0", "method": "VideoLibrary.GetEpisodes", "params": {"filter":{"and": [{"field": "tvshow", "operator": "is", "value": "%s"}]}, "properties": ["season", "episode"]}, "id": 1}' % ep)
ep = unicode(ep, 'utf-8', errors='ignore')
ep = json.loads(ep).get('result', {}).get('episodes', {})
ep = [{'season': int(i['season']), 'episode': int(i['episode'])} for i in ep]
ep = sorted(ep, key=lambda x: (x['season'], x['episode']))[-1]
num = [x for x,y in enumerate(it) if str(y['season']) == str(ep['season']) and str(y['episode']) == str(ep['episode'])][-1]
it = [y for x,y in enumerate(it) if x > num]
if len(it) == 0: continue
except:
continue
for i in it:
try:
if xbmc.abortRequested == True: return sys.exit()
premiered = i.get('premiered', '0')
if (premiered != '0' and int(re.sub('[^0-9]', '', str(premiered))) > int(self.date)) or (premiered == '0' and not self.include_unknown):
continue
libtvshows().strmFile(i)
files_added += 1
except:
pass
if self.infoDialog == True:
control.infoDialog(control.lang(32554).encode('utf-8'), time=1)
if self.library_setting == 'true' and not control.condVisibility('Library.IsScanningVideo') and files_added > 0:
control.execute('UpdateLibrary(video)')
def service(self):
try:
lib_tools.create_folder(os.path.join(control.transPath(control.setting('library.movie')), ''))
lib_tools.create_folder(os.path.join(control.transPath(control.setting('library.tv')), ''))
except:
pass
try:
control.makeFile(control.dataPath)
dbcon = database.connect(control.libcacheFile)
dbcur = dbcon.cursor()
dbcur.execute("CREATE TABLE IF NOT EXISTS service (""setting TEXT, ""value TEXT, ""UNIQUE(setting)"");")
dbcur.execute("SELECT * FROM service WHERE setting = 'last_run'")
fetch = dbcur.fetchone()
if fetch == None:
serviceProperty = "1970-01-01 23:59:00.000000"
dbcur.execute("INSERT INTO service Values (?, ?)", ('last_run', serviceProperty))
dbcon.commit()
else:
serviceProperty = str(fetch[1])
dbcon.close()
except:
try: return dbcon.close()
except: return
try: control.window.setProperty(self.property, serviceProperty)
except: return
while not xbmc.abortRequested:
try:
serviceProperty = control.window.getProperty(self.property)
t1 = datetime.timedelta(hours=6)
t2 = datetime.datetime.strptime(serviceProperty, '%Y-%m-%d %H:%M:%S.%f')
t3 = datetime.datetime.now()
check = abs(t3 - t2) > t1
if check == False: raise Exception()
if (control.player.isPlaying() or control.condVisibility('Library.IsScanningVideo')): raise Exception()
serviceProperty = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
control.window.setProperty(self.property, serviceProperty)
try:
dbcon = database.connect(control.libcacheFile)
dbcur = dbcon.cursor()
dbcur.execute("CREATE TABLE IF NOT EXISTS service (""setting TEXT, ""value TEXT, ""UNIQUE(setting)"");")
dbcur.execute("DELETE FROM service WHERE setting = 'last_run'")
dbcur.execute("INSERT INTO service Values (?, ?)", ('last_run', serviceProperty))
dbcon.commit()
dbcon.close()
except:
try: dbcon.close()
except: pass
if not control.setting('library.service.update') == 'true': raise Exception()
info = control.setting('library.service.notification') or 'true'
self.update(info=info)
except:
pass
control.sleep(10000)
| 43.666667
| 388
| 0.517235
| 22,168
| 0.872275
| 0
| 0
| 2,687
| 0.105729
| 0
| 0
| 7,344
| 0.288975
|
83f68d37160408c53d642878608195421084467e
| 9,670
|
py
|
Python
|
frappe/website/page_renderers/template_page.py
|
sersaber/frappe
|
e0c25d2b2c19fe79f7c7848e7307d90a5f27c68a
|
[
"MIT"
] | null | null | null |
frappe/website/page_renderers/template_page.py
|
sersaber/frappe
|
e0c25d2b2c19fe79f7c7848e7307d90a5f27c68a
|
[
"MIT"
] | null | null | null |
frappe/website/page_renderers/template_page.py
|
sersaber/frappe
|
e0c25d2b2c19fe79f7c7848e7307d90a5f27c68a
|
[
"MIT"
] | null | null | null |
import io
import os
import click
import frappe
from frappe.website.page_renderers.base_template_page import BaseTemplatePage
from frappe.website.router import get_base_template, get_page_info
from frappe.website.utils import (
cache_html,
extract_comment_tag,
extract_title,
get_frontmatter,
get_next_link,
get_sidebar_items,
get_toc,
is_binary_file,
)
WEBPAGE_PY_MODULE_PROPERTIES = (
"base_template_path",
"template",
"no_cache",
"sitemap",
"condition_field",
)
COMMENT_PROPERTY_KEY_VALUE_MAP = {
"no-breadcrumbs": ("no_breadcrumbs", 1),
"show-sidebar": ("show_sidebar", 1),
"add-breadcrumbs": ("add_breadcrumbs", 1),
"no-header": ("no_header", 1),
"add-next-prev-links": ("add_next_prev_links", 1),
"no-cache": ("no_cache", 1),
"no-sitemap": ("sitemap", 0),
"sitemap": ("sitemap", 1),
}
class TemplatePage(BaseTemplatePage):
def __init__(self, path, http_status_code=None):
super().__init__(path=path, http_status_code=http_status_code)
self.set_template_path()
def set_template_path(self):
"""
		Searches for a file matching the path in the /www
		and /templates/pages folders and sets the template path if a match is found
"""
folders = get_start_folders()
for app in frappe.get_installed_apps(frappe_last=True):
app_path = frappe.get_app_path(app)
for dirname in folders:
search_path = os.path.join(app_path, dirname, self.path)
for file_path in self.get_index_path_options(search_path):
if os.path.isfile(file_path) and not is_binary_file(file_path):
self.app = app
self.app_path = app_path
self.file_dir = dirname
self.basename = os.path.splitext(file_path)[0]
self.template_path = os.path.relpath(file_path, self.app_path)
self.basepath = os.path.dirname(file_path)
self.filename = os.path.basename(file_path)
self.name = os.path.splitext(self.filename)[0]
return
def can_render(self):
return hasattr(self, "template_path") and bool(self.template_path)
@staticmethod
def get_index_path_options(search_path):
return (
frappe.as_unicode(f"{search_path}{d}") for d in ("", ".html", ".md", "/index.html", "/index.md")
)
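	# e.g. for search_path "www/about" this yields "www/about",
	# "www/about.html", "www/about.md", "www/about/index.html"
	# and "www/about/index.md".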
def render(self):
html = self.get_html()
html = self.add_csrf_token(html)
return self.build_response(html)
@cache_html
def get_html(self):
# context object should be separate from self for security
# because it will be accessed via the user defined template
self.init_context()
self.set_pymodule()
self.update_context()
self.setup_template_source()
self.load_colocated_files()
self.set_properties_from_source()
self.post_process_context()
html = self.render_template()
html = self.update_toc(html)
return html
def post_process_context(self):
self.set_user_info()
self.add_sidebar_and_breadcrumbs()
super(TemplatePage, self).post_process_context()
def add_sidebar_and_breadcrumbs(self):
if self.basepath:
self.context.sidebar_items = get_sidebar_items(self.context.website_sidebar, self.basepath)
if self.context.add_breadcrumbs and not self.context.parents:
parent_path = os.path.dirname(self.path)
if self.path.endswith("index"):
# in case of index page move one directory up for parent path
parent_path = os.path.dirname(parent_path)
for parent_file_path in self.get_index_path_options(parent_path):
parent_file_path = os.path.join(self.app_path, self.file_dir, parent_file_path)
if os.path.isfile(parent_file_path):
parent_page_context = get_page_info(parent_file_path, self.app, self.file_dir)
if parent_page_context:
self.context.parents = [
dict(route=os.path.dirname(self.path), title=parent_page_context.title)
]
break
def set_pymodule(self):
"""
A template may have a python module with a `get_context` method along with it in the
same folder. Also the hyphens will be coverted to underscore for python module names.
This method sets the pymodule_name if it exists.
"""
template_basepath = os.path.splitext(self.template_path)[0]
self.pymodule_name = None
# replace - with _ in the internal modules names
self.pymodule_path = os.path.join(
os.path.dirname(template_basepath),
os.path.basename(template_basepath.replace("-", "_")) + ".py",
)
if os.path.exists(os.path.join(self.app_path, self.pymodule_path)):
self.pymodule_name = self.app + "." + self.pymodule_path.replace(os.path.sep, ".")[:-3]
def setup_template_source(self):
"""Setup template source, frontmatter and markdown conversion"""
self.source = self.get_raw_template()
self.extract_frontmatter()
self.convert_from_markdown()
def update_context(self):
self.set_page_properties()
self.context.build_version = frappe.utils.get_build_version()
if self.pymodule_name:
self.pymodule = frappe.get_module(self.pymodule_name)
self.set_pymodule_properties()
data = self.run_pymodule_method("get_context")
# some methods may return a "context" object
if data:
self.context.update(data)
# TODO: self.context.children = self.run_pymodule_method('get_children')
self.context.developer_mode = frappe.conf.developer_mode
if self.context.http_status_code:
self.http_status_code = self.context.http_status_code
def set_pymodule_properties(self):
for prop in WEBPAGE_PY_MODULE_PROPERTIES:
if hasattr(self.pymodule, prop):
self.context[prop] = getattr(self.pymodule, prop)
def set_page_properties(self):
self.context.base_template = self.context.base_template or get_base_template(self.path)
self.context.basepath = self.basepath
self.context.basename = self.basename
self.context.name = self.name
self.context.path = self.path
self.context.route = self.path
self.context.template = self.template_path
def set_properties_from_source(self):
if not self.source:
return
context = self.context
if not context.title:
context.title = extract_title(self.source, self.path)
base_template = extract_comment_tag(self.source, "base_template")
if base_template:
context.base_template = base_template
if (
context.base_template
and "{%- extends" not in self.source
and "{% extends" not in self.source
and "</body>" not in self.source
):
self.source = """{{% extends "{0}" %}}
{{% block page_content %}}{1}{{% endblock %}}""".format(
context.base_template, self.source
)
self.set_properties_via_comments()
def set_properties_via_comments(self):
for comment, (context_key, value) in COMMENT_PROPERTY_KEY_VALUE_MAP.items():
comment_tag = f"<!-- {comment} -->"
if comment_tag in self.source:
self.context[context_key] = value
click.echo(f"\n⚠️ DEPRECATION WARNING: {comment_tag} will be deprecated on 2021-12-31.")
click.echo(f"Please remove it from {self.template_path} in {self.app}")
def run_pymodule_method(self, method_name):
if hasattr(self.pymodule, method_name):
import inspect
method = getattr(self.pymodule, method_name)
if inspect.getfullargspec(method).args:
return method(self.context)
else:
return method()
def render_template(self):
if self.template_path.endswith("min.js"):
html = self.source # static
else:
if self.context.safe_render is not None:
safe_render = self.context.safe_render
else:
safe_render = True
html = frappe.render_template(self.source, self.context, safe_render=safe_render)
return html
def extends_template(self):
return self.template_path.endswith((".html", ".md")) and (
"{%- extends" in self.source or "{% extends" in self.source
)
def get_raw_template(self):
return frappe.get_jloader().get_source(frappe.get_jenv(), self.context.template)[0]
def load_colocated_files(self):
"""load co-located css/js files with the same name"""
js_path = self.basename + ".js"
if os.path.exists(js_path) and "{% block script %}" not in self.source:
self.context.colocated_js = self.get_colocated_file(js_path)
css_path = self.basename + ".css"
if os.path.exists(css_path) and "{% block style %}" not in self.source:
self.context.colocated_css = self.get_colocated_file(css_path)
def get_colocated_file(self, path):
with io.open(path, "r", encoding="utf-8") as f:
return f.read()
def extract_frontmatter(self):
if not self.template_path.endswith((".md", ".html")):
return
try:
# values will be used to update self
res = get_frontmatter(self.source)
if res["attributes"]:
self.context.update(res["attributes"])
self.source = res["body"]
except Exception:
pass
def convert_from_markdown(self):
if self.template_path.endswith(".md"):
self.source = frappe.utils.md_to_html(self.source)
self.context.page_toc_html = self.source.toc_html
if not self.context.show_sidebar:
self.source = '<div class="from-markdown">' + self.source + "</div>"
def update_toc(self, html):
if "{index}" in html:
html = html.replace("{index}", get_toc(self.path))
if "{next}" in html:
html = html.replace("{next}", get_next_link(self.path))
return html
def set_standard_path(self, path):
self.app = "frappe"
self.app_path = frappe.get_app_path("frappe")
self.path = path
self.template_path = "www/{path}.html".format(path=path)
def set_missing_values(self):
super().set_missing_values()
# for backward compatibility
self.context.docs_base_url = "/docs"
def set_user_info(self):
from frappe.utils.user import get_fullname_and_avatar
info = get_fullname_and_avatar(frappe.session.user)
self.context["fullname"] = info.fullname
self.context["user_image"] = info.avatar
self.context["user"] = info.name
def get_start_folders():
return frappe.local.flags.web_pages_folders or ("www", "templates/pages")
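# Example of the colocated-controller convention handled by
# TemplatePage.set_pymodule() above (file names illustrative): a template
# at www/jobs.html may ship with www/jobs.py defining
#     def get_context(context):
#         context.title = "Open positions"
# and, because hyphens map to underscores in module names,
# www/open-roles.html pairs with www/open_roles.py.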
| 31.193548
| 99
| 0.729369
| 8,754
| 0.9049
| 0
| 0
| 600
| 0.062022
| 0
| 0
| 1,880
| 0.194335
|
83f948132b34592da621aabbd2b53119e725f6d1
| 17,013
|
py
|
Python
|
mistral/db/v2/sqlalchemy/models.py
|
mail2nsrajesh/mistral
|
b19d87141563e00f18cd74c685392d0b9b70e351
|
[
"Apache-2.0"
] | null | null | null |
mistral/db/v2/sqlalchemy/models.py
|
mail2nsrajesh/mistral
|
b19d87141563e00f18cd74c685392d0b9b70e351
|
[
"Apache-2.0"
] | null | null | null |
mistral/db/v2/sqlalchemy/models.py
|
mail2nsrajesh/mistral
|
b19d87141563e00f18cd74c685392d0b9b70e351
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import json
import sqlalchemy as sa
from sqlalchemy import event
from sqlalchemy.orm import backref
from sqlalchemy.orm import relationship
import sys
from oslo_config import cfg
from oslo_log import log as logging
from mistral.db.sqlalchemy import model_base as mb
from mistral.db.sqlalchemy import types as st
from mistral import exceptions as exc
from mistral.services import security
from mistral import utils
# Definition objects.
LOG = logging.getLogger(__name__)
def _get_hash_function_by(column_name):
def calc_hash(context):
val = context.current_parameters[column_name] or {}
if isinstance(val, dict):
# If the value is a dictionary we need to make sure to have
# keys in the same order in a string representation.
hash_base = json.dumps(sorted(val.items()))
else:
hash_base = str(val)
return hashlib.sha256(hash_base.encode('utf-8')).hexdigest()
return calc_hash
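# Illustration (not part of the model code): sorting the items makes the
# hash independent of key order, e.g. {'a': 1, 'b': 2} and {'b': 2, 'a': 1}
# both serialize to "[('a', 1), ('b', 2)]" and therefore hash identically.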
def validate_long_type_length(cls, field_name, value):
"""Makes sure the value does not exceeds the maximum size."""
if value:
# Get the configured limit.
size_limit_kb = cfg.CONF.engine.execution_field_size_limit_kb
# If the size is unlimited.
if size_limit_kb < 0:
return
size_kb = int(sys.getsizeof(str(value)) / 1024)
if size_kb > size_limit_kb:
LOG.error(
"Size limit %dKB exceed for class [%s], "
"field %s of size %dKB.",
size_limit_kb, str(cls), field_name, size_kb
)
raise exc.SizeLimitExceededException(
field_name,
size_kb,
size_limit_kb
)
def register_length_validator(attr_name):
"""Register an event listener on the attribute.
This event listener will validate the size every
time a 'set' occurs.
"""
for cls in utils.iter_subclasses(Execution):
if hasattr(cls, attr_name):
event.listen(
getattr(cls, attr_name),
'set',
lambda t, v, o, i: validate_long_type_length(cls, attr_name, v)
)
class Definition(mb.MistralSecureModelBase):
__abstract__ = True
id = mb.id_column()
name = sa.Column(sa.String(255))
definition = sa.Column(st.MediumText(), nullable=True)
spec = sa.Column(st.JsonMediumDictType())
tags = sa.Column(st.JsonListType())
is_system = sa.Column(sa.Boolean())
# There's no WorkbookExecution so we safely omit "Definition" in the name.
class Workbook(Definition):
"""Contains info about workbook (including definition in Mistral DSL)."""
__tablename__ = 'workbooks_v2'
__table_args__ = (
sa.UniqueConstraint('name', 'project_id'),
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
)
class WorkflowDefinition(Definition):
"""Contains info about workflow (including definition in Mistral DSL)."""
__tablename__ = 'workflow_definitions_v2'
__table_args__ = (
sa.UniqueConstraint('name', 'project_id'),
sa.Index('%s_is_system' % __tablename__, 'is_system'),
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
)
class ActionDefinition(Definition):
"""Contains info about registered Actions."""
__tablename__ = 'action_definitions_v2'
__table_args__ = (
sa.UniqueConstraint('name', 'project_id'),
sa.Index('%s_is_system' % __tablename__, 'is_system'),
sa.Index('%s_action_class' % __tablename__, 'action_class'),
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
)
# Main properties.
description = sa.Column(sa.Text())
input = sa.Column(sa.Text())
# Service properties.
action_class = sa.Column(sa.String(200))
attributes = sa.Column(st.JsonDictType())
# Execution objects.
class Execution(mb.MistralSecureModelBase):
__abstract__ = True
# Common properties.
id = mb.id_column()
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255), nullable=True)
workflow_name = sa.Column(sa.String(255))
workflow_id = sa.Column(sa.String(80))
spec = sa.Column(st.JsonMediumDictType())
state = sa.Column(sa.String(20))
state_info = sa.Column(sa.Text(), nullable=True)
tags = sa.Column(st.JsonListType())
# Internal properties which can be used by engine.
runtime_context = sa.Column(st.JsonLongDictType())
class ActionExecution(Execution):
"""Contains action execution information."""
__tablename__ = 'action_executions_v2'
__table_args__ = (
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
sa.Index('%s_state' % __tablename__, 'state'),
sa.Index('%s_updated_at' % __tablename__, 'updated_at')
)
# Main properties.
accepted = sa.Column(sa.Boolean(), default=False)
input = sa.Column(st.JsonLongDictType(), nullable=True)
output = sa.orm.deferred(sa.Column(st.JsonLongDictType(), nullable=True))
class WorkflowExecution(Execution):
"""Contains workflow execution information."""
__tablename__ = 'workflow_executions_v2'
__table_args__ = (
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
sa.Index('%s_state' % __tablename__, 'state'),
sa.Index('%s_updated_at' % __tablename__, 'updated_at'),
)
# Main properties.
accepted = sa.Column(sa.Boolean(), default=False)
input = sa.Column(st.JsonLongDictType(), nullable=True)
output = sa.orm.deferred(sa.Column(st.JsonLongDictType(), nullable=True))
params = sa.Column(st.JsonLongDictType())
# Initial workflow context containing workflow variables, environment,
# openstack security context etc.
# NOTES:
# * Data stored in this structure should not be copied into inbound
# contexts of tasks. No need to duplicate it.
# * This structure does not contain workflow input.
context = sa.Column(st.JsonLongDictType())
class TaskExecution(Execution):
"""Contains task runtime information."""
__tablename__ = 'task_executions_v2'
__table_args__ = (
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
sa.Index('%s_state' % __tablename__, 'state'),
sa.Index('%s_updated_at' % __tablename__, 'updated_at'),
sa.UniqueConstraint('unique_key')
)
# Main properties.
action_spec = sa.Column(st.JsonLongDictType())
unique_key = sa.Column(sa.String(250), nullable=True)
type = sa.Column(sa.String(10))
    # Whether the task is fully processed (publishing and calculating commands
    # after it). This allows workflow controller implementations to be
    # simplified significantly.
processed = sa.Column(sa.BOOLEAN, default=False)
# Data Flow properties.
in_context = sa.Column(st.JsonLongDictType())
published = sa.Column(st.JsonLongDictType())
@property
def executions(self):
return (
self.action_executions
if not self.spec.get('workflow')
else self.workflow_executions
)
for cls in utils.iter_subclasses(Execution):
event.listen(
# Catch and trim Execution.state_info to always fit allocated size.
# Note that the limit is 65500 which is less than 65535 (2^16 -1).
# The reason is that utils.cut() is not exactly accurate in case if
# the value is not a string, but, for example, a dictionary. If we
# limit it exactly to 65535 then once in a while it may go slightly
# beyond the allowed maximum size. It may depend on the order of
# keys in a string representation and other things that are hidden
# inside utils.cut_dict() method.
cls.state_info,
'set',
lambda t, v, o, i: utils.cut(v, 65500),
retval=True
)
# Many-to-one for 'ActionExecution' and 'TaskExecution'.
ActionExecution.task_execution_id = sa.Column(
sa.String(36),
sa.ForeignKey(TaskExecution.id, ondelete='CASCADE'),
nullable=True
)
TaskExecution.action_executions = relationship(
ActionExecution,
backref=backref('task_execution', remote_side=[TaskExecution.id]),
cascade='all, delete-orphan',
foreign_keys=ActionExecution.task_execution_id,
lazy='select'
)
sa.Index(
'%s_task_execution_id' % ActionExecution.__tablename__,
'task_execution_id'
)
# Many-to-one for 'WorkflowExecution' and 'TaskExecution'.
WorkflowExecution.task_execution_id = sa.Column(
sa.String(36),
sa.ForeignKey(TaskExecution.id, ondelete='CASCADE'),
nullable=True
)
TaskExecution.workflow_executions = relationship(
WorkflowExecution,
backref=backref('task_execution', remote_side=[TaskExecution.id]),
cascade='all, delete-orphan',
foreign_keys=WorkflowExecution.task_execution_id,
lazy='select'
)
sa.Index(
'%s_task_execution_id' % WorkflowExecution.__tablename__,
'task_execution_id'
)
# Many-to-one for 'TaskExecution' and 'WorkflowExecution'.
TaskExecution.workflow_execution_id = sa.Column(
sa.String(36),
sa.ForeignKey(WorkflowExecution.id, ondelete='CASCADE')
)
WorkflowExecution.task_executions = relationship(
TaskExecution,
backref=backref('workflow_execution', remote_side=[WorkflowExecution.id]),
cascade='all, delete-orphan',
foreign_keys=TaskExecution.workflow_execution_id,
lazy='select'
)
sa.Index(
'%s_workflow_execution_id' % TaskExecution.__tablename__,
TaskExecution.workflow_execution_id
)
# Other objects.
class DelayedCall(mb.MistralModelBase):
"""Contains info about delayed calls."""
__tablename__ = 'delayed_calls_v2'
id = mb.id_column()
factory_method_path = sa.Column(sa.String(200), nullable=True)
target_method_name = sa.Column(sa.String(80), nullable=False)
method_arguments = sa.Column(st.JsonDictType())
serializers = sa.Column(st.JsonDictType())
key = sa.Column(sa.String(250), nullable=True)
auth_context = sa.Column(st.JsonDictType())
execution_time = sa.Column(sa.DateTime, nullable=False)
processing = sa.Column(sa.Boolean, default=False, nullable=False)
sa.Index(
'%s_execution_time' % DelayedCall.__tablename__,
DelayedCall.execution_time
)
class Environment(mb.MistralSecureModelBase):
"""Contains environment variables for workflow execution."""
__tablename__ = 'environments_v2'
__table_args__ = (
sa.UniqueConstraint('name', 'project_id'),
sa.Index('%s_name' % __tablename__, 'name'),
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
)
# Main properties.
id = mb.id_column()
name = sa.Column(sa.String(200))
description = sa.Column(sa.Text())
variables = sa.Column(st.JsonLongDictType())
class CronTrigger(mb.MistralSecureModelBase):
"""Contains info about cron triggers."""
__tablename__ = 'cron_triggers_v2'
__table_args__ = (
sa.UniqueConstraint('name', 'project_id'),
sa.UniqueConstraint(
'workflow_input_hash', 'workflow_name', 'pattern', 'project_id',
'workflow_params_hash', 'remaining_executions',
'first_execution_time'
),
sa.Index(
'%s_next_execution_time' % __tablename__,
'next_execution_time'
),
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
sa.Index('%s_workflow_name' % __tablename__, 'workflow_name'),
)
id = mb.id_column()
name = sa.Column(sa.String(200))
pattern = sa.Column(
sa.String(100),
nullable=True,
default='0 0 30 2 0' # Set default to 'never'.
)
first_execution_time = sa.Column(sa.DateTime, nullable=True)
next_execution_time = sa.Column(sa.DateTime, nullable=False)
workflow_name = sa.Column(sa.String(255))
remaining_executions = sa.Column(sa.Integer)
workflow_id = sa.Column(
sa.String(36),
sa.ForeignKey(WorkflowDefinition.id)
)
workflow = relationship('WorkflowDefinition', lazy='joined')
workflow_params = sa.Column(st.JsonDictType())
workflow_params_hash = sa.Column(
sa.CHAR(64),
default=_get_hash_function_by('workflow_params')
)
workflow_input = sa.Column(st.JsonDictType())
workflow_input_hash = sa.Column(
sa.CHAR(64),
default=_get_hash_function_by('workflow_input')
)
trust_id = sa.Column(sa.String(80))
def to_dict(self):
d = super(CronTrigger, self).to_dict()
utils.datetime_to_str_in_dict(d, 'first_execution_time')
utils.datetime_to_str_in_dict(d, 'next_execution_time')
return d
# Register all hooks related to secure models.
mb.register_secure_model_hooks()
# TODO(rakhmerov): This is a bad solution. It's hard to find in the code,
# configure flexibly etc. Fix it.
# Register an event listener to verify that the size of all the long columns
# affected by the user do not exceed the limit configuration.
for attr_name in ['input', 'output', 'params', 'published']:
register_length_validator(attr_name)
class ResourceMember(mb.MistralModelBase):
"""Contains info about resource members."""
__tablename__ = 'resource_members_v2'
__table_args__ = (
sa.UniqueConstraint(
'resource_id',
'resource_type',
'member_id'
),
)
id = mb.id_column()
resource_id = sa.Column(sa.String(80), nullable=False)
resource_type = sa.Column(
sa.String(50),
nullable=False,
default='workflow'
)
project_id = sa.Column(sa.String(80), default=security.get_project_id)
member_id = sa.Column(sa.String(80), nullable=False)
status = sa.Column(sa.String(20), nullable=False, default="pending")
class EventTrigger(mb.MistralSecureModelBase):
"""Contains info about event triggers."""
__tablename__ = 'event_triggers_v2'
__table_args__ = (
sa.UniqueConstraint('exchange', 'topic', 'event', 'workflow_id',
'project_id'),
sa.Index('%s_project_id_workflow_id' % __tablename__, 'project_id',
'workflow_id'),
)
id = mb.id_column()
name = sa.Column(sa.String(200))
workflow_id = sa.Column(
sa.String(36),
sa.ForeignKey(WorkflowDefinition.id)
)
workflow_params = sa.Column(st.JsonDictType())
workflow_input = sa.Column(st.JsonDictType())
exchange = sa.Column(sa.String(80), nullable=False)
topic = sa.Column(sa.String(80), nullable=False)
event = sa.Column(sa.String(80), nullable=False)
trust_id = sa.Column(sa.String(80))
class NamedLock(mb.MistralModelBase):
"""Contains info about named locks.
Usage of named locks is based on properties of READ COMMITTED
transactions of the most generally used SQL databases such as
Postgres, MySQL, Oracle etc.
The locking scenario is as follows:
1. Transaction A (TX-A) inserts a row with unique 'id' and
some value that identifies a locked object stored in 'name'.
2. Transaction B (TX-B) and any subsequent transactions try
to insert a row with a unique 'id' and the same value in the 'name'
field, and they wait until TX-A completes due to the transactional
properties of READ COMMITTED.
3. If TX-A then immediately deletes the record and commits, TX-B
(or one of the subsequent transactions) is released and its
'insert' completes.
4. The scenario then repeats from step #2, with the role of TX-A
played by whichever transaction just completed its insert.
Practically, this table should never contain any committed rows.
Its entire usage is built on the transactional behavior of the
underlying storage.
"""
__tablename__ = 'named_locks'
sa.UniqueConstraint('name')
id = mb.id_column()
name = sa.Column(sa.String(250))
sa.UniqueConstraint(NamedLock.name)
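# The locking pattern from the NamedLock docstring is an insert inside one
# transaction that is deleted again just before commit. A minimal sketch,
# assuming a SQLAlchemy session and a uuid helper (names below are
# illustrative, not the actual Mistral locking API):
#
#   import uuid
#   from contextlib import contextmanager
#
#   @contextmanager
#   def named_lock(session, name):
#       lock = NamedLock(id=str(uuid.uuid4()), name=name)
#       session.add(lock)
#       session.flush()  # a competing TX's insert blocks until this TX commits
#       try:
#           yield
#       finally:
#           session.delete(lock)  # the row disappears once this TX commits
#
#   # usage: with named_lock(session, 'wf-%s' % wf_ex_id): ...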
| 31.622677
| 79
| 0.673779
| 11,021
| 0.647799
| 0
| 0
| 184
| 0.010815
| 0
| 0
| 6,174
| 0.362899
|
83fa1f4c15e8b0c13b7079f93983ba9e472a57f2
| 3,286
|
bzl
|
Python
|
haskell/private/packages.bzl
|
andyscott/rules_haskell
|
1c7341f885f62cecad705f6a9e8b610b73f11527
|
[
"Apache-2.0"
] | null | null | null |
haskell/private/packages.bzl
|
andyscott/rules_haskell
|
1c7341f885f62cecad705f6a9e8b610b73f11527
|
[
"Apache-2.0"
] | null | null | null |
haskell/private/packages.bzl
|
andyscott/rules_haskell
|
1c7341f885f62cecad705f6a9e8b610b73f11527
|
[
"Apache-2.0"
] | null | null | null |
"""Package list handling"""
load(":private/set.bzl", "set")
def pkg_info_to_ghc_args(pkg_info):
"""
Takes the package info collected by `ghc_info()` and returns the actual
list of command line arguments that should be passed to GHC.
"""
args = [
# In compile.bzl, we pass this just before all -package-id
# arguments. Not doing so leads to bizarre compile-time failures.
# It turns out that equally, not doing so leads to bizarre
# link-time failures. See
# https://github.com/tweag/rules_haskell/issues/395.
"-hide-all-packages",
]
if not pkg_info.has_version:
args.extend([
# Version macros are disabled for all packages by default
# and enabled only for packages that have a version,
# see https://github.com/tweag/rules_haskell/issues/414
"-fno-version-macros",
])
for package in pkg_info.packages:
args.extend(["-package", package])
for package_id in pkg_info.package_ids:
args.extend(["-package-id", package_id])
for package_db in pkg_info.package_dbs:
args.extend(["-package-db", package_db])
return args
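# Example (illustrative): a pkg_info struct like the one expose_packages()
# returns below,
#
#   struct(
#       has_version = False,
#       packages = ["base"],
#       package_ids = ["mylib-0.1.0-abc123"],   # made-up package id
#       package_dbs = ["path/to/package.conf.d"],
#   )
#
# maps to the GHC argument list
#
#   ["-hide-all-packages", "-fno-version-macros",
#    "-package", "base",
#    "-package-id", "mylib-0.1.0-abc123",
#    "-package-db", "path/to/package.conf.d"]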
def expose_packages(build_info, lib_info, use_direct, use_my_pkg_id, custom_package_caches, version):
"""
Returns the information that is needed by GHC in order to enable haskell
packages.
build_info: common to all builds
version: if the rule carries a version, the CPP version macro is exported
The remaining arguments are less well understood:
lib_info: only used for the REPL and the linter
use_direct: only used for the REPL and the linter
use_my_pkg_id: only used for one specific task in compile.bzl
custom_package_caches: override build_info's package_caches; used only by the REPL
"""
has_version = version != None and version != ""
# Expose all prebuilt dependencies
#
# We have to remember to specify all (transitive) wired-in
# dependencies or we can't find objects for linking
#
# Set use_direct if build_info does not have a direct_prebuilt_deps field.
packages = []
for prebuilt_dep in set.to_list(build_info.direct_prebuilt_deps if use_direct else build_info.prebuilt_dependencies):
packages.append(prebuilt_dep.package)
# Expose all bazel dependencies
package_ids = []
for package in set.to_list(build_info.package_ids):
# XXX: only the REPL and the linter use this lib_info flag;
# it is set to None in every other use of this function.
# TODO: find the meaning of this flag
if lib_info == None or package != lib_info.package_id:
# XXX: use_my_pkg_id is not None only in compile.bzl
if (use_my_pkg_id == None) or package != use_my_pkg_id:
package_ids.append(package)
# Only include package DBs for deps, prebuilt deps should be found
# auto-magically by GHC
package_dbs = []
for cache in set.to_list(build_info.package_caches if not custom_package_caches else custom_package_caches):
package_dbs.append(cache.dirname)
ghc_info = struct(
has_version = has_version,
packages = packages,
package_ids = package_ids,
package_dbs = package_dbs,
)
return ghc_info
| 36.921348
| 121
| 0.680158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,744
| 0.530736
|
83fa2b2bb34bdebf6c4cd5300d5d2f1279a8b7ff
| 223
|
py
|
Python
|
get_repo/git.py
|
florian42/get-repo
|
5c1dbf5ecfbdb28f3a628bff57e3a0963ec0fdcc
|
[
"MIT"
] | null | null | null |
get_repo/git.py
|
florian42/get-repo
|
5c1dbf5ecfbdb28f3a628bff57e3a0963ec0fdcc
|
[
"MIT"
] | null | null | null |
get_repo/git.py
|
florian42/get-repo
|
5c1dbf5ecfbdb28f3a628bff57e3a0963ec0fdcc
|
[
"MIT"
] | null | null | null |
import subprocess
def clone(url: str, target_directory: str) -> None:
print(f'Cloning {url} into {target_directory} ...')
subprocess.run(
['git', 'clone', url, target_directory], capture_output=True
)
| 24.777778
| 68
| 0.659193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 56
| 0.251121
|
83fa3e28d2e07cc9a136e3744d2ab599a591cc63
| 2,220
|
py
|
Python
|
tests/test_models/test_state.py
|
adrian-blip/AirBnB_clone_v2
|
c27a9d923631c78ec437e4608b5c98f3f9fd1cad
|
[
"MIT"
] | null | null | null |
tests/test_models/test_state.py
|
adrian-blip/AirBnB_clone_v2
|
c27a9d923631c78ec437e4608b5c98f3f9fd1cad
|
[
"MIT"
] | null | null | null |
tests/test_models/test_state.py
|
adrian-blip/AirBnB_clone_v2
|
c27a9d923631c78ec437e4608b5c98f3f9fd1cad
|
[
"MIT"
] | 1
|
2021-07-07T21:37:54.000Z
|
2021-07-07T21:37:54.000Z
|
#!/usr/bin/python3
"""
===============================================================================
████████╗███████╗███████╗████████╗ ██████╗ █████╗ ███████╗███████╗███████╗
╚══██╔══╝██╔════╝██╔════╝╚══██╔══╝ ██╔════╝██╔══██╗██╔════╝██╔════╝██╔════╝
██║ █████╗ ███████╗ ██║ ██║ ███████║███████╗█████╗ ███████╗
██║ ██╔══╝ ╚════██║ ██║ ██║ ██╔══██║╚════██║██╔══╝ ╚════██║
██║ ███████╗███████║ ██║ ╚██████╗██║ ██║███████║███████╗███████║
╚═╝ ╚══════╝╚══════╝ ╚═╝ ╚═════╝╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝
===============================================================================
"""
from models.base_model import BaseModel
from models.state import State
import unittest
import json
import pep8
import datetime
class TestState(unittest.TestCase):
""" Test State class implementation. """
def test_doc_module(self):
"""Module documentation"""
doc = State.__doc__
self.assertGreater(len(doc), 1)
def test_pep8_conformance_state(self):
""" Test that models/state.py conforms to PEP8. """
pep8style = pep8.StyleGuide(quiet=True)
result = pep8style.check_files(['models/state.py'])
self.assertEqual(result.total_errors, 0,
"Found code style errors (and warnings).")
def test_pep8_conformance_test_state(self):
"""
- Test that tests/test_models/test_state.py conforms to PEP8.
"""
pep8style = pep8.StyleGuide(quiet=True)
res = pep8style.check_files(['tests/test_models/test_state.py'])
self.assertEqual(res.total_errors, 0,
"Found code style errors (and warnings).")
def test_doc_constructor(self):
""" Constructor documentation. """
doc = State.__init__.__doc__
self.assertGreater(len(doc), 1)
def test_class(self):
""" Validate the types of the attributes an class. """
with self.subTest(msg='Inheritance'):
self.assertTrue(issubclass(State, BaseModel))
with self.subTest(msg='Attributes'):
self.assertIsInstance(State.name, str)
| 38.275862
| 80
| 0.440991
| 1,403
| 0.473986
| 0
| 0
| 0
| 0
| 0
| 0
| 1,862
| 0.629054
|
83fa4a8d78bac76a3513caf7e33e512c2461662e
| 39,191
|
py
|
Python
|
src/generatorse/EESG_1.7.x.py
|
WISDEM/GeneratorSE
|
ee85646bb82f9d120a3efd39a5530e674062da08
|
[
"Apache-2.0"
] | null | null | null |
src/generatorse/EESG_1.7.x.py
|
WISDEM/GeneratorSE
|
ee85646bb82f9d120a3efd39a5530e674062da08
|
[
"Apache-2.0"
] | 2
|
2017-04-19T20:55:22.000Z
|
2019-05-06T10:48:39.000Z
|
src/generatorse/EESG_1.7.x.py
|
WISDEM/GeneratorSE
|
ee85646bb82f9d120a3efd39a5530e674062da08
|
[
"Apache-2.0"
] | 3
|
2018-10-10T00:07:35.000Z
|
2021-04-19T18:41:05.000Z
|
"""EESG.py
Created by Latha Sethuraman, Katherine Dykes.
Copyright (c) NREL. All rights reserved.
Electromagnetic design based on conventional magnetic circuit laws
Structural design based on McDonald's thesis """
from openmdao.api import Group, Problem, Component,ExecComp,IndepVarComp,ScipyOptimizer,pyOptSparseDriver
from openmdao.drivers.pyoptsparse_driver import pyOptSparseDriver
from openmdao.drivers import *
import numpy as np
from numpy import array,float,min,sign
from math import pi, cos, sqrt, radians, sin, exp, log10, log, tan, atan
import pandas
class EESG(Component):
""" Estimates overall mass dimensions and Efficiency of Electrically Excited Synchronous generator. """
def __init__(self):
super(EESG, self).__init__()
# EESG generator design inputs
self.add_param('r_s', val=0.0, units ='m', desc='airgap radius r_s')
self.add_param('l_s', val=0.0, units ='m', desc='Stator core length l_s')
self.add_param('h_s', val=0.0, units ='m', desc='Yoke height h_s')
self.add_param('tau_p',val=0.0, units ='m', desc='Pole pitch self.tau_p')
self.add_param('machine_rating',val=0.0, units ='W', desc='Machine rating')
self.add_param('n_nom',val=0.0, units ='rpm', desc='rated speed')
self.add_param('Torque',val=0.0, units ='Nm', desc='Rated torque ')
self.add_param('I_f',val=0.0000,units='A',desc='Excitation current')
self.add_param('N_f',val=0.0,units='A',desc='field turns')
self.add_param('h_ys',val=0.0, units ='m', desc='Yoke height')
self.add_param('h_yr',val=0.0, units ='m', desc='rotor yoke height')
# structural design variables
self.add_param('n_s' ,val=0.0, desc='number of stator arms n_s')
self.add_param('b_st' , val=0.0, units ='m', desc='arm width b_st')
self.add_param('d_s',val=0.0,units ='m', desc='arm depth d_s')
self.add_param('t_ws' ,val=0.0,units ='m', desc='arm depth thickness self.t_wr')
self.add_param('n_r' ,val=0.0, desc='number of arms n')
self.add_param('b_r' ,val=0.0,units ='m', desc='arm width b_r')
self.add_param('d_r' ,val=0.0, units ='m', desc='arm depth d_r')
self.add_param('t_wr' ,val=0.0, units ='m', desc='arm depth thickness self.t_wr')
self.add_param('R_o',val=0.0, units ='m',desc='Shaft radius')
# EESG generator design outputs
# Magnetic loading
self.add_output('B_symax' ,val=0.0, desc='Peak Stator Yoke flux density B_ymax')
self.add_output('B_tmax',val=0.0, desc='Peak Teeth flux density')
self.add_output('B_rymax',val=0.0, desc='Peak Rotor yoke flux density')
self.add_output('B_gfm',val=0.0, desc='Average air gap flux density B_g')
self.add_output('B_g' ,val=0.0, desc='Peak air gap flux density B_g')
self.add_output('B_pc',val=0.0, desc='Pole core flux density')
# Stator design
self.add_output('N_s' ,val=0.0, desc='Number of turns in the stator winding')
self.add_output('b_s',val=0.0, desc='slot width')
self.add_output('b_t',val=0.0, desc='tooth width')
self.add_output('A_Cuscalc',val=0.0, desc='Conductor cross-section mm^2')
self.add_output('S',val=0.0, desc='Stator slots')
# # Output parameters : Rotor design
self.add_output('h_p',val=0.0, desc='Pole height')
self.add_output('b_p',val=0.0, desc='Pole width')
self.add_output('p',val=0.0, desc='No of pole pairs')
self.add_output('n_brushes',val=0.0, desc='number of brushes')
self.add_output('A_Curcalc',val=0.0, desc='Rotor Conductor cross-section')
# Output parameters : Electrical performance
self.add_output('E_s',val=0.0, desc='Stator phase voltage')
self.add_output('f',val=0.0, desc='Generator output frequency')
self.add_output('I_s',val=0.0, desc='Generator output phase current')
self.add_output('R_s',val=0.0, desc='Stator resistance')
self.add_output('R_r',val=0.0, desc='Rotor resistance')
self.add_output('L_m',val=0.0, desc='Stator synchronising inductance')
self.add_output('J_s',val=0.0, desc='Stator Current density')
self.add_output('J_f',val=0.0, desc='rotor Current density')
self.add_output('A_1',val=0.0, desc='Specific current loading')
self.add_output('Load_mmf_ratio',val=0.0, desc='mmf_ratio')
# Objective functions and output
self.add_output('Mass',val=0.0, desc='Actual mass')
self.add_output('K_rad',val=0.0, desc='K_rad')
self.add_output('Losses',val=0.0, desc='Total loss')
self.add_output('gen_eff',val=0.0, desc='Generator efficiency')
# Structural performance
self.add_output('u_Ar',val=0.0, desc='Rotor radial deflection')
self.add_output('y_Ar',val=0.0, desc='Rotor axial deflection')
self.add_output('z_A_r',val=0.0, desc='Rotor circumferential deflection')
self.add_output('u_As',val=0.0, desc='Stator radial deflection')
self.add_output('y_As',val=0.0, desc='Stator axial deflection')
self.add_output('z_A_s',val=0.0, desc='Stator circumferential deflection')
self.add_output('u_all_r',val=0.0, desc='Allowable radial rotor')
self.add_output('u_all_s',val=0.0, desc='Allowable radial stator')
self.add_output('y_all',val=0.0, desc='Allowable axial')
self.add_output('z_all_s',val=0.0, desc='Allowable circum stator')
self.add_output('z_all_r',val=0.0, desc='Allowable circum rotor')
self.add_output('b_all_s',val=0.0, desc='Allowable arm')
self.add_output('b_all_r',val=0.0, desc='Allowable arm dimensions')
self.add_output('TC1',val=0.0, desc='Torque constraint')
self.add_output('TC2',val=0.0, desc='Torque constraint-rotor')
self.add_output('TC3',val=0.0, desc='Torque constraint-stator')
#Material properties
self.add_param('rho_Fes',val=0.0,units='kg*m**-3', desc='Structural Steel density ')
self.add_param('rho_Fe',val=0.0,units='kg*m**-3', desc='Magnetic Steel density ')
self.add_param('rho_Copper',val=0.0,units='kg*m**-3', desc='Copper density ')
# Mass Outputs
self.add_output('Copper',val=0.0, desc='Copper Mass')
self.add_output('Iron',val=0.0, desc='Electrical Steel Mass')
self.add_output('Structural_mass' ,val=0.0, desc='Structural Mass')
# Other parameters
self.add_output('Power_ratio',val=0.0, desc='Power_ratio')
self.add_output('Slot_aspect_ratio',val=0.0,desc='Stator slot aspect ratio')
self.add_output('R_out',val=0.0, desc='Outer radius')
#inputs/outputs for interface with drivese
self.add_param('main_shaft_cm',val= np.array([0.0, 0.0, 0.0]),desc='Main Shaft CM')
self.add_param('main_shaft_length',val=0.0, desc='main shaft length')
self.add_output('I',val=np.array([0.0, 0.0, 0.0]),desc='Moments of Inertia for the component [Ixx, Iyy, Izz] around its center of mass')
self.add_output('cm', val=np.array([0.0, 0.0, 0.0]),desc='COM [x,y,z]')
self.gen_sizing = generator_sizing()
def solve_nonlinear(self, inputs, outputs, resid):
(outputs['B_symax'], outputs['B_tmax'], outputs['B_rymax'], outputs['B_gfm'], outputs['B_g'],outputs['B_pc'], outputs['N_s'], outputs['b_s'], \
outputs['b_t'], outputs['A_Cuscalc'],outputs['A_Curcalc'], outputs['b_p'], outputs['h_p'], outputs['p'], outputs['E_s'], outputs['f'], \
outputs['I_s'], outputs['R_s'], outputs['L_m'], outputs['A_1'], outputs['J_s'], outputs['R_r'],outputs['Losses'], \
outputs['Load_mmf_ratio'],outputs['Power_ratio'],outputs['n_brushes'],outputs['J_f'],outputs['K_rad'], outputs['gen_eff'], outputs['S'],
outputs['Slot_aspect_ratio'], outputs['Copper'],outputs['Iron'],outputs['u_Ar'], outputs['y_Ar'], \
outputs['z_A_r'], outputs['u_As'], outputs['y_As'], outputs['z_A_s'], outputs['u_all_r'], outputs['u_all_s'], \
outputs['y_all'], outputs['z_all_s'], outputs['z_all_r'], outputs['b_all_s'], outputs['b_all_r'], outputs['TC1'], \
outputs['TC2'], outputs['TC3'], outputs['R_out'], outputs['Structural_mass'],outputs['Mass'],outputs['cm'], outputs['I']) \
= self.gen_sizing.compute(inputs['r_s'], inputs['l_s'], inputs['h_s'], inputs['tau_p'], inputs['machine_rating'],
inputs['n_nom'], inputs['Torque'], inputs['I_f'],inputs['N_f'],inputs['h_ys'], inputs['h_yr'],inputs['rho_Fe'], inputs['rho_Copper'],inputs['b_st'], inputs['d_s'], \
inputs['t_ws'], inputs['n_r'],inputs['n_s'], inputs['b_r'],inputs['d_r'], inputs['t_wr'], \
inputs['R_o'], inputs['rho_Fes'],inputs['main_shaft_cm'],inputs['main_shaft_length'])
return outputs
class generator_sizing(object):
def __init__(self):
pass
def compute(self,r_s, l_s,h_s,tau_p,machine_rating,n_nom,Torque,I_f,N_f,h_ys,h_yr, \
rho_Fe,rho_Copper,b_st, d_s,t_ws, n_r,n_s, b_r,d_r, t_wr, \
R_o, rho_Fes,main_shaft_cm,main_shaft_length):
self.r_s=r_s
self.l_s=l_s
self.h_s=h_s
self.tau_p=tau_p
self.N_f=N_f
self.I_f=I_f
self.h_ys=h_ys
self.h_yr=h_yr
self.machine_rating=machine_rating
self.n_nom=n_nom
self.Torque=Torque
self.b_st=b_st
self.d_s=d_s
self.t_ws=t_ws
self.n_r=n_r
self.n_s=n_s
self.b_r=b_r
self.d_r=d_r
self.t_wr=t_wr
self.R_o=R_o
self.rho_Fe=rho_Fe
self.rho_Copper=rho_Copper
self.rho_Fes=rho_Fes
self.main_shaft_cm=main_shaft_cm
self.main_shaft_length=main_shaft_length
#Assign values to universal constants
g1 =9.81 # m/s^2 acceleration due to gravity
E =2e11 # N/m^2 young's modulus
sigma =48.373e3 # shear stress
mu_0 =pi*4e-7 # permeability of free space
phi =90*2*pi/360
#Assign values to design constants
h_w =0.005
b_so = 0.004 # Stator slot opening
m =3 # number of phases
q1 =2 # no of stator slots per pole per phase
b_s_tau_s=0.45 # ratio of slot width to slot pitch
k_sfil =0.65 # Slot fill factor
P_Fe0h =4 #specific hysteresis losses W/kg @ 1.5 T @50 Hz
P_Fe0e =1 #specific eddy losses W/kg @ 1.5 T @50 Hz
rho_Cu=1.8*10**(-8)*1.4 # resistivity of copper
k_fes =0.9 # iron fill factor
y_tau_p=1 # coil span / pole pitch (full pitch)
k_fillr = 0.7 # rotor slot fill factor
k_s=0.2 #magnetic saturation factor for iron
T = self.Torque
cos_phi=0.85 #power factor
# back iron thickness for rotor and stator
self.t_s =self.h_ys
self.t =self.h_yr
# Aspect ratio
self.K_rad=self.l_s/(2*self.r_s)
###################################################### Electromagnetic design#############################################
alpha_p=pi/2*.7
dia=2*self.r_s # air gap diameter
# air gap length and minimum values
g=0.001*dia
if(g<0.005):
g=0.005
r_r=self.r_s-g #rotor radius
d_se=dia+2*self.h_s+2*self.h_ys # stator outer diameter
self.p=round(pi*dia/(2*self.tau_p)) # number of pole pairs
self.S=2*self.p*q1*m # number of slots of stator phase winding
N_conductors=self.S*2
self.N_s=N_conductors/2/3 # Stator turns per phase
alpha =180/self.S/self.p #electrical angle
tau_s=pi*dia/self.S # slot pitch
h_ps=0.1*self.tau_p # height of pole shoe
b_pc=0.4*self.tau_p # width of pole core
h_pc=0.6*self.tau_p # height of pole core
self.h_p=0.7*self.tau_p # pole height
self.b_p=self.h_p
self.b_s=tau_s * b_s_tau_s #slot width
self.Slot_aspect_ratio=self.h_s/self.b_s
self.b_t=tau_s-self.b_s #tooth width
# Calculating carter factor and effective air gap
g_a=g
K_C1=(tau_s+10*g_a)/(tau_s-self.b_s+10*g_a) # salient pole rotor
g_1=K_C1*g
# calculating angular frequency
om_m=2*pi*self.n_nom/60
om_e=60
self.f = self.n_nom*self.p/60
# Slot fill factor according to air gap radius
if (2*self.r_s>2):
K_fills=0.65
else:
K_fills=0.4
# Calculating Stator winding factor
k_y1=sin(y_tau_p*pi/2) # chording factor
k_q1=sin(pi/6)/q1/sin(pi/6/q1) # winding zone factor
k_wd=k_y1*k_q1
# Calculating stator winding conductor length, cross-section and resistance
shortpitch=0
l_Cus = 2*self.N_s*(2*(self.tau_p-shortpitch/m/q1)+self.l_s) #length of winding
A_s = self.b_s*(self.h_s-h_w)
A_scalc=self.b_s*1000*(self.h_s*1000-h_w*1000) # cross section in mm^2
A_Cus = A_s*q1*self.p*K_fills/self.N_s
self.A_Cuscalc = A_scalc*q1*self.p*K_fills/self.N_s
self.R_s=l_Cus*rho_Cu/A_Cus
#field winding design, conductor lenght, cross-section and resistance
self.N_f=round(self.N_f) # rounding the field winding turns to the nearest integer
I_srated=self.machine_rating/(sqrt(3)*5000*cos_phi)
l_pole=self.l_s-0.05+0.120 # 50mm smaller than stator and 120mm longer to accommodate end stack
K_fe=0.95
l_pfe=l_pole*K_fe
l_Cur=4*self.p*self.N_f*(l_pfe+b_pc+pi/4*(pi*(r_r-h_pc-h_ps)/self.p-b_pc))
A_Cur=k_fillr*h_pc*0.5/self.N_f*(pi*(r_r-h_pc-h_ps)/self.p-b_pc)
self.A_Curcalc=k_fillr*h_pc*1000*0.5/self.N_f*(pi*(r_r-h_pc-h_ps)*1000/self.p-b_pc*1000)
Slot_Area=A_Cur*2*self.N_f/k_fillr
self.R_r=rho_Cu*l_Cur/A_Cur
#field winding current density
self.J_f=self.I_f/self.A_Curcalc
# calculating air flux density
self.B_gfm=mu_0*self.N_f*self.I_f/(g_1*(1+k_s)) #No load air gap flux density
self.B_g=self.B_gfm*4*sin(0.5*self.b_p*pi/self.tau_p)/pi # fundamental component
self.B_symax=self.tau_p*self.B_g/pi/self.h_ys #stator yoke flux density
L_fg=2*mu_0*self.p*self.l_s*4*self.N_f**2*((h_ps/(self.tau_p-self.b_p))+(h_pc/(3*pi*(r_r-h_pc-h_ps)/self.p-b_pc)))
# calculating no load voltage and stator current
self.E_s=2*self.N_s*self.l_s*self.r_s*k_wd*om_m*self.B_g/sqrt(2) #no load voltage
self.I_s=(self.E_s-(self.E_s**2-4*self.R_s*self.machine_rating/m)**0.5)/(2*self.R_s)
# Calculating stator winding current density and specific current loading
self.A_1 = 6*self.N_s*self.I_s/(pi*dia)
self.J_s=self.I_s/self.A_Cuscalc
# Calculating magnetic loading in other parts of the machine
delta_m=0 # Initialising load angle
# peak flux density in pole core, rotor yoke and stator teeth
self.B_pc=(1/b_pc)*((2*self.tau_p/pi)*self.B_g*cos(delta_m)+(2*mu_0*self.I_f*self.N_f*((2*h_ps/(self.tau_p-self.b_p))+(h_pc/(self.tau_p-b_pc)))))
self.B_rymax= 0.5*b_pc*self.B_pc/self.h_yr
self.B_tmax=(self.B_gfm+self.B_g)*tau_s*0.5/self.b_t
# Calculating leakage inductances in the stator
L_ssigmas=2*mu_0*self.l_s*self.N_s**2/self.p/q1*((self.h_s-h_w)/(3*self.b_s)+h_w/b_so) #slot leakage inductance
L_ssigmaew=mu_0*1.2*self.N_s**2/self.p*1.2*(2/3*self.tau_p+0.01) #end winding leakage inductance
L_ssigmag=2*mu_0*self.l_s*self.N_s**2/self.p/q1*(5*(g/b_so)/(5+4*(g/b_so))) # tooth tip leakage inductance
L_ssigma=(L_ssigmas+L_ssigmaew+L_ssigmag) # stator leakage inductance
# Calculating effective air gap
At_g=g_1*self.B_gfm/mu_0
At_t=self.h_s*(400*self.B_tmax+7*(self.B_tmax)**13)
At_sy=self.tau_p*0.5*(400*self.B_symax+7*(self.B_symax)**13)
At_pc=(h_pc+h_ps)*(400*self.B_pc+7*(self.B_pc)**13)
At_ry=self.tau_p*0.5*(400*self.B_rymax+7*(self.B_rymax)**13)
g_eff = (At_g+At_t+At_sy+At_pc+At_ry)*g_1/At_g
self.L_m = 6*k_wd**2*self.N_s**2*mu_0*self.r_s*self.l_s/pi/g_eff/self.p**2
B_r1=(mu_0*self.I_f*self.N_f*4*sin(0.5*(self.b_p/self.tau_p)*pi))/g_eff/pi
# Calculating direct axis and quadrature axes inductances
L_dm= (self.b_p/self.tau_p +(1/pi)*sin(pi*self.b_p/self.tau_p))*self.L_m
L_qm=(self.b_p/self.tau_p -(1/pi)*sin(pi*self.b_p/self.tau_p)+2/(3*pi)*cos(self.b_p*pi/2*self.tau_p))*self.L_m
# Calculating actual load angle
delta_m=(atan(om_e*L_qm*self.I_s/self.E_s))
L_d=L_dm+L_ssigma
L_q=L_qm+L_ssigma
I_sd=self.I_s*sin(delta_m)
I_sq=self.I_s*cos(delta_m)
# induced voltage
E_p=om_e*L_dm*I_sd+sqrt(self.E_s**2-(om_e*L_qm*I_sq)**2)
#M_sf =mu_0*8*self.r_s*self.l_s*k_wd*self.N_s*self.N_f*sin(0.5*self.b_p/self.tau_p*pi)/(self.p*g_eff*pi)
#I_f1=sqrt(2)*(E_p)/(om_e*M_sf)
#I_f2=(E_p/self.E_s)*self.B_g*g_eff*pi/(4*self.N_f*mu_0*sin(pi*self.b_p/2/self.tau_p))
#phi_max_stator=k_wd*self.N_s*pi*self.r_s*self.l_s*2*mu_0*self.N_f*self.I_f*4*sin(0.5*self.b_p/self.tau_p/pi)/(self.p*pi*g_eff*pi)
#M_sf=mu_0*8*self.r_s*self.l_s*k_wd*self.N_s*self.N_f*sin(0.5*b_p/self.tau_p/pi)/(self.p*g_eff*pi)
L_tot=self.l_s+2*self.tau_p
# Excitation power
V_fn=500
Power_excitation=V_fn*2*self.I_f #total rated power in excitation winding
self.Power_ratio =Power_excitation*100/self.machine_rating
# Calculating Electromagnetically Active mass
L_tot=self.l_s+2*self.tau_p
V_Cuss=m*l_Cus*A_Cus # volume of copper in stator
V_Cusr=l_Cur*A_Cur # volume of copper in rotor
V_Fest=(self.l_s*pi*((self.r_s+self.h_s)**2-self.r_s**2)-2*m*q1*self.p*self.b_s*self.h_s*self.l_s) # volume of iron in stator tooth
V_Fesy=self.l_s*pi*((self.r_s+self.h_s+self.h_ys)**2-(self.r_s+self.h_s)**2) # volume of iron in stator yoke
V_Fert=2*self.p*l_pfe*(h_pc*b_pc+self.b_p*h_ps) # volume of iron in rotor pole
V_Fery=l_pfe*pi*((r_r-h_ps-h_pc)**2-(r_r-h_ps-h_pc-self.h_yr)**2) # # volume of iron in rotor yoke
self.Copper=(V_Cuss+V_Cusr)*self.rho_Copper
M_Fest=V_Fest*self.rho_Fe
M_Fesy=V_Fesy*self.rho_Fe
M_Fert=V_Fert*self.rho_Fe
M_Fery=V_Fery*self.rho_Fe
self.Iron=M_Fest+M_Fesy+M_Fert+M_Fery
I_snom=self.machine_rating/(3*self.E_s*cos_phi)
## Optional## Calculating mmf ratio
F_1no_load=3*2**0.5*self.N_s*k_wd*self.I_s/(pi*self.p)
Nf_If_no_load=self.N_f*self.I_f
F_1_rated=(3*2**0.5*self.N_s*k_wd*I_srated)/(pi*self.p)
Nf_If_rated=2*Nf_If_no_load
self.Load_mmf_ratio=Nf_If_rated/F_1_rated
## Calculating losses
#1. Copper losses
K_R=1.2
P_Cuss=m*I_snom**2*self.R_s*K_R
P_Cusr=self.I_f**2*self.R_r
P_Cusnom_total=P_Cuss+P_Cusr
#2. Iron losses ( Hysteresis and Eddy currents)
P_Hyys=M_Fesy*(self.B_symax/1.5)**2*(P_Fe0h*om_e/(2*pi*60)) # Hysteresis losses in stator yoke
P_Ftys=M_Fesy*(self.B_symax/1.5)**2*(P_Fe0e*(om_e/(2*pi*60))**2) # Eddy losses in stator yoke
P_Fesynom=P_Hyys+P_Ftys
P_Hyd=M_Fest*(self.B_tmax/1.5)**2*(P_Fe0h*om_e/(2*pi*60)) # Hysteresis losses in stator teeth
P_Ftd=M_Fest*(self.B_tmax/1.5)**2*(P_Fe0e*(om_e/(2*pi*60))**2) # Eddy losses in stator teeth
P_Festnom=P_Hyd+P_Ftd
# brushes
delta_v=1
self.n_brushes=(self.I_f*2/120)
if (self.n_brushes<0.5):
self.n_brushes=1
else:
self.n_brushes=round(self.n_brushes)
#3. brush losses
p_b=2*delta_v*(self.I_f)
self.Losses=P_Cusnom_total+P_Festnom+P_Fesynom+p_b
self.gen_eff=self.machine_rating*100/(self.Losses+self.machine_rating)
################################################## Structural Design ########################################################
## Structural deflection calculations
#rotor structure
q3 = self.B_g**2/2/mu_0 # normal component of Maxwell's stress
l = self.l_s #l-stator core length
l_b = 2*self.tau_p #end winding length
l_e =self.l_s+2*0.001*self.r_s # equivalent core length
a_r = (self.b_r*self.d_r)-((self.b_r-2*self.t_wr)*(self.d_r-2*self.t_wr)) # cross-sectional area of rotor arms
A_r = l*self.t # cross-sectional area of rotor cylinder
N_r = round(self.n_r)
theta_r =pi/N_r # half angle between spokes
I_r =l*self.t**3/12 # second moment of area of rotor cylinder
I_arm_axi_r =((self.b_r*self.d_r**3)-((self.b_r-2*self.t_wr)*(self.d_r-2*self.t_wr)**3))/12 # second moment of area of rotor arm
I_arm_tor_r = ((self.d_r*self.b_r**3)-((self.d_r-2*self.t_wr)*(self.b_r-2*self.t_wr)**3))/12 # second moment of area of rotor arm w.r.t. torsion
R = r_r-h_ps-h_pc-0.5*self.h_yr
R_1 = R-self.h_yr*0.5 # inner radius of rotor cylinder
k_1 = sqrt(I_r/A_r) # radius of gyration
m1 =(k_1/R)**2
c =R/500
self.u_all_r =R/10000 # allowable radial deflection
self.b_all_r =2*pi*self.R_o/N_r # allowable circumferential arm dimension
# Calculating radial deflection of rotor structure according to McDonald's thesis
Numer=R**3*((0.25*(sin(theta_r)-(theta_r*cos(theta_r)))/(sin(theta_r))**2)-(0.5/sin(theta_r))+(0.5/theta_r))
Pov=((theta_r/(sin(theta_r))**2)+1/tan(theta_r))*((0.25*R/A_r)+(0.25*R**3/I_r))
Qov=R**3/(2*I_r*theta_r*(m1+1))
Lov=(R_1-R_o)/a_r
Denom=I_r*(Pov-Qov+Lov) # radial deflection % rotor
self.u_Ar =(q3*R**2/E/self.h_yr)*(1+Numer/Denom)
# Calculating axial deflection of rotor structure
w_r =self.rho_Fes*g1*sin(phi)*a_r*N_r
mass_st_lam=self.rho_Fe*2*pi*(R+0.5*self.h_yr)*l*self.h_yr # mass of rotor yoke steel
W =g1*sin(phi)*(mass_st_lam+(V_Cusr*self.rho_Copper)+M_Fert)/N_r # weight of rotor cylinder
l_ir =R # length of rotor arm beam at which rotor cylinder acts
l_iir =R_1
self.y_Ar =(W*l_ir**3/12/E/I_arm_axi_r)+(w_r*l_iir**4/24/E/I_arm_axi_r) # axial deflection
#Calculating torsional deflection of rotor structure
self.z_all_r =0.05*2*pi*R/360 # allowable torsional deflection
self.z_A_r =(2*pi*(R-0.5*self.h_yr)*l/N_r)*sigma*(l_ir-0.5*self.h_yr)**3/3/E/I_arm_tor_r # circumferential deflection
#STATOR structure
A_st =l*self.t_s
a_s = (self.b_st*self.d_s)-((self.b_st-2*self.t_ws)*(self.d_s-2*self.t_ws))
N_st = round(self.n_s)
theta_s =pi/N_st
I_st =l*self.t_s**3/12
I_arm_axi_s =((self.b_st*self.d_s**3)-((self.b_st-2*self.t_ws)*(self.d_s-2*self.t_ws)**3))/12 # second moment of area of stator arm
I_arm_tor_s = ((self.d_s*self.b_st**3)-((self.d_s-2*self.t_ws)*(self.b_st-2*self.t_ws)**3))/12 # second moment of area of stator arm w.r.t. torsion
R_st =(self.r_s+self.h_s+self.h_ys*0.5)
R_1s = R_st-self.h_ys*0.5
k_2 = sqrt(I_st/A_st)
m2 =(k_2/R_st)**2
# allowable deflections
self.b_all_s =2*pi*self.R_o/N_st
self.u_all_s = R_st/10000
self.y_all =2*l/100 # allowable axial deflection
self.z_all_s =0.05*2*pi*R_st/360 # allowable torsional deflection
# Calculating radial deflection according to McDonald's
Numers=R_st**3*((0.25*(sin(theta_s)-(theta_s*cos(theta_s)))/(sin(theta_s))**2)-(0.5/sin(theta_s))+(0.5/theta_s))
Povs=((theta_s/(sin(theta_s))**2)+1/tan(theta_s))*((0.25*R_st/A_st)+(0.25*R_st**3/I_st))
Qovs=R_st**3/(2*I_st*theta_s*(m2+1))
Lovs=(R_1s-R_o)*0.5/a_s
Denoms=I_st*(Povs-Qovs+Lovs)
self.R_out=(R/0.995+self.h_s+self.h_ys)
self.u_As =(q3*R_st**2/E/self.t_s)*(1+Numers/Denoms)
# Calculating axial deflection according to McDonald
l_is =R_st-self.R_o
l_iis =l_is
l_iiis =l_is
mass_st_lam_s= M_Fest+pi*l*self.rho_Fe*((R_st+0.5*self.h_ys)**2-(R_st-0.5*self.h_ys)**2)
W_is =g1*sin(phi)*(self.rho_Fes*l*self.d_s**2*0.5) # self-weight of a stator arm
W_iis =g1*sin(phi)*(V_Cuss*self.rho_Copper+mass_st_lam_s)/2/N_st
w_s =self.rho_Fes*g1*sin(phi)*a_s*N_st
X_comp1 = (W_is*l_is**3/12/E/I_arm_axi_s)
X_comp2 =(W_iis*l_iis**4/24/E/I_arm_axi_s)
X_comp3 =w_s*l_iiis**4/24/E/I_arm_axi_s
self.y_As =X_comp1+X_comp2+X_comp3 # axial deflection
# Calculating torsional deflection
self.z_A_s =2*pi*(R_st+0.5*self.t_s)*l/(2*N_st)*sigma*(l_is+0.5*self.t_s)**3/3/E/I_arm_tor_s
# tangential stress constraints
self.TC1=T/(2*pi*sigma)
self.TC2=R**2*l
self.TC3=R_st**2*l
mass_stru_steel =2*(N_st*(R_1s-self.R_o)*a_s*self.rho_Fes)
# Calculating inactive mass and total mass
self.Structural_mass=mass_stru_steel+(N_r*(R_1-self.R_o)*a_r*self.rho_Fes)
self.Mass=self.Copper+self.Iron+self.Structural_mass
self.I = np.array([0.0, 0.0, 0.0])
# Calculating mass moments of inertia and center of mass
self.I[0] = (0.5*self.Mass*self.R_out**2)
self.I[1] = (0.25*self.Mass*self.R_out**2+(1/12)*self.Mass*self.l_s**2)
self.I[2] = self.I[1]
self.cm = np.array([0.0, 0.0, 0.0])
self.cm[0] = self.main_shaft_cm[0] + self.main_shaft_length/2. + self.l_s/2
self.cm[1] = self.main_shaft_cm[1]
self.cm[2] = self.main_shaft_cm[2]
return(self.B_symax, self.B_tmax, self.B_rymax,self.B_gfm, self.B_g,self.B_pc, self.N_s, self.b_s, \
self.b_t, self.A_Cuscalc, self.A_Curcalc,self.b_p,self.h_p, self.p, self.E_s, self.f,self.I_s, self.R_s, self.L_m, self.A_1,\
self.J_s,self.R_r, self.Losses,self.Load_mmf_ratio,self.Power_ratio,self.n_brushes,self.J_f,self.K_rad, self.gen_eff,\
self.S, self.Slot_aspect_ratio, self.Copper,self.Iron,self.u_Ar,self.y_Ar,self.z_A_r,\
self.u_As,self.y_As,self.z_A_s,self.u_all_r,self.u_all_s,self.y_all,self.z_all_s,self.z_all_r,self.b_all_s, \
self.b_all_r,self.TC1,self.TC2,self.TC3,self.R_out,self.Structural_mass,self.Mass,self.cm,self.I)
####################################################Cost Analysis#######################################################################
class EESG_Cost(Component):
""" Provides a material cost estimate for EESG. Manufacturing costs are excluded"""
def __init__(self):
super(EESG_Cost, self).__init__()
# Inputs
# Specific cost of material by type
self.add_param('C_Cu',val=0.0, desc='Specific cost of copper')
self.add_param('C_Fe',val=0.0,desc='Specific cost of magnetic steel/iron')
self.add_param('C_Fes',val=0.0,desc='Specific cost of structural steel')
# Mass of each material type
self.add_param('Copper',val=0.0, desc='Copper mass')
self.add_param('Iron',val=0.0, desc='Iron mass')
self.add_param('Structural_mass',val=0.0, desc='Structural mass')
# Outputs
self.add_output('Costs',val=0.0,desc='Total cost')
self.gen_costs=generator_costing()
def solve_nonlinear(self,inputs,outputs,resid):
(outputs['Costs'])=self.gen_costs.compute(inputs['Copper'],inputs['C_Cu'], \
inputs['Iron'],inputs['C_Fe'],inputs['C_Fes'],inputs['Structural_mass'])
return outputs
class generator_costing(object):
def __init__(self):
pass
def compute(self,Copper,C_Cu,Iron,C_Fe,C_Fes,Structural_mass):
self.Copper=Copper
self.Iron=Iron
self.Structural_mass=Structural_mass
# Material cost as a function of material mass and specific cost of material
K_gen=self.Copper*C_Cu+self.Iron*C_Fe
Cost_str=C_Fes*self.Structural_mass
Costs=K_gen+Cost_str
return(Costs)
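# Worked example (illustrative masses; the specific costs match the values
# used in EESG_Opt_example below):
#
#   generator_costing().compute(Copper=8000, C_Cu=4.786,
#                               Iron=50000, C_Fe=0.556,
#                               C_Fes=0.50139, Structural_mass=40000)
#   # K_gen    = 8000*4.786 + 50000*0.556 = 38288 + 27800 = 66088
#   # Cost_str = 0.50139*40000            = 20055.6
#   # returns 86143.6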
####################################################OPTIMISATION SET_UP ###############################################################
class EESG_Opt(Group):
""" Creates a new Group containing EESG and EESG_Cost"""
def __init__(self):
super(EESG_Opt, self).__init__()
self.add('machine_rating', IndepVarComp('machine_rating',0.0),promotes=['*'])
self.add('Torque',IndepVarComp('Torque', val=0.0),promotes=['*'])
self.add('n_nom', IndepVarComp('n_nom', val=0.0),promotes=['*'])
self.add('main_shaft_cm', IndepVarComp('main_shaft_cm',val=np.array([0.0, 0.0, 0.0])),promotes=['*'])
self.add('main_shaft_length',IndepVarComp('main_shaft_length',val=0.0),promotes=['*'])
self.add('r_s',IndepVarComp('r_s',0.0),promotes=['*'])
self.add('l_s',IndepVarComp('l_s',0.0),promotes=['*'])
self.add('h_s',IndepVarComp('h_s',0.0),promotes=['*'])
self.add('tau_p',IndepVarComp('tau_p',0.0),promotes=['*'])
self.add('I_f',IndepVarComp('I_f',0.0),promotes=['*'])
self.add('N_f',IndepVarComp('N_f',0.0),promotes=['*'])
self.add('h_ys',IndepVarComp('h_ys',0.0),promotes=['*'])
self.add('h_yr',IndepVarComp('h_yr',0.0),promotes=['*'])
self.add('n_s',IndepVarComp('n_s',0.0),promotes=['*'])
self.add('b_st',IndepVarComp('b_st',0.0),promotes=['*'])
self.add('n_r',IndepVarComp('n_r',0.0),promotes=['*'])
self.add('b_r',IndepVarComp('b_r',0.0),promotes=['*'])
self.add('d_r',IndepVarComp('d_r',0.0),promotes=['*'])
self.add('d_s',IndepVarComp('d_s',0.0),promotes=['*'])
self.add('t_wr',IndepVarComp('t_wr',0.0),promotes=['*'])
self.add('t_ws',IndepVarComp('t_ws',0.0),promotes=['*'])
self.add('R_o',IndepVarComp('R_o',0.0),promotes=['*'])
self.add('rho_Fes',IndepVarComp('rho_Fes',0.0),promotes=['*'])
self.add('rho_Fe',IndepVarComp('rho_Fe',0.0),promotes=['*'])
self.add('rho_Copper',IndepVarComp('rho_Copper',0.0),promotes=['*'])
# add EESG component, create constraint equations
self.add('EESG',EESG(),promotes=['*'])
self.add('con_uAs', ExecComp('con_uAs =u_all_s-u_As'),promotes=['*'])
self.add('con_zAs', ExecComp('con_zAs =z_all_s-z_A_s'),promotes=['*'])
self.add('con_yAs', ExecComp('con_yAs =y_all-y_As'),promotes=['*'])
self.add('con_bst', ExecComp('con_bst =b_all_s-b_st'),promotes=['*'])
self.add('con_uAr', ExecComp('con_uAr =u_all_r-u_Ar'),promotes=['*'])
self.add('con_zAr', ExecComp('con_zAr =z_all_r-z_A_r'),promotes=['*'])
self.add('con_yAr', ExecComp('con_yAr =y_all-y_Ar'),promotes=['*'])
self.add('con_br', ExecComp('con_br =b_all_r-b_r'),promotes=['*'])
self.add('con_TC2', ExecComp('con_TC2 =TC2-TC1'),promotes=['*'])
self.add('con_TC3', ExecComp('con_TC3 =TC3-TC1'),promotes=['*'])
# add EESG_Cost component
self.add('EESG_Cost',EESG_Cost(),promotes=['*'])
self.add('C_Cu',IndepVarComp('C_Cu',val=0.0),promotes=['*'])
self.add('C_Fe',IndepVarComp('C_Fe',val=0.0),promotes=['*'])
self.add('C_Fes',IndepVarComp('C_Fes',val=0.0),promotes=['*'])
def EESG_Opt_example():
opt_problem=Problem(root=EESG_Opt())
#Example optimization of an EESG for costs on a 5 MW reference turbine
# add optimizer and set-up problem (using user defined input on objective function)
#
opt_problem.driver=pyOptSparseDriver()
opt_problem.driver.options['optimizer'] = 'CONMIN'
opt_problem.driver.add_objective('Costs') # Define Objective
opt_problem.driver.opt_settings['IPRINT'] = 4
opt_problem.driver.opt_settings['ITRM'] = 3
opt_problem.driver.opt_settings['ITMAX'] = 10
opt_problem.driver.opt_settings['DELFUN'] = 1e-3
opt_problem.driver.opt_settings['DABFUN'] = 1e-3
opt_problem.driver.opt_settings['IFILE'] = 'CONMIN_EESG.out'
opt_problem.root.deriv_options['type']='fd'
# Specify target efficiency (%)
Eta_Target = 93.0
# Set bounds for design variables for an EESG designed for a 5MW turbine
opt_problem.driver.add_desvar('r_s',lower=0.5,upper=9.0)
opt_problem.driver.add_desvar('l_s', lower=0.5, upper=2.5)
opt_problem.driver.add_desvar('h_s', lower=0.06, upper=0.15)
opt_problem.driver.add_desvar('tau_p', lower=0.04, upper=0.2)
opt_problem.driver.add_desvar('N_f', lower=10, upper=300)
opt_problem.driver.add_desvar('I_f', lower=1, upper=500)
opt_problem.driver.add_desvar('n_r', lower=5.0, upper=15.0)
opt_problem.driver.add_desvar('h_yr', lower=0.01, upper=0.25)
opt_problem.driver.add_desvar('h_ys', lower=0.01, upper=0.25)
opt_problem.driver.add_desvar('b_r', lower=0.1, upper=1.5)
opt_problem.driver.add_desvar('d_r', lower=0.1, upper=1.5)
opt_problem.driver.add_desvar('t_wr', lower=0.001, upper=0.2)
opt_problem.driver.add_desvar('n_s', lower=5.0, upper=15.0)
opt_problem.driver.add_desvar('b_st', lower=0.1, upper=1.5)
opt_problem.driver.add_desvar('d_s', lower=0.1, upper=1.5)
opt_problem.driver.add_desvar('t_ws', lower=0.001, upper=0.2)
# set up constraints for the EESG generator
opt_problem.driver.add_constraint('B_symax',upper=2.0-1.0e-6) #1
opt_problem.driver.add_constraint('B_rymax',upper=2.0-1.0e-6) #2
opt_problem.driver.add_constraint('B_tmax',upper=2.0-1.0e-6) #3
opt_problem.driver.add_constraint('B_gfm',lower=0.617031,upper=1.057768) #4
opt_problem.driver.add_constraint('B_g',lower=0.7,upper=1.2) #5
opt_problem.driver.add_constraint('B_pc',upper=2.0) #6
opt_problem.driver.add_constraint('E_s',lower=500.0,upper=5000.0) #7
opt_problem.driver.add_constraint('con_uAs',lower=0.0+1.0e-6) #8
opt_problem.driver.add_constraint('con_zAs',lower=0.0+1.0e-6) #9
opt_problem.driver.add_constraint('con_yAs',lower=0.0+1.0e-6) #10
opt_problem.driver.add_constraint('con_uAr',lower=0.0+1.0e-6) #11
opt_problem.driver.add_constraint('con_zAr',lower=0.0+1.0e-6) #12
opt_problem.driver.add_constraint('con_yAr',lower=0.0+1.0e-6) #13
opt_problem.driver.add_constraint('con_TC2',lower=0.0+1.0e-6) #14
opt_problem.driver.add_constraint('con_TC3',lower=0.0+1e-6) #15
opt_problem.driver.add_constraint('con_br',lower=0.0+1e-6) #16
opt_problem.driver.add_constraint('con_bst',lower=0.0-1e-6) #17
opt_problem.driver.add_constraint('A_1',upper=60000.0-1e-6) #18
opt_problem.driver.add_constraint('J_s',upper=6.0) #19
opt_problem.driver.add_constraint('J_f',upper=6.0) #20
opt_problem.driver.add_constraint('A_Cuscalc',lower=5.0,upper=300) #22
opt_problem.driver.add_constraint('A_Curcalc',lower=10,upper=300) #23
opt_problem.driver.add_constraint('K_rad',lower=0.2+1e-6,upper=0.27) #24
opt_problem.driver.add_constraint('Slot_aspect_ratio',lower=4.0,upper=10.0)#25
opt_problem.driver.add_constraint('gen_eff',lower=Eta_Target) #26
opt_problem.driver.add_constraint('n_brushes',upper=6) #27
opt_problem.driver.add_constraint('Power_ratio',upper=2-1.0e-6) #28
opt_problem.setup()
# Specify Target machine parameters
opt_problem['machine_rating']=5000000.0
opt_problem['Torque']=4.143289e6
opt_problem['n_nom']=12.1
# Initial design variables
opt_problem['r_s']=3.2
opt_problem['l_s']=1.4
opt_problem['h_s']= 0.060
opt_problem['tau_p']= 0.170
opt_problem['I_f']= 69
opt_problem['N_f']= 100
opt_problem['h_ys']= 0.130
opt_problem['h_yr']= 0.120
opt_problem['n_s']= 5
opt_problem['b_st']= 0.470
opt_problem['n_r']=5
opt_problem['b_r']= 0.480
opt_problem['d_r']= 0.510
opt_problem['d_s']= 0.400
opt_problem['t_wr']=0.140
opt_problem['t_ws']=0.070
opt_problem['R_o']=0.43 #10MW: 0.523950817,#5MW: 0.43, #3MW:0.363882632 #1.5MW: 0.2775 0.75MW: 0.17625
# Costs
opt_problem['C_Cu']=4.786
opt_problem['C_Fe']= 0.556
opt_problem['C_Fes']=0.50139
#Material properties
opt_problem['rho_Fe']= 7700 #Magnetic Steel/iron density
opt_problem['rho_Fes']= 7850 #structural Steel density
opt_problem['rho_Copper']=8900 # Kg/m3 copper density
opt_problem['main_shaft_cm']=np.array([0.0, 0.0, 0.0])
opt_problem['main_shaft_length'] =2.0
#Run optimization
opt_problem.run()
"""Uncomment to print solution to screen/an excel file
raw_data = {'Parameters': ['Rating','Stator Arms', 'Stator Axial arm dimension','Stator Circumferential arm dimension',' Stator arm Thickness' ,'Rotor Arms', 'Rotor Axial arm dimension','Rotor Circumferential arm dimension',\
'Rotor Arm thickness', ' Rotor Radial deflection', 'Rotor Axial deflection','Rotor circum deflection', 'Stator Radial deflection',' Stator Axial deflection',' Stator Circumferential deflection','Air gap diameter', 'Stator length',\
'l/D ratio', 'Pole pitch', 'Stator slot height','Stator slot width','Slot aspect ratio','Stator tooth width', 'Stator yoke height', 'Rotor yoke height', 'Rotor pole height', 'Rotor pole width', 'Average no load flux density', \
'Peak air gap flux density','Peak stator yoke flux density','Peak rotor yoke flux density','Stator tooth flux density','Rotor pole core flux density','Pole pairs', 'Generator output frequency', 'Generator output phase voltage(rms value)', \
'Generator Output phase current', 'Stator resistance', 'Synchronous inductance','Stator slots','Stator turns','Stator conductor cross-section','Stator Current density ','Specific current loading','Field turns','Conductor cross-section',\
'Field Current','D.C Field resistance','MMF ratio at rated load(Rotor/Stator)','Excitation Power (% of Rated Power)','Number of brushes/polarity','Field Current density','Generator Efficiency', 'Iron mass', 'Copper mass','Mass of Arms','Total Mass','Total Cost'],\
'Values': [opt_problem['machine_rating']/1e6,opt_problem['n_s'],opt_problem['d_s']*1000,opt_problem['b_st']*1000,opt_problem['t_ws']*1000,opt_problem['n_r'],opt_problem['d_r']*1000,opt_problem['b_r']*1000,opt_problem['t_wr']*1000,opt_problem['u_Ar']*1000,\
opt_problem['y_Ar']*1000,opt_problem['z_A_r']*1000,opt_problem['u_As']*1000,opt_problem['y_As']*1000,opt_problem['z_A_s']*1000,2*opt_problem['r_s'],opt_problem['l_s'],opt_problem['K_rad'],opt_problem['tau_p']*1000,opt_problem['h_s']*1000,opt_problem['b_s']*1000,\
opt_problem['Slot_aspect_ratio'],opt_problem['b_t']*1000,opt_problem['h_ys']*1000,opt_problem['h_yr']*1000,opt_problem['h_p']*1000,opt_problem['b_p']*1000,opt_problem['B_gfm'],opt_problem['B_g'],opt_problem['B_symax'],opt_problem['B_rymax'],opt_problem['B_tmax'],\
opt_problem['B_pc'],opt_problem['p'],opt_problem['f'],opt_problem['E_s'],opt_problem['I_s'],opt_problem['R_s'],opt_problem['L_m'],opt_problem['S'],opt_problem['N_s'],opt_problem['A_Cuscalc'],opt_problem['J_s'],opt_problem['A_1']/1000,opt_problem['N_f'],opt_problem['A_Curcalc'],\
opt_problem['I_f'],opt_problem['R_r'],opt_problem['Load_mmf_ratio'],opt_problem['Power_ratio'],opt_problem['n_brushes'],opt_problem['J_f'],opt_problem['gen_eff'],opt_problem['Iron']/1000,opt_problem['Copper']/1000,opt_problem['Structural_mass']/1000,\
opt_problem['Mass']/1000,opt_problem['Costs']/1000],
'Limit': ['','','',opt_problem['b_all_s']*1000,'','','',opt_problem['b_all_r']*1000,'',opt_problem['u_all_r']*1000,opt_problem['y_all']*1000,opt_problem['z_all_r']*1000,opt_problem['u_all_s']*1000,opt_problem['y_all']*1000,opt_problem['z_all_s']*1000,\
'','','(0.2-0.27)','','','','(4-10)','','','','','','(0.62-1.05)','1.2','2','2','2','2','','(10-60)','','','','','','','','(3-6)','<60','','','','','','<2%','','(3-6)',Eta_Target,'','','','',''],
'Units':['MW','unit','mm','mm','mm','unit','mm','mm','mm','mm','mm','mm','mm','mm','mm','m','m','','','mm','mm','mm','mm','mm','mm','mm','mm','T','T','T','T','T','T','-','Hz','V','A','om/phase',\
'p.u','slots','turns','mm^2','A/mm^2','kA/m','turns','mm^2','A','ohm','%','%','brushes','A/mm^2','turns','%','tons','tons','tons','1000$']}
df=pandas.DataFrame(raw_data, columns=['Parameters','Values','Limit','Units'])
print(df)
df.to_excel('EESG_'+str(opt_problem['machine_rating']/1e6)+'MW_1.7.x.xlsx')
"""
if __name__=="__main__":
# Run an example optimization of EESG generator on cost
EESG_Opt_example()
| 48.264778
| 282
| 0.660177
| 29,108
| 0.742722
| 0
| 0
| 0
| 0
| 0
| 0
| 15,724
| 0.401215
|
83fc1d70cbfd496107dfaac0a519dc08e54e550f
| 2,879
|
py
|
Python
|
haiku/_src/integration/numpy_inputs_test.py
|
timwillhack/dm-haikuBah2
|
b76a3db3a39b82c8a1ae5a81a8a0173c23c252e5
|
[
"Apache-2.0"
] | 1,647
|
2020-02-21T14:24:31.000Z
|
2022-03-31T04:31:34.000Z
|
haiku/_src/integration/numpy_inputs_test.py
|
timwillhack/dm-haikuBah2
|
b76a3db3a39b82c8a1ae5a81a8a0173c23c252e5
|
[
"Apache-2.0"
] | 169
|
2020-02-21T14:07:25.000Z
|
2022-03-31T13:08:28.000Z
|
haiku/_src/integration/numpy_inputs_test.py
|
timwillhack/dm-haikuBah2
|
b76a3db3a39b82c8a1ae5a81a8a0173c23c252e5
|
[
"Apache-2.0"
] | 159
|
2020-02-21T19:31:02.000Z
|
2022-03-29T12:41:35.000Z
|
# Copyright 2020 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests whether modules produce similar output given np.ndarray inputs."""
import functools
from typing import Tuple
from absl.testing import absltest
from absl.testing import parameterized
import haiku as hk
from haiku._src import test_utils
from haiku._src.integration import descriptors
import jax
import jax.numpy as jnp
import numpy as np
ModuleFn = descriptors.ModuleFn
def tree_assert_allclose(a, b, *, atol=1e-6):
jax.tree_multimap(
functools.partial(np.testing.assert_allclose, atol=atol), a, b)
class NumpyInputsTest(parameterized.TestCase):
@test_utils.combined_named_parameters(
descriptors.ALL_MODULES,
test_utils.named_bools('np_inputs'),
test_utils.named_bools('np_params'),
test_utils.named_bools('close_over_params'))
def test_numpy_and_jax_results_close(
self,
module_fn: ModuleFn,
shape: Tuple[int, ...],
dtype: jnp.dtype,
np_params: bool,
np_inputs: bool,
close_over_params: bool,
):
if not (np_params or np_inputs):
self.skipTest('Pure JAX variants tested elsewhere')
f = hk.transform_with_state(lambda x: module_fn()(x)) # pylint: disable=unnecessary-lambda
rng = jax.random.PRNGKey(42)
x = jnp.ones(shape, dtype)
params, state = f.init(rng, x)
if close_over_params:
apply_fn = functools.partial(f.apply, params, state)
out, new_state = jax.jit(apply_fn)(rng, x)
else:
out, new_state = jax.jit(f.apply)(params, state, rng, x)
if np_inputs:
rng, x = jax.device_get((rng, x))
with self.subTest('init'):
params2, state2 = f.init(rng, x)
tree_assert_allclose(params, params2)
tree_assert_allclose(state, state2)
with self.subTest('apply'):
if np_params:
params, state = jax.device_get((params, state))
if close_over_params:
apply_fn = functools.partial(f.apply, params, state)
out2, new_state2 = jax.jit(apply_fn)(rng, x)
else:
out2, new_state2 = jax.jit(f.apply)(params, state, rng, x)
tree_assert_allclose(out, out2)
tree_assert_allclose(new_state, new_state2)
if __name__ == '__main__':
absltest.main()
| 31.637363
| 95
| 0.685655
| 1,614
| 0.560611
| 0
| 0
| 1,564
| 0.543244
| 0
| 0
| 893
| 0.310177
|
83fc4f9ad87af1b6d3bc93e82f86f3dfb8315e07
| 519
|
py
|
Python
|
OldStreamingExperiments/NeighbourReducerCounter.py
|
AldurD392/SubgraphExplorer
|
d7c5de234a9ae1a83a017e77074fde5fd1d430b9
|
[
"MIT"
] | null | null | null |
OldStreamingExperiments/NeighbourReducerCounter.py
|
AldurD392/SubgraphExplorer
|
d7c5de234a9ae1a83a017e77074fde5fd1d430b9
|
[
"MIT"
] | null | null | null |
OldStreamingExperiments/NeighbourReducerCounter.py
|
AldurD392/SubgraphExplorer
|
d7c5de234a9ae1a83a017e77074fde5fd1d430b9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""A more advanced Reducer, using Python iterators and generators."""
from itertools import groupby
from operator import itemgetter
import sys
def read_mapper_output(file, separator='\t'):
for line in file:
yield line.rstrip().split(separator, 1)
def main(separator='\t'):
data = read_mapper_output(sys.stdin, separator=separator)
for key, value in groupby(data, itemgetter(0)):
print("{}\t{}".format(key, len(list(value))))
if __name__ == "__main__":
main()
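# Example (Hadoop streaming): given sorted mapper output on stdin
#
#   a\t1
#   a\t1
#   b\t1
#
# the reducer emits one neighbour count per key:
#
#   a\t2
#   b\t1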
| 24.714286
| 69
| 0.689788
| 0
| 0
| 115
| 0.22158
| 0
| 0
| 0
| 0
| 116
| 0.223507
|
83fd35f6f554a1bd8fc3e12924ca6da425b05086
| 486
|
py
|
Python
|
tests/programs/lists/member_isin.py
|
astraldawn/pylps
|
e9964a24bb38657b180d441223b4cdb9e1dadc8a
|
[
"MIT"
] | 1
|
2018-05-19T18:28:12.000Z
|
2018-05-19T18:28:12.000Z
|
tests/programs/lists/member_isin.py
|
astraldawn/pylps
|
e9964a24bb38657b180d441223b4cdb9e1dadc8a
|
[
"MIT"
] | 12
|
2018-04-26T00:58:11.000Z
|
2018-05-13T22:03:39.000Z
|
tests/programs/lists/member_isin.py
|
astraldawn/pylps
|
e9964a24bb38657b180d441223b4cdb9e1dadc8a
|
[
"MIT"
] | null | null | null |
from pylps.core import *
initialise(max_time=5)
create_actions('say(_, _)', 'say_single(_)')
create_events('member(_, _)')
create_facts('inp(_, _)')
create_variables('X', 'Y', 'F', 'Item', 'List', 'Tail')
inp([], [[]])
inp('z', ['a', 'b', 'c', 'd', 'e'])
inp('a', ['b', 'c', 'a'])
inp(['b', 'c'], ['d', ['a', 'c']])
inp(['b', 'c'], ['d', ['a', 'c'], ['b', 'c']])
reactive_rule(inp(Item, List)).then(
Item.is_in(List),
say(Item, List),
)
execute(debug=False)
show_kb_log()
| 20.25
| 55
| 0.522634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 144
| 0.296296
|
83ff49e0443c8a936583d9a35d43b023aa52642a
| 6,663
|
py
|
Python
|
fastface/dataset/base.py
|
mdornseif/fastface
|
72772db1fae4af17e829cd5479c4848fe5eb8948
|
[
"MIT"
] | 72
|
2021-01-03T05:43:56.000Z
|
2021-09-17T06:09:35.000Z
|
fastface/dataset/base.py
|
mdornseif/fastface
|
72772db1fae4af17e829cd5479c4848fe5eb8948
|
[
"MIT"
] | 3
|
2021-09-23T22:26:57.000Z
|
2021-10-31T10:11:48.000Z
|
fastface/dataset/base.py
|
mdornseif/fastface
|
72772db1fae4af17e829cd5479c4848fe5eb8948
|
[
"MIT"
] | 6
|
2021-02-15T19:58:57.000Z
|
2021-08-19T12:46:41.000Z
|
import copy
import logging
import os
from typing import Dict, List, Tuple
import checksumdir
import imageio
import numpy as np
import torch
from torch.utils.data import DataLoader, Dataset
from tqdm import tqdm
from ..adapter import download_object
logger = logging.getLogger("fastface.dataset")
class _IdentityTransforms:
"""Dummy transforms."""
def __call__(self, img: np.ndarray, targets: Dict) -> Tuple:
return img, targets
def default_collate_fn(batch):
batch, targets = zip(*batch)
batch = np.stack(batch, axis=0).astype(np.float32)
batch = torch.from_numpy(batch).permute(0, 3, 1, 2).contiguous()
for i, target in enumerate(targets):
for k, v in target.items():
if isinstance(v, np.ndarray):
targets[i][k] = torch.from_numpy(v)
return batch, targets
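# Illustrative round-trip through default_collate_fn: two HWC uint8 images
# become a single NCHW float32 tensor and numpy targets become torch tensors.
#
#   imgs = [np.zeros((32, 32, 3), dtype=np.uint8) for _ in range(2)]
#   tgts = [{"target_boxes": np.zeros((0, 4), dtype=np.float32)}
#           for _ in range(2)]
#   batch, targets = default_collate_fn(list(zip(imgs, tgts)))
#   # batch.shape == (2, 3, 32, 32); batch.dtype == torch.float32
#   # targets[0]["target_boxes"] is now a torch.Tensor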
class BaseDataset(Dataset):
def __init__(self, ids: List[str], targets: List[Dict], transforms=None, **kwargs):
super().__init__()
assert isinstance(ids, list), "given `ids` must be a list"
assert isinstance(targets, list), "given `targets` must be a list"
assert len(ids) == len(targets), "length of both lists must be equal"
self.ids = ids
self.targets = targets
self.transforms = _IdentityTransforms() if transforms is None else transforms
# set given kwargs to the dataset
for key, value in kwargs.items():
if hasattr(self, key):
# attribute already exists; skip it rather than overwrite
continue
setattr(self, key, value)
def __getitem__(self, idx: int) -> Tuple:
img = self._load_image(self.ids[idx])
targets = copy.deepcopy(self.targets[idx])
# apply transforms
img, targets = self.transforms(img, targets)
# clip boxes
targets["target_boxes"] = self._clip_boxes(
targets["target_boxes"], img.shape[:2]
)
# discard zero sized boxes
targets["target_boxes"] = self._discard_zero_size_boxes(targets["target_boxes"])
return (img, targets)
def __len__(self) -> int:
return len(self.ids)
@staticmethod
def _clip_boxes(boxes: np.ndarray, shape: Tuple[int, int]) -> np.ndarray:
"""Clip box coordinates to [0, width-1] x [0, height-1]."""
height, width = shape
boxes[:, [0, 2]] = boxes[:, [0, 2]].clip(min=0, max=width - 1)
boxes[:, [1, 3]] = boxes[:, [1, 3]].clip(min=0, max=height - 1)
return boxes
@staticmethod
def _discard_zero_size_boxes(boxes: np.ndarray) -> np.ndarray:
"""Drop boxes whose smaller side is not strictly positive."""
scale = (boxes[:, [2, 3]] - boxes[:, [0, 1]]).min(axis=1)
return boxes[scale > 0]
@staticmethod
def _load_image(img_file_path: str):
"""loads rgb image using given file path
Args:
img_path (str): image file path to load
Returns:
np.ndarray: rgb image as np.ndarray
"""
img = imageio.imread(img_file_path)
if not img.flags["C_CONTIGUOUS"]:
# if img is not contiguous, make it contiguous
img = np.ascontiguousarray(img, dtype=img.dtype)
if len(img.shape) == 3 and img.shape[2] == 4:
# found RGBA, converting to => RGB
img = img[:, :, :3]
elif len(img.shape) == 2:
# found GRAYSCALE, converting to => RGB
img = np.stack([img, img, img], axis=-1)
return np.array(img, dtype=np.uint8)
def get_dataloader(
self,
batch_size: int = 1,
shuffle: bool = False,
num_workers: int = 0,
collate_fn=default_collate_fn,
pin_memory: bool = False,
**kwargs
):
return DataLoader(
self,
batch_size=batch_size,
shuffle=shuffle,
num_workers=num_workers,
collate_fn=collate_fn,
pin_memory=pin_memory,
**kwargs
)
def get_mean_std(self) -> Dict:
"""Per-channel mean and std over the dataset via E[x^2] - E[x]^2 of per-image channel means."""
mean_sum, mean_sq_sum = np.zeros(3), np.zeros(3)
for img, _ in tqdm(
self, total=len(self), desc="calculating mean and std for the dataset"
):
d = img.astype(np.float32) / 255
mean_sum[0] += np.mean(d[:, :, 0])
mean_sum[1] += np.mean(d[:, :, 1])
mean_sum[2] += np.mean(d[:, :, 2])
mean_sq_sum[0] += np.mean(d[:, :, 0] ** 2)
mean_sq_sum[1] += np.mean(d[:, :, 1] ** 2)
mean_sq_sum[2] += np.mean(d[:, :, 2] ** 2)
mean = mean_sum / len(self)
std = (mean_sq_sum / len(self) - mean ** 2) ** 0.5
return {"mean": mean.tolist(), "std": std.tolist()}
def get_normalized_boxes(self) -> np.ndarray:
"""All target boxes of the dataset, normalized by the longest image side."""
normalized_boxes = []
for img, targets in tqdm(
self, total=len(self), desc="computing normalized target boxes"
):
if targets["target_boxes"].shape[0] == 0:
continue
max_size = max(img.shape)
normalized_boxes.append(targets["target_boxes"] / max_size)
return np.concatenate(normalized_boxes, axis=0)
def get_box_scale_histogram(self) -> Tuple[np.ndarray, np.ndarray]:
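"""Histogram of sqrt(box areas) over power-of-two bin edges 1, 2, ..., 512."""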
bins = map(lambda x: 2 ** x, range(10))
total_boxes = []
for _, targets in tqdm(self, total=len(self), desc="getting box sizes"):
if targets["target_boxes"].shape[0] == 0:
continue
total_boxes.append(targets["target_boxes"])
total_boxes = np.concatenate(total_boxes, axis=0)
areas = (total_boxes[:, 2] - total_boxes[:, 0]) * (
total_boxes[:, 3] - total_boxes[:, 1]
)
return np.histogram(np.sqrt(areas), bins=list(bins))
def download(self, urls: List, target_dir: str):
for k, v in urls.items():
keys = list(v["check"].items())
checked_keys = []
for key, md5hash in keys:
target_sub_dir = os.path.join(target_dir, key)
if not os.path.exists(target_sub_dir):
checked_keys.append(False)
else:
checked_keys.append(
checksumdir.dirhash(target_sub_dir, hashfunc="md5") == md5hash
)
if sum(checked_keys) == len(keys):
logger.debug("found {} at {}".format(k, target_dir))
continue
# download
adapter = v.get("adapter")
kwargs = v.get("kwargs", {})
logger.warning(
"{} not found in the {}, downloading...".format(k, target_dir)
)
download_object(adapter, dest_path=target_dir, **kwargs)
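# Minimal usage sketch (illustrative paths and boxes -- each target dict must
# carry a "target_boxes" array of [x1, y1, x2, y2] rows, as __getitem__ expects):
#
#   ids = ["/path/to/img_0.jpg", "/path/to/img_1.jpg"]  # placeholder paths
#   targets = [
#       {"target_boxes": np.array([[10, 10, 50, 60]], dtype=np.float32)},
#       {"target_boxes": np.zeros((0, 4), dtype=np.float32)},
#   ]
#   ds = BaseDataset(ids, targets)
#   loader = ds.get_dataloader(batch_size=2)
#   batch, batch_targets = next(iter(loader))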
| 31.880383
| 88
| 0.559508
| 5,970
| 0.895993
| 0
| 0
| 1,249
| 0.187453
| 0
| 0
| 904
| 0.135675
|
83ff592ddca67877e6b752e54e39cc96af464cdd
| 1,565
|
py
|
Python
|
clevr_video/params.py
|
jiaqi-xi/slot_attention
|
8420414eb261501e5b056e4d409c338d909397ef
|
[
"Apache-2.0"
] | null | null | null |
clevr_video/params.py
|
jiaqi-xi/slot_attention
|
8420414eb261501e5b056e4d409c338d909397ef
|
[
"Apache-2.0"
] | null | null | null |
clevr_video/params.py
|
jiaqi-xi/slot_attention
|
8420414eb261501e5b056e4d409c338d909397ef
|
[
"Apache-2.0"
] | 1
|
2021-11-11T19:44:14.000Z
|
2021-11-11T19:44:14.000Z
|
from typing import Optional
from typing import Tuple
import attr
@attr.s(auto_attribs=True)
class SlotAttentionParams:
lr: float = 0.0004
batch_size: int = 64
val_batch_size: int = 64
resolution: Tuple[int, int] = (128, 128)
num_slots: int = 7  # changed from 5 to 7 to match the official code
num_iterations: int = 3
data_root: str = "/scratch/ssd004/scratch/ziyiwu/data/clevr_video/train/"
gpus: int = 1
max_epochs: int = 8
num_sanity_val_steps: int = 1
scheduler_gamma: float = 0.5
weight_decay: float = 0.0
num_train_images: Optional[int] = None
num_val_images: Optional[int] = None
empty_cache: bool = True
is_logger_enabled: bool = True
is_verbose: bool = True
num_workers: int = 4
n_samples: int = 5
warmup_steps_pct: float = 0.02
decay_steps_pct: float = 0.2
# whether use relu in SlotModel
use_relu: bool = True
# MLP hidden size in Slot Attention
slot_mlp_size: int = 128
# use self-entropy loss to masks
use_entropy_loss: bool = False
entropy_loss_w: float = 1.0
# whether set the slot parameters as learnable (to be updated by BP)
# TODO: should be True in official code!!!
# TODO: but this codebase set it as False and I've done lots of exp using
# TODO: it so far... So I set False as the default value
learnable_slot = False
# whether train mu and sigma or slot embedding, or directly emb itself
random_slot: bool = True
# whether each slot shares one set of learned parameters
slot_agnostic: bool = True
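# Minimal usage sketch (assuming this file is importable as-is; attrs generates
# __init__ from the annotated fields above):
#   params = SlotAttentionParams(batch_size=32, gpus=2)
#   print(params.num_slots, params.resolution)  # -> 7 (128, 128)
# Note that `learnable_slot` carries no type annotation, so under
# @attr.s(auto_attribs=True) it stays a plain class attribute rather than an
# attrs field -- it does not appear in the generated __init__.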
| 34.021739
| 77
| 0.686901
| 1,469
| 0.938658
| 0
| 0
| 1,496
| 0.955911
| 0
| 0
| 561
| 0.358466
|
83fff1491dc4525ae4d3d5754c54e0efcce41659
| 989
|
py
|
Python
|
sources/car.py
|
amaurylrd/banlieu_drift
|
bd9e435bf5ce25e782a59de33472beb932cac9ad
|
[
"Apache-2.0"
] | 1
|
2021-11-26T16:57:54.000Z
|
2021-11-26T16:57:54.000Z
|
sources/car.py
|
amaurylrd/banlieu_drift
|
bd9e435bf5ce25e782a59de33472beb932cac9ad
|
[
"Apache-2.0"
] | null | null | null |
sources/car.py
|
amaurylrd/banlieu_drift
|
bd9e435bf5ce25e782a59de33472beb932cac9ad
|
[
"Apache-2.0"
] | null | null | null |
import pygame
import math
coef_turn = 0.3
coef_drift = 0.07 # grip on the ground
coef_vel = 10
class Car:
def __init__(self):
self.dir_target = -1
self.dir = -1
self.posx = 0
self.velx = -1
self.w = 50
self.h = 100
def update(self, dt):
self.dir += dt * coef_turn * (self.dir_target - self.dir)
if abs(self.dir - self.velx) < 0.1:
self.velx = self.dir
self.velx += dt * coef_drift * (self.dir - self.velx)
self.posx += dt * coef_vel * self.velx
def display(self, screen):
theta = math.atan2(self.dir, 1)
points = []
for i in [1, -1]:
for j in [i, -i]:
x = 1920/2 + i * self.w / 2 * math.cos(theta) - j * self.h / 2 * math.sin(theta)
y = 700 + i * self.w / 2 * math.sin(theta) + j * self.h / 2 * math.cos(theta)
points.append((x, y))
pygame.draw.polygon(screen, (255, 0, 0), points)
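# Minimal game-loop sketch (illustrative; the window size and key handling are
# assumptions on my part, not part of the original file):
#   pygame.init()
#   screen = pygame.display.set_mode((1920, 1080))
#   clock, car, running = pygame.time.Clock(), Car(), True
#   while running:
#       dt = clock.tick(60) / 1000  # seconds since the last frame
#       for event in pygame.event.get():
#           if event.type == pygame.QUIT:
#               running = False
#       keys = pygame.key.get_pressed()
#       # steer by setting the target direction; update() lerps toward it
#       car.dir_target = -1 if keys[pygame.K_LEFT] else (1 if keys[pygame.K_RIGHT] else 0)
#       car.update(dt)
#       screen.fill((30, 30, 30))
#       car.display(screen)
#       pygame.display.flip()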
| 29.088235
| 96
| 0.50455
| 894
| 0.90303
| 0
| 0
| 0
| 0
| 0
| 0
| 19
| 0.019192
|
8601805a4413deebe6198ae3e881b519806f6bcf
| 6,014
|
py
|
Python
|
test/test_static.py
|
fjarri/grunnur
|
5eea8ec408e431f43a59780cdf8be2f441a9ebb5
|
[
"MIT"
] | 1
|
2020-12-04T12:19:18.000Z
|
2020-12-04T12:19:18.000Z
|
test/test_static.py
|
fjarri/grunnur
|
5eea8ec408e431f43a59780cdf8be2f441a9ebb5
|
[
"MIT"
] | 11
|
2021-03-11T00:20:23.000Z
|
2021-03-11T01:05:54.000Z
|
test/test_static.py
|
fjarri/grunnur
|
5eea8ec408e431f43a59780cdf8be2f441a9ebb5
|
[
"MIT"
] | null | null | null |
import pytest
import numpy
from grunnur import (
cuda_api_id, opencl_api_id,
StaticKernel, VirtualSizeError, API, Context, Queue, MultiQueue, Array, MultiArray
)
from grunnur.template import DefTemplate
from .mock_base import MockKernel, MockDefTemplate
from .mock_pycuda import PyCUDADeviceInfo
from .mock_pyopencl import PyOpenCLDeviceInfo
from .test_program import _test_constant_memory
SRC = """
KERNEL void multiply(GLOBAL_MEM int *dest, GLOBAL_MEM int *a, GLOBAL_MEM int *b)
{
${static.begin};
const int i = ${static.global_id}(0);
const int j = ${static.global_id}(1);
const int idx = ${static.global_flat_id}();
dest[idx] = a[i] * b[j];
}
"""
def test_compile_static(mock_or_real_context):
context, mocked = mock_or_real_context
if mocked:
kernel = MockKernel('multiply', [None, None, None], max_total_local_sizes={0: 1024})
src = MockDefTemplate(kernels=[kernel])
else:
src = SRC
a = numpy.arange(11).astype(numpy.int32)
b = numpy.arange(15).astype(numpy.int32)
ref = numpy.outer(a, b)
queue = Queue(context.device)
a_dev = Array.from_host(queue, a)
b_dev = Array.from_host(queue, b)
res_dev = Array.empty(context.device, (11, 15), numpy.int32)
multiply = StaticKernel(context.device, src, 'multiply', (11, 15))
multiply(queue, res_dev, a_dev, b_dev)
res = res_dev.get(queue)
if not mocked:
assert (res == ref).all()
def test_compile_static_multi_device(mock_or_real_multi_device_context):
context, mocked = mock_or_real_multi_device_context
if mocked:
kernel = MockKernel(
'multiply', [None, None, None], max_total_local_sizes={0: 1024, 1: 512})
src = MockDefTemplate(kernels=[kernel])
else:
src = SRC
a = numpy.arange(22).astype(numpy.int32)
b = numpy.arange(15).astype(numpy.int32)
ref = numpy.outer(a, b)
mqueue = MultiQueue.on_devices(context.devices[[0, 1]])
a_dev = MultiArray.from_host(mqueue, a)
b_dev = MultiArray.from_host(mqueue, b, splay=MultiArray.CloneSplay())
res_dev = MultiArray.empty(mqueue.devices, (22, 15), ref.dtype)
multiply = StaticKernel(mqueue.devices, src, 'multiply', res_dev.shapes)
multiply(mqueue, res_dev, a_dev, b_dev)
res = res_dev.get(mqueue)
if not mocked:
assert (res == ref).all()
def test_constant_memory(mock_or_real_context):
context, mocked = mock_or_real_context
_test_constant_memory(context=context, mocked=mocked, is_static=True)
def test_find_local_size(mock_context):
kernel = MockKernel('multiply', [None], max_total_local_sizes={0: 64})
src = MockDefTemplate(kernels=[kernel])
multiply = StaticKernel(mock_context.device, src, 'multiply', (11, 15))
assert multiply._vs_metadata[mock_context.devices[0]].real_global_size == (16, 12)
assert multiply._vs_metadata[mock_context.devices[0]].real_local_size == (16, 4)
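# Sanity check on the numbers above (my reading of the virtual-size logic, not
# documented in this file): the local size (16, 4) uses exactly 16 * 4 = 64
# threads, the mocked per-kernel maximum, and the real global size (16, 12)
# provides 16 * 12 = 192 >= 11 * 15 = 165 threads, enough to cover the virtual
# shape via flat indexing.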
def test_reserved_names(mock_context):
kernel = MockKernel('test', [None])
src = MockDefTemplate(kernels=[kernel])
with pytest.raises(ValueError, match="The global name 'static' is reserved in static kernels"):
multiply = StaticKernel(mock_context.device, src, 'test', (1024,), render_globals=dict(static=1))
def test_zero_max_total_local_size(mock_context):
kernel = MockKernel('test', [None], max_total_local_sizes={0: 0})
src = MockDefTemplate(kernels=[kernel])
with pytest.raises(
VirtualSizeError,
match="The kernel requires too much resourses to be executed with any local size"):
multiply = StaticKernel(mock_context.device, src, 'test', (1024,))
def test_virtual_sizes_error_propagated(mock_backend_pycuda):
# Testing for PyCUDA backend only since mocked PyOpenCL backend does not have a way
# to set maximum global sizes (PyOpenCL devices don't have a corresponding parameter),
# and PyCUDA is enough to test the required code path.
device_info = PyCUDADeviceInfo(
max_threads_per_block=2**4,
max_block_dim_x=2**4,
max_block_dim_y=2**4,
max_block_dim_z=2**4,
max_grid_dim_x=2**10,
max_grid_dim_y=2**10,
max_grid_dim_z=2**8)
mock_backend_pycuda.add_devices([device_info])
api = API.from_api_id(mock_backend_pycuda.api_id)
device = api.platforms[0].devices[0]
context = Context.from_devices([device])
kernel = MockKernel('test', [None], max_total_local_sizes={0: 16})
src = MockDefTemplate(kernels=[kernel])
# Just enough to fit in the grid limits
multiply = StaticKernel(context.device, src, 'test', (2**14, 2**10, 2**8), (2**4, 1, 1))
# Global size is too large to fit on the device,
# so virtual size finding fails and the error is propagated to the user.
with pytest.raises(
VirtualSizeError,
match="Bounding global size \\(16384, 2048, 256\\) is too large"):
multiply = StaticKernel(context.device, src, 'test', (2**14, 2**11, 2**8), (2**4, 1, 1))
def test_builtin_globals(mock_backend_pycuda):
mock_backend_pycuda.add_devices([
PyCUDADeviceInfo(max_threads_per_block=1024),
PyCUDADeviceInfo(max_threads_per_block=512)])
source_template = DefTemplate.from_string(
'mock_source', [],
"""
KERNEL void test()
{
int max_total_local_size = ${device_params.max_total_local_size};
}
""")
api = API.from_api_id(mock_backend_pycuda.api_id)
context = Context.from_devices([api.platforms[0].devices[0], api.platforms[0].devices[1]])
src = MockDefTemplate(
kernels=[MockKernel('test', [None], max_total_local_sizes={0: 1024, 1: 512})],
source_template=source_template)
kernel = StaticKernel(context.devices, src, 'test', (1024,))
assert 'max_total_local_size = 1024' in kernel.sources[context.devices[0]].source
assert 'max_total_local_size = 512' in kernel.sources[context.devices[1]].source
| 34.365714
| 105
| 0.689225
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,169
| 0.19438
|
86024a0f256f012bd58b4d8e9b5de4b21cc1702d
| 1,024
|
py
|
Python
|
stat_ip_in_hash_woker_table.py
|
ligang945/pyMisc
|
3107c80f7f53ffc797b289ec73d1ef4db80f0b63
|
[
"MIT"
] | null | null | null |
stat_ip_in_hash_woker_table.py
|
ligang945/pyMisc
|
3107c80f7f53ffc797b289ec73d1ef4db80f0b63
|
[
"MIT"
] | null | null | null |
stat_ip_in_hash_woker_table.py
|
ligang945/pyMisc
|
3107c80f7f53ffc797b289ec73d1ef4db80f0b63
|
[
"MIT"
] | null | null | null |
def sortedDict(adict):
keys = sorted(adict.keys())
return [adict.get(k) for k in keys]
ipint2str = lambda x: '.'.join(str(x // (256 ** i) % 256) for i in range(3, -1, -1))
ipstr2int = lambda x: sum(256 ** j * int(i) for j, i in enumerate(x.split('.')[::-1]))
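# Round-trip example: '1.2.3.4' -> 1*256**3 + 2*256**2 + 3*256 + 4 = 16909060
#   assert ipstr2int('1.2.3.4') == 16909060
#   assert ipint2str(16909060) == '1.2.3.4'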
src_ip = dict()
dst_ip = dict()
i = 0
with open('hash_key_value') as f:
for line in f:
i += 1
# if i == 8424720:
if i == 328:
break
ip = int(line.split(',')[0], 16)
direction = int(line.split(',')[1])  # renamed from `dir`, which shadows the builtin
if direction == 1:
src_ip.setdefault(ip, direction)
elif direction == 0:
dst_ip.setdefault(ip, direction)
print(len(src_ip))
for key in src_ip:
print(ipint2str(key) + ' ', end='')
print('=======')
print(len(dst_ip))
for key in dst_ip:
print(ipint2str(key) + ' ', end='')
# keys = sorted(src_ip.items())
# for key in keys:
#     print(ipint2str(key[0]))
# keys = sorted(dst_ip.items())
# for key in keys:
#     print(ipint2str(key[0]))
| 20.078431
| 82
| 0.512695
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 219
| 0.213867
|
86028d3af8b32e8fcc5b56f0951579ff48885aaa
| 8,212
|
py
|
Python
|
platform/winrt/detect.py
|
bdero/godot
|
e7572c690a3a6792e5aa183e16d902bff77398bc
|
[
"CC-BY-3.0",
"MIT"
] | 24
|
2016-10-14T16:54:01.000Z
|
2022-01-15T06:39:17.000Z
|
platform/winrt/detect.py
|
bdero/godot
|
e7572c690a3a6792e5aa183e16d902bff77398bc
|
[
"CC-BY-3.0",
"MIT"
] | 2
|
2021-08-17T02:04:05.000Z
|
2021-09-18T13:55:13.000Z
|
platform/winrt/detect.py
|
bdero/godot
|
e7572c690a3a6792e5aa183e16d902bff77398bc
|
[
"CC-BY-3.0",
"MIT"
] | 9
|
2017-08-04T12:00:16.000Z
|
2021-12-10T06:48:28.000Z
|
import os
import sys
def is_active():
return True
def get_name():
return "WinRT"
def can_build():
if (os.name=="nt"):
#building natively on windows!
if (os.getenv("VSINSTALLDIR")):
return True
return False
def get_opts():
return []
def get_flags():
return []
def configure(env):
env.Append(CPPPATH=['#platform/winrt', '#platform/winrt/include'])
arch = ""
if os.getenv('PLATFORM') == "ARM":
# compiler commandline
# debug: /Yu"pch.h" /MP /GS /analyze- /W3 /wd"4453" /wd"28204" /Zc:wchar_t /I"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\App2\App2.WindowsPhone\" /I"Generated Files\" /I"ARM\Debug\" /I"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\App2\App2.Shared\" /ZW:nostdlib /Zi /Gm- /Od /sdl /Fd"ARM\Debug\vc120.pdb" /fp:precise /D "PSAPI_VERSION=2" /D "WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP" /D "_UITHREADCTXT_SUPPORT=0" /D "_UNICODE" /D "UNICODE" /D "_DEBUG" /errorReport:prompt /WX- /Zc:forScope /RTC1 /ZW /Gd /Oy- /MDd /Fa"ARM\Debug\" /EHsc /nologo /Fo"ARM\Debug\" /Fp"ARM\Debug\App2.WindowsPhone.pch"
# release: /Yu"pch.h" /MP /GS /GL /analyze- /W3 /wd"4453" /wd"28204" /Gy /Zc:wchar_t /I"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\App2\App2.WindowsPhone\" /I"Generated Files\" /I"ARM\Release\" /I"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\App2\App2.Shared\" /ZW:nostdlib /Zi /Gm- /O2 /sdl /Fd"ARM\Release\vc120.pdb" /fp:precise /D "PSAPI_VERSION=2" /D "WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP" /D "_UITHREADCTXT_SUPPORT=0" /D "_UNICODE" /D "UNICODE" /errorReport:prompt /WX- /Zc:forScope /ZW /Gd /Oy- /Oi /MD /Fa"ARM\Release\" /EHsc /nologo /Fo"ARM\Release\" /Fp"ARM\Release\App2.WindowsPhone.pch"
# linker commandline
# debug: /OUT:"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\ARM\Debug\App2.WindowsPhone\App2.WindowsPhone.exe" /MANIFEST:NO /NXCOMPAT /PDB:"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\ARM\Debug\App2.WindowsPhone\App2.WindowsPhone.pdb" /DYNAMICBASE "WindowsPhoneCore.lib" "RuntimeObject.lib" "PhoneAppModelHost.lib" /DEBUG /MACHINE:ARM /NODEFAULTLIB:"kernel32.lib" /NODEFAULTLIB:"ole32.lib" /WINMD /APPCONTAINER /INCREMENTAL /PGD:"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\ARM\Debug\App2.WindowsPhone\App2.WindowsPhone.pgd" /WINMDFILE:"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\ARM\Debug\App2.WindowsPhone\App2.winmd" /SUBSYSTEM:WINDOWS /MANIFESTUAC:NO /ManifestFile:"ARM\Debug\App2.WindowsPhone.exe.intermediate.manifest" /ERRORREPORT:PROMPT /NOLOGO /TLBID:1
# release: /OUT:"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\ARM\Release\App2.WindowsPhone\App2.WindowsPhone.exe" /MANIFEST:NO /LTCG /NXCOMPAT /PDB:"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\ARM\Release\App2.WindowsPhone\App2.WindowsPhone.pdb" /DYNAMICBASE "WindowsPhoneCore.lib" "RuntimeObject.lib" "PhoneAppModelHost.lib" /DEBUG /MACHINE:ARM /NODEFAULTLIB:"kernel32.lib" /NODEFAULTLIB:"ole32.lib" /WINMD /APPCONTAINER /OPT:REF /PGD:"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\ARM\Release\App2.WindowsPhone\App2.WindowsPhone.pgd" /WINMDFILE:"C:\Users\ariel\Documents\Visual Studio 2013\Projects\App2\ARM\Release\App2.WindowsPhone\App2.winmd" /SUBSYSTEM:WINDOWS /MANIFESTUAC:NO /ManifestFile:"ARM\Release\App2.WindowsPhone.exe.intermediate.manifest" /OPT:ICF /ERRORREPORT:PROMPT /NOLOGO /TLBID:1
arch = "arm"
env.Append(LINKFLAGS=['/INCREMENTAL:NO', '/MANIFEST:NO', '/NXCOMPAT', '/DYNAMICBASE', "WindowsPhoneCore.lib", "RuntimeObject.lib", "PhoneAppModelHost.lib", "/DEBUG", "/MACHINE:ARM", '/NODEFAULTLIB:"kernel32.lib"', '/NODEFAULTLIB:"ole32.lib"', '/WINMD', '/APPCONTAINER', '/MANIFESTUAC:NO', '/ERRORREPORT:PROMPT', '/NOLOGO', '/TLBID:1'])
env.Append(LIBPATH=['#platform/winrt/ARM/lib'])
env.Append(CCFLAGS='/MP /GS /wd"4453" /wd"28204" /analyze- /Zc:wchar_t /Zi /Gm- /Od /fp:precise /fp:precise /D "PSAPI_VERSION=2" /D "WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP" /DWINDOWSPHONE_ENABLED /D "_UITHREADCTXT_SUPPORT=0" /D "_UNICODE" /D "UNICODE" /errorReport:prompt /WX- /Zc:forScope /Gd /Oy- /Oi /MD /RTC1 /Gd /EHsc /nologo'.split())
env.Append(CXXFLAGS=['/ZW'])
if (env["target"]=="release"):
env.Append(CCFLAGS=['/O2'])
env.Append(LINKFLAGS=['/SUBSYSTEM:WINDOWS'])
elif (env["target"]=="test"):
env.Append(CCFLAGS=['/O2','/DDEBUG_ENABLED','/DD3D_DEBUG_INFO'])
env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE'])
elif (env["target"]=="debug"):
env.Append(CCFLAGS=['/Zi','/DDEBUG_ENABLED','/DD3D_DEBUG_INFO'])
env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE'])
env.Append(LINKFLAGS=['/DEBUG', '/D_DEBUG'])
elif (env["target"]=="profile"):
env.Append(CCFLAGS=['-g','-pg'])
env.Append(LINKFLAGS=['-pg'])
env['ENV'] = os.environ
# fix environment for windows phone 8.1
env['ENV']['WINDOWSPHONEKITDIR'] = env['ENV']['WINDOWSPHONEKITDIR'].replace("8.0", "8.1") # wtf
env['ENV']['INCLUDE'] = env['ENV']['INCLUDE'].replace("8.0", "8.1")
env['ENV']['LIB'] = env['ENV']['LIB'].replace("8.0", "8.1")
env['ENV']['PATH'] = env['ENV']['PATH'].replace("8.0", "8.1")
env['ENV']['LIBPATH'] = env['ENV']['LIBPATH'].replace("8.0\\Windows Metadata", "8.1\\References\\CommonConfiguration\\Neutral")
else:
arch = "x64"
env.Append(LINKFLAGS=['/MANIFEST:NO', '/NXCOMPAT', '/DYNAMICBASE', "kernel32.lib", '/MACHINE:X64', '/WINMD', '/APPCONTAINER', '/MANIFESTUAC:NO', '/ERRORREPORT:PROMPT', '/NOLOGO', '/TLBID:1'])
env.Append(LIBPATH=['#platform/winrt/x64/lib'])
if (env["target"]=="release"):
env.Append(CCFLAGS=['/O2'])
env.Append(LINKFLAGS=['/SUBSYSTEM:WINDOWS'])
env.Append(LINKFLAGS=['/ENTRY:mainCRTStartup'])
elif (env["target"]=="test"):
env.Append(CCFLAGS=['/O2','/DDEBUG_ENABLED','/DD3D_DEBUG_INFO'])
env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE'])
elif (env["target"]=="debug"):
env.Append(CCFLAGS=['/Zi','/DDEBUG_ENABLED','/DD3D_DEBUG_INFO'])
env.Append(LINKFLAGS=['/SUBSYSTEM:CONSOLE'])
env.Append(LINKFLAGS=['/DEBUG', '/D_DEBUG'])
elif (env["target"]=="profile"):
env.Append(CCFLAGS=['-g','-pg'])
env.Append(LINKFLAGS=['-pg'])
env.Append(CCFLAGS='/MP /GS /wd"4453" /wd"28204" /Zc:wchar_t /Gm- /Od /fp:precise /D "_UNICODE" /D "UNICODE" /D "WINAPI_FAMILY=WINAPI_FAMILY_APP" /errorReport:prompt /WX- /Zc:forScope /RTC1 /Gd /MDd /EHsc /nologo'.split())
env.Append(CXXFLAGS=['/ZW'])
env.Append(CCFLAGS=['/AI', os.environ['VCINSTALLDIR']+'\\vcpackages', '/AI', os.environ['WINDOWSSDKDIR']+'\\References\\CommonConfiguration\\Neutral'])
env.Append(CCFLAGS=['/DWINAPI_FAMILY=WINAPI_FAMILY_APP', '/D_WIN32_WINNT=0x0603', '/DNTDDI_VERSION=0x06030000'])
env['ENV'] = os.environ
env["PROGSUFFIX"]="."+arch+env["PROGSUFFIX"]
env["OBJSUFFIX"]="."+arch+env["OBJSUFFIX"]
env["LIBSUFFIX"]="."+arch+env["LIBSUFFIX"]
#env.Append(CCFLAGS=['/Gd','/GR','/nologo', '/EHsc'])
#env.Append(CXXFLAGS=['/TP', '/ZW'])
#env.Append(CPPFLAGS=['/DMSVC', '/GR', ])
##env.Append(CCFLAGS=['/I'+os.getenv("WindowsSdkDir")+"/Include"])
env.Append(CCFLAGS=['/DWINRT_ENABLED'])
env.Append(CCFLAGS=['/DWINDOWS_ENABLED'])
env.Append(CCFLAGS=['/DRTAUDIO_ENABLED'])
#env.Append(CCFLAGS=['/DWIN32'])
env.Append(CCFLAGS=['/DTYPED_METHOD_BIND'])
env.Append(CCFLAGS=['/DGLES2_ENABLED'])
#env.Append(CCFLAGS=['/DGLES1_ENABLED'])
LIBS=[
#'winmm',
'libEGL',
'libGLESv2',
'libANGLE',
#'kernel32','ole32','user32', 'advapi32'
]
env.Append(LINKFLAGS=[p+".lib" for p in LIBS])
import methods
env.Append( BUILDERS = { 'GLSL120' : env.Builder(action = methods.build_legacygl_headers, suffix = 'glsl.h',src_suffix = '.glsl') } )
env.Append( BUILDERS = { 'GLSL' : env.Builder(action = methods.build_glsl_headers, suffix = 'glsl.h',src_suffix = '.glsl') } )
env.Append( BUILDERS = { 'HLSL9' : env.Builder(action = methods.build_hlsl_dx9_headers, suffix = 'hlsl.h',src_suffix = '.hlsl') } )
env.Append( BUILDERS = { 'GLSL120GLES' : env.Builder(action = methods.build_gles2_headers, suffix = 'glsl.h',src_suffix = '.glsl') } )
#/c/Program Files (x86)/Windows Phone Kits/8.1/lib/ARM/WindowsPhoneCore.lib
| 52.305732
| 846
| 0.693132
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 5,821
| 0.708841
|
8602e07af8df333a6a9bc854df324adb49b003af
| 6,934
|
py
|
Python
|
monasca-log-api-2.9.0/monasca_log_api/tests/test_role_middleware.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | null | null | null |
monasca-log-api-2.9.0/monasca_log_api/tests/test_role_middleware.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | 5
|
2019-08-14T06:46:03.000Z
|
2021-12-13T20:01:25.000Z
|
monasca-log-api-2.9.0/monasca_log_api/tests/test_role_middleware.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | 2
|
2020-03-15T01:24:15.000Z
|
2020-07-22T20:34:26.000Z
|
# Copyright 2015-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from webob import response
from monasca_log_api.middleware import role_middleware as rm
from monasca_log_api.tests import base
class SideLogicTestEnsureLowerRoles(base.BaseTestCase):
def test_should_ensure_lower_roles(self):
roles = ['CMM-Admin', ' CmM-User ']
expected = ['cmm-admin', 'cmm-user']
self.assertItemsEqual(expected, rm._ensure_lower_roles(roles))
def test_should_return_empty_array_for_falsy_input_1(self):
roles = []
expected = []
self.assertItemsEqual(expected, rm._ensure_lower_roles(roles))
def test_should_return_empty_array_for_falsy_input_2(self):
roles = None
expected = []
self.assertItemsEqual(expected, rm._ensure_lower_roles(roles))
class SideLogicTestIntersect(base.BaseTestCase):
def test_should_intersect_seqs(self):
seq_1 = [1, 2, 3]
seq_2 = [2]
expected = [2]
self.assertItemsEqual(expected, rm._intersect(seq_1, seq_2))
self.assertItemsEqual(expected, rm._intersect(seq_2, seq_1))
def test_should_intersect_empty(self):
seq_1 = []
seq_2 = []
expected = []
self.assertItemsEqual(expected, rm._intersect(seq_1, seq_2))
self.assertItemsEqual(expected, rm._intersect(seq_2, seq_1))
def test_should_not_intersect_without_common_elements(self):
seq_1 = [1, 2, 3]
seq_2 = [4, 5, 6]
expected = []
self.assertItemsEqual(expected, rm._intersect(seq_1, seq_2))
self.assertItemsEqual(expected, rm._intersect(seq_2, seq_1))
class RolesMiddlewareSideLogicTest(base.BaseTestCase):
def test_should_apply_middleware_for_valid_path(self):
paths = ['/', '/v2.0/', '/v2.0/log/']
instance = rm.RoleMiddleware(None)
instance._path = paths
for p in paths:
req = mock.Mock()
req.method = 'GET'
req.path = p
self.assertTrue(instance._can_apply_middleware(req))
def test_should_apply_middleware_for_invalid_path(self):
paths = ['/v2.0/', '/v2.0/log/']
instance = rm.RoleMiddleware(None)
instance._path = paths
for p in paths:
pp = 'test/%s' % p
req = mock.Mock()
req.method = 'GET'
req.path = pp
self.assertFalse(instance._can_apply_middleware(req))
def test_should_reject_OPTIONS_request(self):
instance = rm.RoleMiddleware(None)
req = mock.Mock()
req.method = 'OPTIONS'
req.path = '/'
self.assertFalse(instance._can_apply_middleware(req))
def test_should_return_true_if_authenticated(self):
instance = rm.RoleMiddleware(None)
req = mock.Mock()
req.headers = {rm._X_IDENTITY_STATUS: rm._CONFIRMED_STATUS}
self.assertTrue(instance._is_authenticated(req))
def test_should_return_false_if_not_authenticated(self):
instance = rm.RoleMiddleware(None)
req = mock.Mock()
req.headers = {rm._X_IDENTITY_STATUS: 'Some_Other_Status'}
self.assertFalse(instance._is_authenticated(req))
def test_should_return_false_if_identity_status_not_found(self):
instance = rm.RoleMiddleware(None)
req = mock.Mock()
req.headers = {}
self.assertFalse(instance._is_authenticated(req))
def test_should_return_true_if_is_agent(self):
roles = 'cmm-admin,cmm-user'
roles_array = roles.split(',')
default_roles = [roles_array[0]]
admin_roles = [roles_array[1]]
instance = rm.RoleMiddleware(None)
instance._default_roles = default_roles
instance._agent_roles = admin_roles
req = mock.Mock()
req.headers = {rm._X_ROLES: roles}
is_agent = instance._is_agent(req)
self.assertTrue(is_agent)
class RolesMiddlewareLogicTest(base.BaseTestCase):
def test_not_process_further_if_cannot_apply_path(self):
roles = 'cmm-admin,cmm-user'
roles_array = roles.split(',')
default_roles = [roles_array[0]]
admin_roles = [roles_array[1]]
instance = rm.RoleMiddleware(None)
instance._default_roles = default_roles
instance._agent_roles = admin_roles
instance._path = ['/test']
# spying
instance._is_authenticated = mock.Mock()
instance._is_agent = mock.Mock()
req = mock.Mock()
req.headers = {rm._X_ROLES: roles}
req.path = '/different/test'
instance.process_request(req=req)
self.assertFalse(instance._is_authenticated.called)
self.assertFalse(instance._is_agent.called)
def test_not_process_further_if_cannot_apply_method(self):
roles = 'cmm-admin,cmm-user'
roles_array = roles.split(',')
default_roles = [roles_array[0]]
admin_roles = [roles_array[1]]
instance = rm.RoleMiddleware(None)
instance._default_roles = default_roles
instance._agent_roles = admin_roles
instance._path = ['/test']
# spying
instance._is_authenticated = mock.Mock()
instance._is_agent = mock.Mock()
req = mock.Mock()
req.headers = {rm._X_ROLES: roles}
req.path = '/test'
req.method = 'OPTIONS'
instance.process_request(req=req)
self.assertFalse(instance._is_authenticated.called)
self.assertFalse(instance._is_agent.called)
def test_should_produce_json_response_if_not_authenticated(
self):
instance = rm.RoleMiddleware(None)
is_agent = True
is_authenticated = False
instance._can_apply_middleware = mock.Mock(return_value=True)
instance._is_agent = mock.Mock(return_value=is_agent)
instance._is_authenticated = mock.Mock(return_value=is_authenticated)
req = mock.Mock()
req.environ = {}
req.headers = {
'X-Tenant-Id': '11111111'
}
result = instance.process_request(req=req)
self.assertIsNotNone(result)
self.assertIsInstance(result, response.Response)
status = result.status_code
json_body = result.json_body
message = json_body.get('message')
self.assertIn('Failed to authenticate request for', message)
self.assertEqual(401, status)
| 30.546256
| 77
| 0.657629
| 6,196
| 0.893568
| 0
| 0
| 0
| 0
| 0
| 0
| 914
| 0.131814
|
86035a85164418f81cd1bcd44a084b4bd7b49d04
| 328
|
py
|
Python
|
app/LOGS/logger_.py
|
innovationb1ue/XMU_HealthReport
|
6ee0c7830a0e30fc9730401585a303873f382bac
|
[
"MIT"
] | 2
|
2021-09-03T18:13:46.000Z
|
2022-01-13T08:48:36.000Z
|
app/LOGS/logger_.py
|
buuuuuuug/XMU_HealthReport
|
cb545959eceddf676b34237c38b1ba6f797764f5
|
[
"MIT"
] | null | null | null |
app/LOGS/logger_.py
|
buuuuuuug/XMU_HealthReport
|
cb545959eceddf676b34237c38b1ba6f797764f5
|
[
"MIT"
] | 1
|
2021-07-14T09:48:19.000Z
|
2021-07-14T09:48:19.000Z
|
import logging
class NewLogger:
def __init__(self, log_abs_path: str):
self.logger = logging.getLogger()
handler = logging.FileHandler(log_abs_path)
handler.setLevel(logging.ERROR)
self.logger.addHandler(handler)
def log(self, msg: str):
self.logger.log(logging.ERROR, msg)
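# Usage sketch (the path is illustrative):
#   logger = NewLogger('/tmp/app_errors.log')
#   logger.log('something went wrong')  # appended to the file at ERROR level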
| 18.222222
| 51
| 0.664634
| 305
| 0.929878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
860369b74b7a50328a72400a0fc52d3fc97e9d16
| 80
|
py
|
Python
|
route/link.py
|
moluwole/Bast_skeleton
|
9e58c1c0da3085b377896aab1e3007689c328c1c
|
[
"MIT"
] | 3
|
2018-08-04T21:11:35.000Z
|
2018-08-24T04:47:16.000Z
|
route/link.py
|
moluwole/Bast_skeleton
|
9e58c1c0da3085b377896aab1e3007689c328c1c
|
[
"MIT"
] | 1
|
2018-08-24T20:57:36.000Z
|
2018-08-24T20:57:36.000Z
|
route/link.py
|
moluwole/Bast_skeleton
|
9e58c1c0da3085b377896aab1e3007689c328c1c
|
[
"MIT"
] | 2
|
2018-08-05T19:14:16.000Z
|
2018-08-15T08:13:50.000Z
|
from bast import Route
route = Route()
route.get('/', 'HelloController.index')
| 16
| 39
| 0.7125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 26
| 0.325
|
86040bdb269e301a7c36430ecb672c9bac61af90
| 1,626
|
py
|
Python
|
paradrop/daemon/paradrop/core/config/wifi.py
|
VegetableChook/Paradrop
|
a38e1773877d5b136c3b626edd8c033a12b43e56
|
[
"Apache-2.0"
] | 1
|
2018-03-22T13:04:19.000Z
|
2018-03-22T13:04:19.000Z
|
paradrop/daemon/paradrop/core/config/wifi.py
|
VegetableChook/Paradrop
|
a38e1773877d5b136c3b626edd8c033a12b43e56
|
[
"Apache-2.0"
] | null | null | null |
paradrop/daemon/paradrop/core/config/wifi.py
|
VegetableChook/Paradrop
|
a38e1773877d5b136c3b626edd8c033a12b43e56
|
[
"Apache-2.0"
] | null | null | null |
from paradrop.base.output import out
from paradrop.lib.utils import uci
from . import configservice, uciutils
def getOSWirelessConfig(update):
"""
Read settings from networkInterfaces for wireless interfaces.
Store wireless configuration settings in osWirelessConfig.
"""
# old code under lib.internal.chs.chutelxc same function name
interfaces = update.new.getCache('networkInterfaces')
if interfaces is None:
return
wifiIfaces = list()
for iface in interfaces:
# Only look at wifi interfaces.
if iface['netType'] != "wifi":
continue
config = {'type': 'wifi-iface'}
options = {
'device': iface['device'],
'network': iface['externalIntf'],
'mode': iface.get('mode', 'ap')
}
# Required for AP and client mode but not monitor mode.
if 'ssid' in iface:
options['ssid'] = iface['ssid']
# Optional encryption settings
if 'encryption' in iface:
options['encryption'] = iface['encryption']
if 'key' in iface:
options['key'] = iface['key']
# Add extra options.
options.update(iface['options'])
wifiIfaces.append((config, options))
update.new.setCache('osWirelessConfig', wifiIfaces)
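# Illustrative example of one resulting (config, options) pair for an AP
# interface (all values below are made up for illustration):
#   ({'type': 'wifi-iface'},
#    {'device': 'radio0', 'network': 'vwlan0', 'mode': 'ap',
#     'ssid': 'example-ssid', 'encryption': 'psk2', 'key': 'secret'})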
def setOSWirelessConfig(update):
"""
Write settings from osWirelessConfig out to UCI files.
"""
changed = uciutils.setConfig(update.new, update.old,
cacheKeys=['osWirelessConfig'],
filepath=uci.getSystemPath("wireless"))
| 29.563636
| 72
| 0.602706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 638
| 0.392374
|
86040d6a3dcd14bd0d738d5bbdbdef5ec27bd32e
| 44,518
|
py
|
Python
|
xixi.py
|
niushuqing123/final-project
|
237745dd27a29c9a4b0574003c37fe4c875fde91
|
[
"MIT"
] | 9
|
2022-03-10T06:53:38.000Z
|
2022-03-19T08:54:51.000Z
|
xixi.py
|
niushuqing123/final-project
|
237745dd27a29c9a4b0574003c37fe4c875fde91
|
[
"MIT"
] | null | null | null |
xixi.py
|
niushuqing123/final-project
|
237745dd27a29c9a4b0574003c37fe4c875fde91
|
[
"MIT"
] | null | null | null |
import taichi as ti
import numpy as np
from functools import reduce
# from sph_base import SPHBase
# ti.init(arch=ti.cpu)
# Use GPU for higher performance if available
ti.init(arch=ti.gpu, device_memory_GB=4, packed=True)
# The neighbor-search grid is beyond me, so the size parameters follow the TA's reference code
# res = (720,720)
res = (512,512)
dim = 2
assert dim > 1
screen_to_world_ratio = 50
bound = np.array(res) / screen_to_world_ratio
print(bound)
# Material
material_boundary = 0
particle_radius = 0.05 # particle radius
particle_diameter = 2 * particle_radius
support_radius = particle_radius * 4.0 # support radius
m_V = 0.8 * particle_diameter ** dim
particle_max_num = 2 ** 15
particle_max_num_per_cell = 100
particle_max_num_neighbor = 200
particle_num = ti.field(int, shape=())
# gravity = -98.0  # gravity
viscosity = 0.05 # viscosity
density_0 = 1000.0 # reference density
mass = m_V * density_0
dt =3e-4
exponent = 7.0
stiffness = 50.0
# Particle state fields
x = ti.Vector.field(dim, dtype=float)
v = ti.Vector.field(dim, dtype=float)
d_velocity = ti.Vector.field(dim, dtype=float)
density = ti.field(dtype=float)
pressure = ti.field(dtype=float)
material = ti.field(dtype=int)
color = ti.field(dtype=int)
particle_neighbors = ti.field(int)
particle_neighbors_num = ti.field(int)
particles_node = ti.root.dense(ti.i, particle_max_num)
particles_node.place(x,v,d_velocity, density, pressure, material, color,particle_neighbors_num)
# Grid related properties
grid_size = support_radius
grid_num = np.ceil(np.array(res) / grid_size).astype(int)
print(grid_num)
grid_particles_num = ti.field(int)
grid_particles = ti.field(int)
padding = grid_size
particle_node = particles_node.dense(ti.j, particle_max_num_neighbor)
particle_node.place(particle_neighbors)
index = ti.ij if dim == 2 else ti.ijk
grid_node = ti.root.dense(index, grid_num)
grid_node.place(grid_particles_num)
cell_index = ti.k if dim == 2 else ti.l
cell_node = grid_node.dense(cell_index, particle_max_num_per_cell)
cell_node.place(grid_particles)
# ========================================
#
# boundary particle
# Circles (rigid-ish bodies): per-circle state
circular_max_num=1000
circular_num= ti.field(int, shape=())
circular_node = ti.root.dense(ti.i, circular_max_num)
c_x=ti.Vector.field(dim, dtype=float)
c_v=ti.Vector.field(dim, dtype=float)
c_f=ti.Vector.field(dim, dtype=float)
c_r=ti.field(float)
c_m=ti.field(float)
fixed = ti.field(int)
circular_node.place(c_x,c_v,c_f,c_r,c_m,fixed)
Young_modulus=2000000
# Spring data structure
rest_length = ti.field(dtype=float, shape=(circular_max_num, circular_max_num))
Young_modulus_spring=921000
dashpot_damping=300# spring dashpot damper
离墙距离=0.2# wall margin: boundary particles must not sit too close to the real boundary, or they stop working -- possibly a grid issue?
@ti.func
def cubic_kernel( r_norm):
res = ti.cast(0.0, ti.f32)
h = support_radius
# value of cubic spline smoothing kernel
k = 1.0
if dim == 1:
k = 4 / 3
elif dim == 2:
k = 40 / 7 / np.pi
elif dim == 3:
k = 8 / np.pi
k /= h ** dim
q = r_norm / h
if q <= 1.0:
if q <= 0.5:
q2 = q * q
q3 = q2 * q
res = k * (6.0 * q3 - 6.0 * q2 + 1)
else:
res = k * 2 * ti.pow(1 - q, 3.0)
# res was initialized above as a value cast to ti.f32
return res
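# The function above is the standard cubic spline smoothing kernel, with q = r/h:
#   W(q) = k * (6q^3 - 6q^2 + 1)   for 0   <= q <= 0.5
#   W(q) = k * 2 * (1 - q)^3       for 0.5 <  q <= 1
#   W(q) = 0                       otherwise
# where k is the dimension-dependent normalization computed above.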
@ti.func
def cubic_kernel_derivative( r):
h = support_radius
# derivative of cubic spline smoothing kernel
k = 1.0
if dim == 1:
k = 4 / 3
elif dim == 2:
k = 40 / 7 / np.pi
elif dim == 3:
k = 8 / np.pi
k = 6. * k / h ** dim
r_norm = r.norm()
q = r_norm / h
res = ti.Vector([0.0 for _ in range(dim)])
if r_norm > 1e-5 and q <= 1.0:
grad_q = r / (r_norm * h)
if q <= 0.5:
res = k * q * (3.0 * q - 2.0) * grad_q
else:
factor = 1.0 - q
res = k * (-factor * factor) * grad_q
return res
@ti.func
def viscosity_force( p_i, p_j, r):
# Compute the viscosity force contribution
v_xy = (v[p_i] -
v[p_j]).dot(r)
res = 2 * (dim + 2) * viscosity * (mass / (density[p_j])) * v_xy / (
r.norm()**2 + 0.01 * support_radius**2) * cubic_kernel_derivative(r)
return res
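# The expression above is the usual SPH artificial-viscosity form,
#   2(d+2) * nu * (m/rho_j) * (v_ij . r_ij) / (|r_ij|^2 + 0.01 h^2) * grad W(r_ij),
# where the 0.01 h^2 term guards against division by zero for nearly coincident particles.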
@ti.func
def pressure_force( p_i, p_j, r):
# Compute the pressure force contribution, Symmetric Formula
res = -density_0 * m_V * (pressure[p_i] / density[p_i] ** 2
+ pressure[p_j] / density[p_j] ** 2) \
* cubic_kernel_derivative(r)
return res
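# This is the symmetric WCSPH pressure term,
#   -rho_0 * V * (p_i / rho_i^2 + p_j / rho_j^2) * grad W(r_ij);
# the symmetric (i, j) form keeps pairwise forces equal and opposite, conserving momentum.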
@ti.func
def simulate_collisions( p_i, vec, d):
# Collision factor, assume roughly (1-c_f)*velocity loss after collision
c_f = 0.3
x[p_i] += vec * d
v[p_i] -= (1.0 + c_f) * v[p_i].dot(vec) * vec
@ti.kernel
def solve():
# Initialize the grid used for neighbor search
# initialize_particle_system()
for p in range(particle_num[None]):
cell = (x[p] / grid_size).cast(int)
offset = grid_particles_num[cell].atomic_add(1)
grid_particles[cell, offset] = p
# Neighbor search; I cannot write the grid myself, so this borrows the TA's code
# search_neighbors()
for i in range(particle_num[None]):
# This check seems unnecessary? # Skip boundary particles
# if material[i] == 0:
# continue
center_cell = (x[i] / grid_size).cast(int)
cnt = 0
for offset in ti.grouped(ti.ndrange(*((-1, 2),) * dim)):
if cnt >= particle_max_num_neighbor:
break
cell = center_cell + offset
flag = True
for d in ti.static(range(dim)):
flag = flag and (0 <= cell[d] < grid_num[d])
if not flag:
break
for j in range(grid_particles_num[cell]):
p_j = grid_particles[cell, j]
distance = (x[i] - x[p_j]).norm()
if i != p_j and distance < support_radius:
particle_neighbors[i, cnt] = p_j
cnt += 1
particle_neighbors_num[i] = cnt
# Compute densities from the surrounding neighbors
# compute_densities()
for i in range(particle_num[None]):
x_i = x[i]
density[i] = 0.0# reset density
for j in range(particle_neighbors_num[i]):
p_j = particle_neighbors[i, j]
x_j = x[p_j]
密度权重=1# density weight; temporary fix for particles near circles sliding toward the boundary
if(material[i]==2):
密度权重=6
density[i] += 密度权重*m_V * cubic_kernel((x_i - x_j).norm())
density[i] *= density_0
边界压力权重=1  # boundary pressure weight
# 边界粘性权重=1
# 边界系数_exponent=exponent
# Compute pressures from densities (Tait-like equation of state)
# compute_pressure_forces()
for i in range(particle_num[None]):# could be merged into the loop above
density[i] = ti.max(density[i], density_0)
# if(material[i]==2):
# 边界系数_exponent=7
# pressure[i] = stiffness * (ti.pow(density[i] / density_0, 边界系数_exponent) - 1.0)
pressure[i] = stiffness * (ti.pow(density[i] / density_0,exponent) - 1.0)
# Gravity, pressure forces, viscosity forces
# compute_non_pressure_forces()
for i in range(particle_num[None]):
if material[i] == 2:
continue
x_i = x[i]
dv = ti.Vector([0 ,-280])# gravity
if(material[i]==3):
dv[1]*=-1
for j in range(particle_neighbors_num[i]):# accumulate each neighbor's contribution
p_j = particle_neighbors[i, j]
if(material[p_j]==2):
# boundary pressure weight
边界压力权重=4
# 边界粘性权重=1
x_j = x[p_j]
# pressure force
dv += 边界压力权重*pressure_force(i, p_j, x_i-x_j)
# viscosity force
# dv += 边界粘性权重*viscosity_force(i, p_j, x_i - x_j)
dv += viscosity_force(i, p_j, x_i - x_j)
d_velocity[i] = dv
# Symplectic Euler state update
for i in range(particle_num[None]):
if material[i] == 2:
continue
# if d_velocity[i].norm()>100:d_velocity[i]*=0.1# the coupling often produces high-speed particles; clamping the acceleration helps, but may cost performance -- test later
v[i] += dt * d_velocity[i]
if v[i].norm()>200:v[i]*=0.2# same as above
x[i] += dt * v[i]
# Keep the crude boundary condition for clamping positions
# Particles can also act as boundaries, but fast particles can tunnel through, so clamping is still needed
for i in range(particle_num[None]):
if material[i] ==2:
continue
pos = x[i]
# 离墙距离2 makes this boundary slightly wider than the particle boundary so particles don't get stuck on it; the v += updates were meant to add an outward push to guarantee nothing sticks, but are no longer needed
离墙距离2=离墙距离+0.01
if pos[0] < 离墙距离2:
# print("a")
x[i][0]+=-1.2*(pos[0] - 离墙距离2)
# v[i][0]+=4
if pos[0] > bound[0] - 离墙距离2:
# print("y")
x[i][0]-=-1.2*( bound[0] - 离墙距离2-pos[0])
# v[i][0]-=4
if pos[1] > bound[1] - 离墙距离2:
# print("s")
x[i][1]-=-1.2*(bound[1] - 离墙距离2-pos[1])
# v[i][1]-=4
if pos[1] < 离墙距离2:
# print("x")
x[i][1]+=-1.2*(pos[1] - 离墙距离2)
# v[i][1]+=4
# Circle collisions successfully coupled in! Circle-particle interaction
for i in range(circular_num[None]):
pos1 = c_x[i]
质量比例=particle_radius/c_r[i]# really the area (volume) ratio, via the radius ratio
for j in range(particle_num[None]):
direction_vector=pos1-x[j]
direction_vector_length=ti.sqrt(direction_vector[0]**2+direction_vector[1]**2)
if (direction_vector_length<=c_r[i]+particle_radius):
# if(material[j]==1):# interact only with fluid particles, since this algorithm is rough
# Build a tangential acceleration approximating friction; n is the normal perpendicular to the contact direction, v_rel is the relative velocity projected onto it
n=ti.Vector([direction_vector[1],-direction_vector[0]])
v_rel = (c_v[i] - v[j]).dot(n)
v[j]-=v_rel*n*dt*7
c_v[i]+=v_rel*n*dt*7
if(material[j]==1):x[j]-=direction_vector*direction_vector_length*0.05# push the particle out slightly, just enough to soften the impact; side effect: when an object in the water moves, nearby particles jitter
elastic_force=2000*(direction_vector/direction_vector_length)*(c_r[i]+particle_radius-direction_vector_length)
v[j]-=elastic_force# thanks to the Young's modulus magnitude, the dt can be omitted
c_v[i]+=elastic_force*质量比例*0.3# weaken the particles' influence on the circle
# Circle-circle collisions, plus the connecting springs
# for i in range(circular_num[None]):# the outer loop is merged with the one above
for j in range(i+1,circular_num[None]):
direction_vector=c_x[j]-pos1 # direction_vector=c_x[j]-c_x[i], with pos1 substituted
d = (direction_vector).normalized() # unit vector between the two circles
if rest_length[i, j] == 0: # is there a spring?
direction_vector_length=ti.sqrt(direction_vector[0]**2+direction_vector[1]**2)
if (direction_vector_length<=c_r[i] + c_r[j]):
elastic_force=Young_modulus*(direction_vector/direction_vector_length)*(c_r[i]+c_r[j]-direction_vector_length)
elastic_damping = (c_v[i] - c_v[j]).dot(direction_vector/direction_vector_length)
c_v[i] += -elastic_damping*10 * (direction_vector/direction_vector_length)*dt
c_v[j] -= -elastic_damping*10 * (direction_vector/direction_vector_length)*dt
c_v[i]-=elastic_force*dt
c_v[j]+=elastic_force*dt
else:
# Springs must accumulate into f: the damping depends on v, so v cannot be updated directly
# c_v[i] += Young_modulus_spring*(direction_vector.norm()/rest_length[j, i]-1)*d*dt
c_f[i] += Young_modulus_spring*(direction_vector.norm()/rest_length[j, i]-1)*d
# c_v[j] += -Young_modulus_spring*(direction_vector.norm()/rest_length[j, i]-1)*d*dt
c_f[j] += -Young_modulus_spring*(direction_vector.norm()/rest_length[j, i]-1)*d
v_rel = (c_v[j] - c_v[i]).dot(d)
# c_v[i] += v_rel*dashpot_damping* d*dt
c_f[i] += v_rel*dashpot_damping* d
# c_v[j] += -v_rel*dashpot_damping* d*dt
c_f[j] += -v_rel*dashpot_damping* d
# Semi-implicit Euler update of the circle positions
for i in range(circular_num[None]):
# The distinction here matters: particles change circle velocities, and the update formula has velocity-dependent terms, so fixed circles must have v and f zeroed promptly or the springs misbehave
if fixed[i]==0:
c_v[i]+=c_f[i]*dt
c_v[i]*=0.995
c_f[i]=[0,-2800]# reset the force (gravity only) after use
c_x[i] += c_v[i]*dt
else:
c_v[i]=[0,0]
c_f[i]=[0,0]
# The circle wall collision could be dropped, but the particle boundary needs testing first
# Circle wall collision, reusing the simplified formula from a homework; 1000 was the Young's modulus there, and since this code's modulus is large, keeping 1000 lets the *dt be dropped
for i in range(circular_num[None]):
if(c_x[i][0]<c_r[i]):c_v[i][0]+=(1000*(c_r[i]-c_x[i][0])-0.1*c_v[i][0])
if(c_x[i][1]<c_r[i]):c_v[i][1]+=(1000*(c_r[i]-c_x[i][1])-0.1*c_v[i][1])
if(c_x[i][0]+c_r[i]>bound[0]):c_v[i][0]+=(1000*(bound[0]-c_x[i][0]-c_r[i])-0.1*c_v[i][0])
if(c_x[i][1]+c_r[i]>bound[1]):c_v[i][1]+=(1000*(bound[1]-c_x[i][1]-c_r[i])-0.1*c_v[i][1])
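# Per-substep pipeline of solve(), summarized:
#   1. rebuild the background grid and bin particles into cells
#   2. neighbor search over adjacent cells
#   3. density summation, then pressure from the Tait-like equation of state
#   4. gravity + pressure + viscosity forces
#   5. symplectic Euler integration with velocity clamping
#   6. position clamping at the walls
#   7. two-way particle-circle coupling, circle-circle springs, circle integration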
def substep():
grid_particles_num.fill(0)
particle_neighbors.fill(-1)
solve()
@ti.kernel
def add_particle(posx:ti.f32,posy:ti.f32, vx:ti.f32,vy:ti.f32, material1:ti.i32, color1:ti.i32):
# print(x1)
color_=color1
if(color1==0):
if(material1==3):color_=0x00cc33
if(material1==2):color_=0x696969
if(material1==1):color_=0x87CEFA
num =particle_num[None]
x[num]= [posx,posy]
v[num]= [vx,vy]
density[num] = 1000
material[num] = material1
color[num] = color_
particle_num[None] += 1
作为边界的的粒子=0  # number of particles used as the boundary
def build_boundary():
# left
# 离墙距离=0.15
边界粒度=36
边界数量=380
for i in range(边界数量):
add_particle(离墙距离,i/边界粒度,0,0,2,0)
# bottom
for i in range(边界数量):
add_particle(i/边界粒度,离墙距离,0,0,2,0)
# top
for i in range(边界数量):
add_particle(i/边界粒度,res[0] /screen_to_world_ratio-离墙距离,0,0,2,0)
# right
for i in range(边界数量):
add_particle(res[0] /screen_to_world_ratio-离墙距离,i/边界粒度,0,0,2,0)
global 作为边界的的粒子
作为边界的的粒子=particle_num[None]
print(作为边界的的粒子)
# Breaks when too close to the boundary? A grid effect?
# Tweak the particle density weight so a thin layer of particles can serve as the boundary
# With particles as the entire boundary, the circle wall checks would no longer be needed
上一个粒子画的线=[0,0]
def p_bondary(pos1_,pos2_,dxdy):
# Two points determine a slope; approximate a boundary line by plotting points along it
# dxdy: plotting granularity
# The swap ensures we always draw from the lower point upward
if (pos1_[1] <=pos2_[1]):
pos1=pos1_
pos2=pos2_
else:
pos1=pos2_
pos2=pos1_
print(pos1)
print(pos2)
# slope k from the difference of the two points
k=(pos2[1]-pos1[1])/(pos2[0]-pos1[0])
print("k:",k)
dx=dy=dxdy# both default to one step
if k<0:
if(k>-1):
dx*=-1
dy=k*dxdy*-1# dy must be positive
else:
dx=(1/k)*dxdy
else:
if(k>=1):
dx=(1/k)*dxdy
else:
dy=k*dxdy
print("dx,dy:",dx,dy)
posx=posy=0
global 上一个粒子画的线
上一个粒子画的线[0]=particle_num[None]
if(k<0):
while(1):
add_particle((pos1[0]+posx),(pos1[1]+posy),0,0,2,0)
if(pos1[0]+posx>pos2[0]):posx+=dx# distinguish positive vs. negative slope
if(pos1[1]+posy<pos2[1]):posy+=dy
# if(pos1[1]+posy>pos2[1]):break
if(pos1[0]+posx<pos2[0]):break
print((posx,posy))
# print(pos1[0]+posx)
# add_particle((pos1[0]+posx),(pos1[1]+posy),0,0,2,0x956333)
else:
while(1):
add_particle((pos1[0]+posx),(pos1[1]+posy),0,0,2,0)
if(pos1[0]+posx<pos2[0]):posx+=dx# distinguish positive vs. negative slope
if(pos1[1]+posy<pos2[1]):posy+=dy
# if(pos1[1]+posy>pos2[1]):break
if(pos1[0]+posx>pos2[0]):break
print((pos1[0]+posx,pos1[1]+posy))
# add_particle((pos1[0]+posx),(pos1[1]+posy),0,0,2,0x956333)
上一个粒子画的线[1]=particle_num[None]
# print(上一个粒子画的线[1]-上一个粒子画的线[0])
def 边界粒子变流体(上一个粒子画的线):
if 上一个粒子画的线[0]==0 and 上一个粒子画的线[1]==0 :
for i in range(作为边界的的粒子,particle_num[None]):
if(material[i]==2):
material[i] = 1
else:
for i in range(上一个粒子画的线[0],上一个粒子画的线[1]):
material[i] = 1
# @ti.kernel
def 范围边界变流体(pos_xx: ti.f32, pos_yy: ti.f32,搜索半径: ti.f32):
# print("aaaa")
pos=ti.Vector([pos_xx,pos_yy])
# print(pos)
for i in range(particle_num[None]):
# print(i)
if material[i]== 2:
dpos=pos-x[i]
# print(dpos)
d = (dpos).norm() # distance from the particle to the mouse
# print(d)
if(d<=搜索半径):
material[i]=1
color[i]=0x87CEFA
print("sss")
def add_particle_cube(pos,size,material,color_):
li=(int)(size[0]*10)
lj=(int)(size[1]*10)
for i in range(li):
for j in range(lj):
pass
add_particle(pos[0]+i/18,pos[1]+j/18,0,0,material,color_)
def 一个水枪(水枪位置,水枪速度,material):
add_particle(水枪位置[0],水枪位置[1]+0.05, 水枪速度[0],水枪速度[1],material,0)
add_particle(水枪位置[0],水枪位置[1]+0.1, 水枪速度[0],水枪速度[1],material,0)
add_particle(水枪位置[0],水枪位置[1]+0.15, 水枪速度[0],水枪速度[1],material,0)
add_particle(水枪位置[0],水枪位置[1]+0.2, 水枪速度[0],水枪速度[1],material,0)
@ti.kernel
def delete_particle(num1:ti.i32):
if(particle_num[None]>作为边界的的粒子):
num2 =particle_num[None]
particle_num[None]-=num1
for i in range(num2-num1,num2):
x[i]=[0,0]
v[i]=[0,0]
d_velocity[i]=[0,0]
pressure[i]= 0
density[i] = 0
particle_neighbors_num[i]=0
material[i] = 0
color[i] = 0
pass
@ti.kernel
def copy_to_numpy_nd( np_arr: ti.ext_arr(), src_arr: ti.template(),num:ti.i32):
for i in range(num):
for j in ti.static(range(dim)):
np_arr[i, j] = src_arr[i][j]
@ti.kernel
def copy_to_numpy_nd1( np_arr: ti.ext_arr(), src_arr: ti.template(),num:ti.i32):
for i in range(num):
np_arr[i] = src_arr[i]
@ti.kernel
def copy_to_numpy_for_radius(np_arr: ti.ext_arr(),num:ti.i32):
for i in range(num):
if(material[i]==2):
np_arr[i]=9
else:
np_arr[i]=1
@ti.kernel
def copy_to_numpy_for_color(np_arr: ti.ext_arr(),num:ti.i32):
for i in range(num):
np_arr[i]=color[i]
@ti.kernel
def add_circular(pos_x: ti.f32, pos_y: ti.f32, r1: ti.f32,vx:ti.f32,vy:ti.f32,spring:ti.i32,fix:ti.i32,判定距离:ti.f32,链接长度:ti.f32):
# emulate default argument values
判定距离_=判定距离
链接长度_=链接长度
if(判定距离==0):判定距离_=1.25
if(链接长度==0):链接长度_=1
num=circular_num[None]
c_x[num] = ti.Vector([pos_x, pos_y]) # store the new circle's position
c_v[num]=ti.Vector([vx, vy])
fixed[num]=fix
c_r[num]=r1
c_m[num]=r1*r1
circular_num[None] += 1 # circle count + 1
if(spring==1):
for i in range(num): # scan the existing circles and check the distance to the new one
if(c_x[num]-c_x[i]).norm() < 判定距离_: # if close enough, add a spring between the two circles to the spring matrix
rest_length[num, i] = 链接长度_ # spring rest length
rest_length[i, num] = 链接长度_
def add_circular_cube(x,y,size,r):
li=(int)(size[0]/r)
lj=(int)(size[1]/r)
奇怪的参数=(1.6-r)
# This number was originally 2, i.e. circles spaced two radii apart; slightly less makes neighbors overlap a bit, so the block looks like a rectangle while particles cannot tunnel through
# For large radii it can be smaller; for small radii the same value explodes, apparently tied to the Young's modulus -- 1.6 is stable within a certain range for now
for i in range (li):
for j in range (lj):
# 3.8: must exceed 2*sqrt(2) and stay below 4 for a stable rectangle; smaller misses the diagonal springs, above 4 links too many springs
# add_circular(x+i*2*r,y+j*2*r,r,0,0,1,0,3.8*r,(1.45-r)*r)
add_circular(x+i*2*r,y+j*2*r,r,0,0,1,0,3.8*r,奇怪的参数*r)
def add_circular_cube_hollow(x,y,size,r):
pass
li=(int)(size[0]/r)
lj=(int)(size[1]/r)
奇怪的参数=(1.6-r)
# This number was originally 2, i.e. circles spaced two radii apart; slightly less makes neighbors overlap a bit, so the block looks like a rectangle while particles cannot tunnel through
# For large radii it can be smaller; for small radii the same value explodes, apparently tied to the Young's modulus -- 1.6 is stable within a certain range for now
for i in range (li):
for j in range (lj):
# 3.8: must exceed 2*sqrt(2) and stay below 4 for a stable rectangle; smaller misses the diagonal springs, above 4 links too many springs
# add_circular(x+i*2*r,y+j*2*r,r,0,0,1,0,3.8*r,(1.45-r)*r)
add_circular(x+i*2*r,y+j*2*r,r,0,0,1,0,3.8*r,奇怪的参数*r)
# Build an elastic rope; note the spring-linking trick
# Drawing a straight line again -- see p_bondary
# Manipulate the spring matrix directly, to avoid unwanted spring sticking
def build_a_chain(pos1_,pos2_,dxdy,fixed_first,fixed_end,弹簧原长比例):
# spring rest length; with both ends fixed it also reflects the stiffness
# r=dxdy/2
r=dxdy/1.8# should really be /2 -- slightly smaller, otherwise the circles are spaced too far apart
弹簧长度=1.8
if (pos1_[1] <=pos2_[1]):
pos1=pos1_
pos2=pos2_
else:
pos1=pos2_
pos2=pos1_
# slope k from the difference of the two points
k=(pos2[1]-pos1[1])/(pos2[0]-pos1[0])
print("k:",k)
dx=dy=dxdy# both default to one step
if k<0:
if(k>-1):
dx*=-1
dy=k*dxdy*-1# dy must be positive
else:
dx=(1/k)*dxdy
else:
if(k>=1):
dx=(1/k)*dxdy
else:
dy=k*dxdy
posx=posy=0
if(k<0):
while(1):
add_circular((pos1[0]+posx),(pos1[1]+posy),r,0,0,0,fixed_first,2*r,弹簧原长比例*r)
rest_length[circular_num[None],circular_num[None]-1]=弹簧原长比例*r
rest_length[circular_num[None]-1,circular_num[None]]=弹簧原长比例*r
fixed_first=0
if(pos1[0]+posx>pos2[0]):posx+=dx# distinguish positive vs. negative slope
if(pos1[1]+posy<pos2[1]):posy+=dy
# if(pos1[1]+posy>pos2[1]):break
if(pos1[0]+posx<pos2[0]):break
# print((posx,posy))
# print(pos1[0]+posx)
# add_particle((pos1[0]+posx),(pos1[1]+posy),0,0,2,0x956333)
else:
while(1):
add_circular((pos1[0]+posx),(pos1[1]+posy),r,0,0,0,fixed_first,2*r,弹簧原长比例*r)
rest_length[circular_num[None],circular_num[None]-1]=弹簧原长比例*r
rest_length[circular_num[None]-1,circular_num[None]]=弹簧原长比例*r
fixed_first=0
if(pos1[0]+posx<pos2[0]):posx+=dx# distinguish positive vs. negative slope
if(pos1[1]+posy<pos2[1]):posy+=dy
# if(pos1[1]+posy>pos2[1]):break
if(pos1[0]+posx>pos2[0]):break
# print((pos1[0]+posx,pos1[1]+posy))
# add_particle((pos1[0]+posx),(pos1[1]+posy),0,0,2,0x956333)
# remove the spring between the last circle and the next circle to come
rest_length[circular_num[None],circular_num[None]-1]=0
rest_length[circular_num[None]-1,circular_num[None]]=0
fixed[circular_num[None]-1]=fixed_end# make the last point a fixed point?
@ti.kernel
def attract(pos_x: ti.f32, pos_y: ti.f32):
for i in range(circular_num[None]):
if fixed[i] == 0: # only unpinned circles feel the external force
c_v[i] += -dt*10*(c_x[i]-ti.Vector([pos_x, pos_y]))*100
# Apply a velocity increment instead of a force: force computation happens in substep() and is reset to gravity-only every step
# dt * frame substep * position difference * constant factor; this is called outside the substep loop, so scale by the substep to keep the change proportional
# The minus sign: x1 - x2 points from x2 toward x1, so moving toward the target requires the opposite direction
被选中的圆 = ti.field(int, shape=())
# The selected circle could also be deleted, two ways: keep deleted circles but flag them inactive, or erase all their data and repack the arrays, which is a bit fiddly
@ti.kernel
def attract_one(pos_x: ti.f32, pos_y: ti.f32):
if fixed[被选中的圆[None]] == 0: # only if the circle is not pinned
# print("attract_one")
c_v[被选中的圆[None]] += -dt*(c_x[被选中的圆[None]]-ti.Vector([pos_x, pos_y]))*200000
@ti.kernel
def search_circular(pos_x: ti.f32, pos_y: ti.f32):
pos=ti.Vector([pos_x,pos_y])
# print("search_circular")
# print(pos)
for i in range(circular_num[None]):
# pinned circles can be selected too
# if fixed[i]== 1:
# continue
# print(c_x[i])
dpos=pos-c_x[i]
# print(dpos)
d = (dpos).norm() # distance from the cursor to the circle
# print(d)
# print(c_r[i])
if( d<=c_r[i]):
被选中的圆[None]=i
print("被选中的圆:",被选中的圆[None])
@ti.kernel
def switch_fixed():# toggle the selected circle's pinned state
if fixed[被选中的圆[None]] == 0:
fixed[被选中的圆[None]]=1
else:
fixed[被选中的圆[None]]=0
旋转圆下标buff=[0,0]
def build_a_wheel(pos,size_L,c_r,内半径与sizeL比例):
旋转圆下标buff[0]=circular_num[None]
# sqrt(3)/2 = 0.866
# build the springs by hand, to avoid unwanted spring sticking
#1
add_circular(pos[0],pos[1],内半径与sizeL比例*size_L,0,0,0,1,size_L,size_L)
#2
add_circular(pos[0]+size_L,pos[1],c_r,0,0,0,0,size_L+0.01,size_L)
rest_length[circular_num[None]-1,circular_num[None]-2]=size_L
rest_length[circular_num[None]-2,circular_num[None]-1]=size_L
#3
add_circular(pos[0]+0.5*size_L,pos[1]+0.866*size_L,c_r,0,0,0,0,size_L+0.05,size_L)
rest_length[circular_num[None]-1,circular_num[None]-2]=size_L
rest_length[circular_num[None]-2,circular_num[None]-1]=size_L
rest_length[circular_num[None]-1,旋转圆下标buff[0]]=size_L
rest_length[旋转圆下标buff[0],circular_num[None]-1]=size_L
#4
add_circular(pos[0]-0.5*size_L,pos[1]+0.866*size_L,c_r,0,0,0,0,size_L+0.05,size_L)
rest_length[circular_num[None]-1,circular_num[None]-2]=size_L
rest_length[circular_num[None]-2,circular_num[None]-1]=size_L
rest_length[circular_num[None]-1,旋转圆下标buff[0]]=size_L
rest_length[旋转圆下标buff[0],circular_num[None]-1]=size_L
#5
add_circular(pos[0]-size_L,pos[1],c_r,0,0,0,0,size_L+0.01,size_L)
rest_length[circular_num[None]-1,circular_num[None]-2]=size_L
rest_length[circular_num[None]-2,circular_num[None]-1]=size_L
rest_length[circular_num[None]-1,旋转圆下标buff[0]]=size_L
rest_length[旋转圆下标buff[0],circular_num[None]-1]=size_L
add_circular(pos[0]-0.5*size_L,pos[1]-0.866*size_L,c_r,0,0,0,0,size_L+0.05,size_L)
rest_length[circular_num[None]-1,circular_num[None]-2]=size_L
rest_length[circular_num[None]-2,circular_num[None]-1]=size_L
rest_length[circular_num[None]-1,旋转圆下标buff[0]]=size_L
rest_length[旋转圆下标buff[0],circular_num[None]-1]=size_L
add_circular(pos[0]+0.5*size_L,pos[1]-0.866*size_L,c_r,0,0,0,0,size_L+0.05,size_L)
rest_length[circular_num[None]-1,circular_num[None]-2]=size_L
rest_length[circular_num[None]-2,circular_num[None]-1]=size_L
rest_length[circular_num[None]-1,旋转圆下标buff[0]]=size_L
rest_length[旋转圆下标buff[0],circular_num[None]-1]=size_L
rest_length[circular_num[None]-1,旋转圆下标buff[0]+1]=size_L
rest_length[旋转圆下标buff[0]+1,circular_num[None]-1]=size_L
旋转圆下标buff[1]=circular_num[None]
@ti.kernel
def applied_rotating(num1:ti.i32,num2:ti.i32):
# Odd: the arguments are passed in, yet these two assignments are required -- omitting them raises an error, so num=num it is
num1=num1
num2=num2
# print(num1)
# print(num2)
x1 = c_x[num1]
num1 += 1
for i in range(num1, num2):
# essentially a tangent: multiply the normal by a constant to get the velocity
c_v[i][0] = (c_x[i][1]-x1[1])*80# constant speed
c_v[i][1] = -(c_x[i][0]-x1[0])*80
# c_v[i][0] += (c_x[i][1]-x1[1])*80# constant acceleration
# c_v[i][1] += -(c_x[i][0]-x1[0])*80
# Note on applying spin: the direction matches or opposes the default depending on whether you take center minus rim point or rim point minus center
# keep the spin direction consistent
@ti.kernel
def reset_allcirculars():
for i in range(circular_num[None]):
c_x[i] =[0, 0]
c_v[i]=[0, 0]
c_f[i]=[0, 0]
c_r[i]=0
c_m[i]=0
# c_m[i]=0
# clear all springs
for i in range(circular_num[None]):
for j in range(circular_num[None]):
rest_length[i, j] = 0
circular_num[None]=0
def revocation_a_cirulars():
circular_num[None] -= 1
num=circular_num[None]
# reset motion state
c_x[num] = ti.Vector([0, 0])
c_v[num]=ti.Vector([0, 0])
c_r[num]=0
c_m[num]=0
fixed[num]=0
for i in range(num):
# detach the springs
rest_length[i, num] = 0
rest_length[num, i] = 0
# reset motion state
def demo1():
build_a_wheel((3.6,2.6),0.6,0.1,1)
add_particle_cube((6,1),(2,2),3,0)
add_particle_cube((6,7),(3,1),1,0)
add_particle_cube((1,8),(2,2),1,0)
p_bondary((0.028*res[0] /screen_to_world_ratio,0.8*res[1] /screen_to_world_ratio),(0.3*res[0] /screen_to_world_ratio,0.4*res[1] /screen_to_world_ratio),0.02)
build_a_chain((0.7*res[0] /screen_to_world_ratio,0.9*res[1] /screen_to_world_ratio),(0.9*res[0] /screen_to_world_ratio,0.9*res[1] /screen_to_world_ratio),0.1,1,1,1.8)
# add_circular_cube(0.4*res[0] /screen_to_world_ratio,0.08*res[1] /screen_to_world_ratio,(0.4,0.6),0.1)
def demo2():
p_bondary((0.6*res[0] /screen_to_world_ratio,0.01*res[1] /screen_to_world_ratio),(0.61*res[0] /screen_to_world_ratio,0.8*res[1] /screen_to_world_ratio),0.02)
# build_a_wheel((3.6,2.6),0.6,0.1,1)
# print every tunable parameter from this function
def display_information():
pass
# not really needed for now
def main():
pause=-1
fream=0
# fluid rectangle size
operate_p_cube_w=2
operate_p_cube_h=2
# elastic rectangle size
operate_c_cube_w=1
operate_c_cube_h=1
# elastic circle radius
operate_c_r=0.2
# spring settings
operate_spring_length=1.25# effective spring length
operate_spring_detect=1# spring detection distance
# wheel size and rim circle radius
wheel_sizeL=0.6
wheel_c_r=0.1
# chain granularity and stiffness
chain_粒度=0.2
chain_弹簧比例=1.8
# anti-gravity fluid
是否反重力液体=1
# buffers recording two clicked positions
x_LMB_水枪1=[]
x_LMB_水枪2=[]
x_LMB_line=[]
x_LMB_chain=[]
水枪开关1=-1
水枪位置1=(1,8)
水枪速度1=(1,0)
水枪开关2=-1
水枪位置2=(1,8)
水枪速度2=(1,0)
旋转开关=-1
清除粒子开关=-1
擦除的搜索半径=0.2
build_boundary()
# demo1()
# demo2()
# add_particle_cube((2,2),(10,3),1)
# add_particle_cube((6,1),(2,2),3)
# build_a_wheel((7,7),wheel_sizeL,wheel_c_r,1)
gui = ti.GUI("xixi",res=((int)(res[0]*1.8),(int)(res[1]*1.8)),background_color=0xFFDEAD)
fream=0
while gui.running:
for e in gui.get_events(ti.GUI.PRESS):
if e.key in [ti.GUI.ESCAPE, ti.GUI.EXIT]:
exit()
elif e.key == ti.GUI.LMB: # left mouse button: add particles
# add_circular(e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[0] /screen_to_world_ratio,operate_c_r,0,0,0,0,operate_spring_detect,operate_spring_length)
# add_particle(e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[0] /screen_to_world_ratio,0,0,1,0x956333)
# attract_lastone(e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[0] /screen_to_world_ratio)
if(gui.is_pressed(ti.GUI.SHIFT)):
search_circular(e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[1] /screen_to_world_ratio)
# 范围边界变流体(e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[0] /screen_to_world_ratio)
elif gui.is_pressed('v'):
范围边界变流体(e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[0] /screen_to_world_ratio,擦除的搜索半径)
elif(gui.is_pressed(ti.GUI.CTRL)):
add_circular(e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[1] /screen_to_world_ratio,operate_c_r,0,0,0,0,operate_spring_detect,operate_spring_length)
elif e.key == ti.GUI.RMB: # right mouse button: add a circle with springs; with Ctrl held it becomes a fixed point
if gui.is_pressed('1'):
x_LMB_水枪1.append(gui.get_cursor_pos())
print(x_LMB_水枪1)
if(len(x_LMB_水枪1) == 2):
# a bit messy: n builds a unit vector, and /20 scales it so the jet has a minimum speed;
# the += 120 terms make the speed grow with the distance between the two clicks -- the formula could be simplified
n=np.linalg.norm([x_LMB_水枪1[1][0]-x_LMB_水枪1[0][0],x_LMB_水枪1[1][1]-x_LMB_水枪1[0][1]])/20
print(n)
水枪位置1=(x_LMB_水枪1[0][0]* res[0] /screen_to_world_ratio,x_LMB_水枪1[0][1]* res[1] /screen_to_world_ratio )
水枪速度1 = [x_LMB_水枪1[1][0]-x_LMB_水枪1[0][0],x_LMB_水枪1[1][1]-x_LMB_水枪1[0][1]]/n
水枪速度1[0] +=120*(x_LMB_水枪1[1][0]-x_LMB_水枪1[0][0])
水枪速度1[1] +=120*(x_LMB_水枪1[1][1]-x_LMB_水枪1[0][1])
x_LMB_水枪1 = []
elif gui.is_pressed('2'):
x_LMB_水枪2.append(gui.get_cursor_pos())
print(x_LMB_水枪2)
if(len(x_LMB_水枪2) == 2):
# a bit messy: n builds a unit vector, and /20 scales it so the jet has a minimum speed;
# the += 120 terms make the speed grow with the distance between the two clicks -- the formula could be simplified
n=np.linalg.norm([x_LMB_水枪2[1][0]-x_LMB_水枪2[0][0],x_LMB_水枪2[1][1]-x_LMB_水枪2[0][1]])/20
print(n)
水枪位置2=(x_LMB_水枪2[0][0]* res[0] /screen_to_world_ratio,x_LMB_水枪2[0][1]* res[1] /screen_to_world_ratio )
水枪速度2 = [x_LMB_水枪2[1][0]-x_LMB_水枪2[0][0],x_LMB_水枪2[1][1]-x_LMB_水枪2[0][1]]/n
水枪速度2[0] +=120*(x_LMB_水枪2[1][0]-x_LMB_水枪2[0][0])
水枪速度2[1] +=120*(x_LMB_水枪2[1][1]-x_LMB_水枪2[0][1])
x_LMB_水枪2 = []
elif gui.is_pressed('l'):# draw a particle-boundary line
x_LMB_line.append(gui.get_cursor_pos())
if(len(x_LMB_line) == 2):
p_bondary((x_LMB_line[0][0]*res[0] /screen_to_world_ratio,x_LMB_line[0][1]*res[1] /screen_to_world_ratio),
(x_LMB_line[1][0]*res[0] /screen_to_world_ratio,x_LMB_line[1][1]*res[1] /screen_to_world_ratio),0.02)
# print(x_LMB_line)
x_LMB_line = []
elif gui.is_pressed('j'):# draw a line of elastic circles
x_LMB_chain.append(gui.get_cursor_pos())
if(len(x_LMB_chain) == 2):
build_a_chain((x_LMB_chain[0][0]*res[0] /screen_to_world_ratio,x_LMB_chain[0][1]*res[1] /screen_to_world_ratio),
(x_LMB_chain[1][0]*res[0] /screen_to_world_ratio,x_LMB_chain[1][1]*res[1] /screen_to_world_ratio),chain_粒度,1,1,chain_弹簧比例)
# print(x_LMB_c_line)
x_LMB_chain = []
elif gui.is_pressed('m'):
build_a_wheel((e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[1] /screen_to_world_ratio),wheel_sizeL,wheel_c_r,1)
else:
add_circular(e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[1] /screen_to_world_ratio,operate_c_r,0,0,1,int(gui.is_pressed(ti.GUI.CTRL)),operate_spring_detect,operate_spring_length)
# keyboard shortcuts
elif gui.is_pressed('1'):# faucet 1 toggle
if(gui.is_pressed(ti.GUI.CTRL)):水枪开关1*=-1
elif gui.is_pressed('2'):# faucet 2 toggle
if(gui.is_pressed(ti.GUI.CTRL)):水枪开关2*=-1
elif gui.is_pressed('q'):
if(gui.is_pressed(ti.GUI.SHIFT)):
if(operate_p_cube_w<6):
operate_p_cube_w+=0.1
print("fluid rectangle width set to:",operate_p_cube_w)
if(gui.is_pressed(ti.GUI.CTRL)):
if(operate_p_cube_w>0.5):
operate_p_cube_w-=0.1
print("fluid rectangle width set to:",operate_p_cube_w)
elif gui.is_pressed('a'):
if(gui.is_pressed(ti.GUI.SHIFT)):
if(operate_p_cube_h<6):
operate_p_cube_h+=0.1
print("fluid rectangle height set to:",operate_p_cube_h)
if(gui.is_pressed(ti.GUI.CTRL)):
if(operate_p_cube_h>0.5):
operate_p_cube_h-=0.1
print("fluid rectangle height set to:",operate_p_cube_h)
elif gui.is_pressed('w'):
if(gui.is_pressed(ti.GUI.SHIFT)):
if(operate_c_cube_w<4):
operate_c_cube_w+=0.05
print("elastic rectangle width set to:",operate_c_cube_w)
if(gui.is_pressed(ti.GUI.CTRL)):
if(operate_c_cube_w>0.2):
operate_c_cube_w-=0.05
print("elastic rectangle width set to:",operate_c_cube_w)
elif gui.is_pressed('s'):
if(gui.is_pressed(ti.GUI.SHIFT)):
if(operate_c_cube_h<4):
operate_c_cube_h+=0.05
print("elastic rectangle height set to:",operate_c_cube_h)
if(gui.is_pressed(ti.GUI.CTRL)):
if(operate_c_cube_h>0.2):
operate_c_cube_h-=0.05
print("elastic rectangle height set to:",operate_c_cube_h)
elif gui.is_pressed('e'):
if(gui.is_pressed(ti.GUI.SHIFT)):
if(operate_spring_length<2):
operate_spring_length+=0.01
print("spring length set to:",operate_spring_length)
if(gui.is_pressed(ti.GUI.CTRL)):
if(operate_spring_length>0.1):
operate_spring_length-=0.01
print("spring length set to:",operate_spring_length)
elif gui.is_pressed('d'):
if(gui.is_pressed(ti.GUI.SHIFT)):
if(operate_spring_detect<2.2):
operate_spring_detect+=0.01
print("spring detection distance set to:",operate_spring_detect)
if(gui.is_pressed(ti.GUI.CTRL)):
if(operate_spring_detect>0.12):
operate_spring_detect-=0.01
print("spring detection distance set to:",operate_spring_detect)
        elif gui.is_pressed('x'):
            if(gui.is_pressed(ti.GUI.SHIFT)):
                if(wheel_sizeL<2):
                    wheel_sizeL+=0.01
                    print("Wheel radius set to:",wheel_sizeL)
            if(gui.is_pressed(ti.GUI.CTRL)):
                if(wheel_sizeL>0.1):
                    wheel_sizeL-=0.01
                    print("Wheel radius set to:",wheel_sizeL)
        elif gui.is_pressed('0'):
            if(gui.is_pressed(ti.GUI.SHIFT)):
                if(wheel_c_r<1.8):
                    wheel_c_r+=0.01
                    print("Radius of the circles on the wheel set to:",wheel_c_r)
            if(gui.is_pressed(ti.GUI.CTRL)):
                if(wheel_c_r>0.16):
                    wheel_c_r-=0.01
                    print("Radius of the circles on the wheel set to:",wheel_c_r)
        elif gui.is_pressed('t'):
            if(gui.is_pressed(ti.GUI.SHIFT)):
                if(chain_粒度<1):
                    chain_粒度+=0.01
                    print("Chain granularity set to:",chain_粒度)
            if(gui.is_pressed(ti.GUI.CTRL)):
                if(chain_粒度>0.06):
                    chain_粒度-=0.01
                    print("Chain granularity set to:",chain_粒度)
        elif gui.is_pressed('g'):
            if(gui.is_pressed(ti.GUI.SHIFT)):
                if(chain_弹簧比例<1.5):
                    chain_弹簧比例+=0.01
                    print("Chain spring-to-radius ratio:",chain_弹簧比例)
            if(gui.is_pressed(ti.GUI.CTRL)):
                if(chain_弹簧比例>0.4):
                    chain_弹簧比例-=0.01
                    print("Chain spring-to-radius ratio:",chain_弹簧比例)
        elif gui.is_pressed('c'):
            if(gui.is_pressed(ti.GUI.SHIFT)):
                if operate_c_r<1:
                    operate_c_r+=0.01
                    print("Radius for newly created circles:",operate_c_r)
            if(gui.is_pressed(ti.GUI.CTRL)):
                if operate_c_r>0.06:
                    operate_c_r-=0.01
                    print("Radius for newly created circles:",operate_c_r)
        elif gui.is_pressed('v'):
            if(gui.is_pressed(ti.GUI.SHIFT)):
                if(擦除的搜索半径<0.8):
                    擦除的搜索半径+=0.01
                    print("Erase search radius set to:",擦除的搜索半径)
            if(gui.is_pressed(ti.GUI.CTRL)):
                if(擦除的搜索半径>0.02):
                    擦除的搜索半径-=0.01
                    print("Erase search radius set to:",擦除的搜索半径)
        elif gui.is_pressed('r'):#clear all circles
            reset_allcirculars()
        elif gui.is_pressed('f'):#toggle whether the selected circle is fixed
            switch_fixed()
        elif gui.is_pressed('h'):#spawn an elastic rectangle
add_circular_cube(e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[1] /screen_to_world_ratio,(operate_c_cube_w,operate_c_cube_h),operate_c_r)
# add_circular_cube(e.pos[0]*10,e.pos[1]*10,(0.5,1),0.07)
        elif gui.is_pressed('y'):
            if(是否反重力液体==1):
                是否反重力液体=3
                print("Generated liquid set to: anti-gravity!")
            else:
                是否反重力液体=1
                print("Generated liquid set to: normal gravity!")
elif gui.is_pressed('o'):
add_particle_cube((e.pos[0]*res[0] /screen_to_world_ratio,e.pos[1]*res[1] /screen_to_world_ratio),(operate_p_cube_w,operate_p_cube_h),是否反重力液体,0)
        #turn the last boundary-particle line into liquid, or all boundary particles into liquid
elif gui.is_pressed('b'):
if(gui.is_pressed(ti.GUI.CTRL)):
# print(上一个粒子画的线)
边界粒子变流体(上一个粒子画的线)
else:
边界粒子变流体((0,0))
elif gui.is_pressed('n'):
旋转开关*=-1
        #undo-related
elif gui.is_pressed('z'):
if(gui.is_pressed(ti.GUI.CTRL)):
# print(上一个粒子画的线)
delete_particle(上一个粒子画的线[1]-上一个粒子画的线[0])
elif(gui.is_pressed(ti.GUI.SHIFT)):
清除粒子开关*=-1
                # delete_particle(particle_num[None])#clear all particles
else:
revocation_a_cirulars()
elif gui.is_pressed('p'):
pause*=-1
    # Undo operations, separately for circles and for fluid
    # Click two points to define a wall
    # A rotating water wheel, or a powered water wheel
    #(done)
    #Add particle deletion: a single particle or all fluid; keep the data structures in sync (done)
    #Undo the last particle line-drawing operation (done)
    #The particle-adding function could become a kernel (done)
    #Make anti-gravity liquid (done)
    #Add friction to the particle-circle coupling (roughly done = =)
    #Can the circles' boundary collision be removed? (no)
    #Make an elastic chain (done)
    #Are the elastic circle size and the spring length reasonable?
    #Handle spring sticking (solved for chains; other shapes are more complex)
    #Try a hollow cube (skipped for now, mainly because it is tedious to construct)
    #Can the circles' boundary collision be removed?
    #Make a circle that acts as a slider moving left and right, like the slider square in box2d (unnecessary, dragging works now)
    #A mechanical arm reaching out of a wall gap, carrying a wheel? (no longer needed, already achievable)
    #More interaction, e.g. the mouse attracting fluid or circles (attracting circles: done)
    #Select a specific circle (done)
    #Fix and unfix the selected circle (done)
    #Make Young's modulus tunable interactively, to test stiff materials..
    #Multiple water guns (done; currently two, extensible)
    #Could undo use a stack for multi-step undo? (too much hassle)
    #Boundary particles could use a larger display radius and adjusted colors
    #A sensible color scheme
    #Fluids of different colors, to see the mixing (anti-gravity liquid implemented)
    #Color switching via a prebuilt color array
    #Sensible parameter names
    #Build small demo scenes
    #Make the particle boundary a macro switch, since it looks mediocre and is a bit laggy
    #Ugly issue: a freshly spawned particle rectangle explodes on touch, probably related to boundary weights (fixed)
    #When a circle hits the boundary, turn boundary particles into liquid
    #ggui??
    #fluid surface reconstruction????
if pause==-1:
fream+=1
if gui.is_pressed(ti.GUI.LMB):
c = gui.get_cursor_pos()
attract_one(c[0]*res[0] /screen_to_world_ratio,c[1]*res[1] /screen_to_world_ratio)
if(fream%2==0):
if 水枪开关1==1:
一个水枪(水枪位置1, 水枪速度1,是否反重力液体)
                # my_add_particle((3,2), (2,0),1,0x956333)#spiral spray = =
if 水枪开关2==1:
一个水枪(水枪位置2, 水枪速度2,是否反重力液体)
                # my_add_particle((3,2), (2,0),1,0x956333)#spiral spray = =
if 旋转开关==1:
applied_rotating(旋转圆下标buff[0],旋转圆下标buff[1])
if 清除粒子开关==1:
delete_particle(5)
for i in range(3):
substep()
p_num=particle_num[None]
c_num=circular_num[None]
np_x = np.ndarray((p_num, dim), dtype=np.float32)
copy_to_numpy_nd(np_x, x,p_num)
np_color = np.ndarray(p_num, dtype=np.int32)
copy_to_numpy_for_color(np_color,p_num)
    r = np.ndarray(p_num, dtype=np.float32)#scale factors so boundary particles can be drawn with a larger radius
copy_to_numpy_for_radius(r,p_num)
X = np.ndarray((c_num, dim), dtype=np.float32)
copy_to_numpy_nd(X, c_x,c_num)
R = np.ndarray(c_num, dtype=np.float32)
for i in range(c_num): R[i]=c_r[i]
# print(particle_info['position'])
# gui.circles(np_x ,radius=particle_radius / 1.5 * screen_to_world_ratio,color=0x956333)
# gui.text("test",[0.5,0.5])#gui显示文本有问题、、、
gui.circles(np_x * screen_to_world_ratio / res,radius=r*particle_radius / 1.5 * screen_to_world_ratio,color=np_color)
gui.circles(X * screen_to_world_ratio / res,radius=R* screen_to_world_ratio*1.75,color=0x5534993)
# gui.circles(X * screen_to_world_ratio / 512,radius=256 ,color=0x5534993)
    #drawing lines badly hurts the frame rate; the lines to draw could be stored in a data structure, computed in a kernel, and then drawn
    #when there are many circles it is recommended to comment out this line-drawing part
    '''for i in range(c_num):
        for j in range(i+1, c_num): # originally for j in range(n); starting from the outer loop's i is enough and halves the traversal and drawing work
            if rest_length[i, j] != 0:
                gui.line(begin=X[i] * screen_to_world_ratio / res, end=X[j] * screen_to_world_ratio / res,
                         color=0X888888, radius=2) # draw the line
    '''
gui.show()
if __name__ == "__main__":
main()
| 32.471189
| 207
| 0.536682
| 0
| 0
| 0
| 0
| 17,124
| 0.321989
| 0
| 0
| 14,720
| 0.276785
|
86048fe3a3ee21e21bf198adc5adb3af9aad7917
| 1,129
|
py
|
Python
|
pexen/factory/module.py
|
comps/pexen
|
3da6b110cf063646e5d2ae671f4408a14662fcca
|
[
"MIT"
] | 1
|
2019-09-06T10:41:30.000Z
|
2019-09-06T10:41:30.000Z
|
pexen/factory/module.py
|
comps/pexen
|
3da6b110cf063646e5d2ae671f4408a14662fcca
|
[
"MIT"
] | null | null | null |
pexen/factory/module.py
|
comps/pexen
|
3da6b110cf063646e5d2ae671f4408a14662fcca
|
[
"MIT"
] | 1
|
2020-06-24T00:31:13.000Z
|
2020-06-24T00:31:13.000Z
|
import inspect
from fnmatch import fnmatchcase
from ..sched import meta
from .base import BaseFactory
class ModuleFactory(BaseFactory):
"""
Takes an imported module object and extracts callable objects from it.
A valid callable is any object that can be called and has pexen.sched
metadata.
Arguments:
match - also include metadata-less callables that fnmatch this string
"""
def __init__(self, match=None):
super().__init__()
self.match = match
def is_valid_callable(self, objname, obj):
if not callable(obj):
return False
if meta.has_meta(obj):
return True
if self.match and fnmatchcase(objname, self.match):
return True
return False
def extract_from_mod(self, mod):
"""Extract callables from an imported module."""
for name, obj in inspect.getmembers(mod):
if not self.is_valid_callable(name, obj):
continue
self.callpath_burn(obj, name)
yield obj
def __call__(self, mod):
yield from self.extract_from_mod(mod)
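# Usage sketch (added for illustration; 'mymod' is a hypothetical imported
# module whose functions either carry pexen.sched metadata or match the glob):
#
#     import mymod
#     factory = ModuleFactory(match='task_*')
#     tasks = list(factory(mymod))   # extracted callables, callpaths burned in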
| 28.225
| 77
| 0.639504
| 1,024
| 0.906997
| 352
| 0.31178
| 0
| 0
| 0
| 0
| 317
| 0.280779
|
860663bf7fc7f279ff0aaf05a3df989c0b80600b
| 2,431
|
py
|
Python
|
Python/zzz_training_challenge/Python_Challenge/solutions/ch07_recursion_advanced/solutions/ex07_water_jugs.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
Python/zzz_training_challenge/Python_Challenge/solutions/ch07_recursion_advanced/solutions/ex07_water_jugs.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
Python/zzz_training_challenge/Python_Challenge/solutions/ch07_recursion_advanced/solutions/ex07_water_jugs.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
# Example program for the book "Python Challenge"
#
# Copyright 2020 by Michael Inden
def solve_water_jugs(size1, size2, desired_liters):
return __solve_water_jugs_rec(size1, size2,
desired_liters, 0, 0, {})
def __solve_water_jugs_rec(size1, size2, desired_liters,
current_jug1, current_jug2, already_tried):
if is_solved(current_jug1, current_jug2, desired_liters):
print("Solved Jug 1:", current_jug1, " / 2:", current_jug2)
return True
key = (current_jug1, current_jug2)
if not key in already_tried:
already_tried[key] = True
        # Try all 6 variants
print("Jug 1:", current_jug1, " / 2: ", current_jug2)
min_2_1 = min(current_jug2, (size1 - current_jug1))
min_1_2 = min(current_jug1, (size2 - current_jug2))
result = __solve_water_jugs_rec(size1, size2, desired_liters,
0, current_jug2, already_tried) or \
__solve_water_jugs_rec(size1, size2, desired_liters,
current_jug1, 0, already_tried) or \
__solve_water_jugs_rec(size1, size2, desired_liters,
size1, current_jug2, already_tried) or \
__solve_water_jugs_rec(size1, size2, desired_liters,
current_jug1, size2, already_tried) or \
__solve_water_jugs_rec(size1, size2, desired_liters,
current_jug1 + min_2_1,
current_jug2 - min_2_1,
already_tried) or \
__solve_water_jugs_rec(size1, size2, desired_liters,
current_jug1 - min_1_2,
current_jug2 + min_1_2,
already_tried)
# Memoization:
already_tried[key] = result
return result
return False
def is_solved(current_jug1, current_jug2, desired_liters):
return (current_jug1 == desired_liters and current_jug2 == 0) or \
(current_jug2 == desired_liters and current_jug1 == 0)
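# Illustrative solution trace (one valid order; the recursion may visit states
# differently): for solve_water_jugs(4, 3, 2) a successful sequence is
# (0,0) -> fill jug 2 (0,3) -> pour 2 into 1 (3,0) -> fill jug 2 (3,3)
# -> pour 2 into 1 (4,2) -> empty jug 1 (0,2) -> pour 2 into 1 (2,0),
# which is_solved accepts.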
def main():
print(solve_water_jugs(4, 3, 2))
print(solve_water_jugs(4, 4, 2))
#print(solveWaterJugs(5, 2, 4))
if __name__ == "__main__":
main()
| 37.4
| 80
| 0.559441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 209
| 0.085938
|
8606af2e767f86b97fc991c40020f2652a4de91b
| 9,424
|
py
|
Python
|
cancat/vstruct/defs/elf.py
|
kimocoder/CanCat
|
e06f45b22db68b67b6fd93d63d826df9b5d1069c
|
[
"BSD-2-Clause"
] | 2
|
2020-06-07T04:05:29.000Z
|
2022-02-09T00:00:49.000Z
|
vstruct/defs/elf.py
|
ConfusedMoonbear/vivisect
|
8d6048037f85f745cd11923c6a8d662c150fe330
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
vstruct/defs/elf.py
|
ConfusedMoonbear/vivisect
|
8d6048037f85f745cd11923c6a8d662c150fe330
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import vstruct
from vstruct.primitives import *
EI_NIDENT = 4
EI_PADLEN = 7
class Elf32(vstruct.VStruct):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.e_ident = v_bytes(EI_NIDENT)
self.e_class = v_uint8()
self.e_data = v_uint8()
self.e_fileversion = v_uint8()
self.e_osabi = v_uint8()
self.e_abiversio = v_uint8()
self.e_pad = v_bytes(EI_PADLEN)
self.e_type = v_uint16(bigend=bigend)
self.e_machine = v_uint16(bigend=bigend)
self.e_version = v_uint32(bigend=bigend)
self.e_entry = v_uint32(bigend=bigend)
self.e_phoff = v_uint32(bigend=bigend)
self.e_shoff = v_uint32(bigend=bigend)
self.e_flags = v_uint32(bigend=bigend)
self.e_ehsize = v_uint16(bigend=bigend)
self.e_phentsize = v_uint16(bigend=bigend)
self.e_phnum = v_uint16(bigend=bigend)
self.e_shentsize = v_uint16(bigend=bigend)
self.e_shnum = v_uint16(bigend=bigend)
self.e_shstrndx = v_uint16(bigend=bigend)
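# Usage sketch (added for illustration, not part of the original module): a
# vstruct is populated from raw bytes with vsParse(); the path is made up and
# len(hdr) is assumed to give the struct's byte size.
def _example_parse_elf32_header(path="/bin/ls"):
    hdr = Elf32(bigend=False)
    with open(path, "rb") as f:
        hdr.vsParse(f.read(len(hdr)))
    return hdr.e_machine, hdr.e_phnum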
class Elf32Section(vstruct.VStruct):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.sh_name = v_uint32(bigend=bigend)
self.sh_type = v_uint32(bigend=bigend)
self.sh_flags = v_uint32(bigend=bigend)
self.sh_addr = v_uint32(bigend=bigend)
self.sh_offset = v_uint32(bigend=bigend)
self.sh_size = v_uint32(bigend=bigend)
self.sh_link = v_uint32(bigend=bigend)
self.sh_info = v_uint32(bigend=bigend)
self.sh_addralign = v_uint32(bigend=bigend)
self.sh_entsize = v_uint32(bigend=bigend)
class Elf32Pheader(vstruct.VStruct):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.p_type = v_uint32(bigend=bigend)
self.p_offset = v_uint32(bigend=bigend)
self.p_vaddr = v_uint32(bigend=bigend)
self.p_paddr = v_uint32(bigend=bigend)
self.p_filesz = v_uint32(bigend=bigend)
self.p_memsz = v_uint32(bigend=bigend)
self.p_flags = v_uint32(bigend=bigend)
self.p_align = v_uint32(bigend=bigend)
class Elf32Reloc(vstruct.VStruct):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.r_offset = v_ptr32(bigend=bigend)
self.r_info = v_uint32(bigend=bigend)
def __eq__(self, other):
if self.name != other.name:
return False
if self.r_offset != other.r_offset:
return False
if self.r_info != other.r_info:
return False
return True
class Elf32Reloca(Elf32Reloc):
def __init__(self, bigend=False):
Elf32Reloc.__init__(self)
self.r_addend = v_uint32(bigend=bigend)
def __eq__(self, other):
if self.name != other.name:
return False
if self.r_offset != other.r_offset:
return False
if self.r_info != other.r_info:
return False
if self.r_addend != other.r_addend:
return False
return True
class Elf32Symbol(vstruct.VStruct):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.st_name = v_uint32(bigend=bigend)
self.st_value = v_uint32(bigend=bigend)
self.st_size = v_uint32(bigend=bigend)
self.st_info = v_uint8()
self.st_other = v_uint8()
self.st_shndx = v_uint16(bigend=bigend)
def __eq__(self, other):
if self.st_value != other.st_value:
return False
if self.st_name != other.st_name:
return False
if self.st_size != other.st_size:
return False
if self.st_info != other.st_info:
return False
if self.st_other != other.st_other:
return False
if self.st_shndx != other.st_shndx:
return False
return True
class Elf32Dynamic(vstruct.VStruct):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.d_tag = v_uint32(bigend=bigend)
self.d_value = v_uint32(bigend=bigend)
def __eq__(self, other):
if self.d_tag != other.d_tag:
return False
if self.d_value != other.d_value:
return False
return True
class Elf64(vstruct.VStruct):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.e_ident = v_bytes(EI_NIDENT)
self.e_class = v_uint8()
self.e_data = v_uint8()
self.e_fileversion = v_uint8()
self.e_osabi = v_uint8()
self.e_abiversio = v_uint8()
self.e_pad = v_bytes(EI_PADLEN)
self.e_type = v_uint16(bigend=bigend)
self.e_machine = v_uint16(bigend=bigend)
self.e_version = v_uint32(bigend=bigend)
self.e_entry = v_uint64(bigend=bigend)
self.e_phoff = v_uint64(bigend=bigend)
self.e_shoff = v_uint64(bigend=bigend)
self.e_flags = v_uint32(bigend=bigend)
self.e_ehsize = v_uint16(bigend=bigend)
self.e_phentsize = v_uint16(bigend=bigend)
self.e_phnum = v_uint16(bigend=bigend)
self.e_shentsize = v_uint16(bigend=bigend)
self.e_shnum = v_uint16(bigend=bigend)
self.e_shstrndx = v_uint16(bigend=bigend)
class Elf64Section(Elf32Section):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.sh_name = v_uint32(bigend=bigend)
self.sh_type = v_uint32(bigend=bigend)
self.sh_flags = v_uint64(bigend=bigend)
self.sh_addr = v_uint64(bigend=bigend)
self.sh_offset = v_uint64(bigend=bigend)
self.sh_size = v_uint64(bigend=bigend)
self.sh_link = v_uint32(bigend=bigend)
self.sh_info = v_uint32(bigend=bigend)
self.sh_addralign = v_uint64(bigend=bigend)
self.sh_entsize = v_uint64(bigend=bigend)
class Elf64Pheader(Elf32Pheader):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.p_type = v_uint32(bigend=bigend)
self.p_flags = v_uint32(bigend=bigend)
self.p_offset = v_uint64(bigend=bigend)
self.p_vaddr = v_uint64(bigend=bigend)
self.p_paddr = v_uint64(bigend=bigend)
self.p_filesz = v_uint64(bigend=bigend)
self.p_memsz = v_uint64(bigend=bigend)
self.p_align = v_uint64(bigend=bigend)
class Elf64Reloc(vstruct.VStruct):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.r_offset = v_ptr64(bigend=bigend)
self.r_info = v_uint64(bigend=bigend)
def __eq__(self, other):
if self.name != other.name:
return False
if self.r_offset != other.r_offset:
return False
if self.r_info != other.r_info:
return False
return True
class Elf64Reloca(Elf64Reloc):
def __init__(self, bigend=False):
#Elf64Reloc.__init__(self)
vstruct.VStruct.__init__(self)
self.r_offset = v_uint64(bigend=bigend)
self.r_info = v_uint64(bigend=bigend)
self.r_addend = v_uint64(bigend=bigend)
def __eq__(self, other):
if self.name != other.name:
return False
if self.r_offset != other.r_offset:
return False
if self.r_info != other.r_info:
return False
if self.r_addend != other.r_addend:
return False
return True
class Elf64Symbol(vstruct.VStruct):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.st_name = v_uint32(bigend=bigend)
self.st_info = v_uint8()
self.st_other = v_uint8()
self.st_shndx = v_uint16(bigend=bigend)
self.st_value = v_uint64(bigend=bigend)
self.st_size = v_uint64(bigend=bigend)
def __eq__(self, other):
if self.st_value != other.st_value:
return False
if self.st_name != other.st_name:
return False
if self.st_size != other.st_size:
return False
if self.st_info != other.st_info:
return False
if self.st_other != other.st_other:
return False
if self.st_shndx != other.st_shndx:
return False
return True
class Elf64Dynamic(Elf32Dynamic):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.d_tag = v_uint64(bigend=bigend)
self.d_value = v_uint64(bigend=bigend)
class ElfNote(vstruct.VStruct):
def __init__(self, bigend=False):
vstruct.VStruct.__init__(self)
self.namesz = v_uint32(bigend=bigend)
self.descsz = v_uint32(bigend=bigend)
self.ntype = v_uint32(bigend=bigend)
self.name = v_bytes()
self.desc = vstruct.VArray()
def pcb_namesz(self):
# padded to 4 byte alignment
        namesz = ((self.namesz + 3) // 4) * 4
self['name'].vsSetLength( namesz )
def pcb_descsz(self):
# padded to 4 byte alignment
        descct = ((self.descsz + 3) // 4)
        elems = [ v_uint32() for i in range(descct) ]
self.desc = vstruct.VArray(elems=elems)
| 35.969466
| 54
| 0.612585
| 9,316
| 0.98854
| 0
| 0
| 0
| 0
| 0
| 0
| 88
| 0.009338
|
8606c99e338a87761250aeed31095b32a52bc802
| 10,041
|
py
|
Python
|
Acquire/Identity/_useraccount.py
|
openghg/acquire
|
8af8701b092f7304c02fea1ee6360e53502dfd64
|
[
"Apache-2.0"
] | 1
|
2021-10-18T17:11:47.000Z
|
2021-10-18T17:11:47.000Z
|
Acquire/Identity/_useraccount.py
|
openghg/acquire
|
8af8701b092f7304c02fea1ee6360e53502dfd64
|
[
"Apache-2.0"
] | null | null | null |
Acquire/Identity/_useraccount.py
|
openghg/acquire
|
8af8701b092f7304c02fea1ee6360e53502dfd64
|
[
"Apache-2.0"
] | null | null | null |
__all__ = ["UserAccount"]
_user_root = "identity/users"
def _encode_username(username):
"""This function returns an encoded (sanitised) version of
the username. This will ensure that the username
    is valid (must be between 3 and 150 characters).
The sanitised username is the encoded version,
meaning that a user can use a unicode (emoji)
username if they so desire
"""
if username is None:
return None
if len(username) < 3 or len(username) > 150:
from Acquire.Identity import UsernameError
raise UsernameError("The username must be between 3 and 150 characters!")
from Acquire.ObjectStore import string_to_encoded as _string_to_encoded
return _string_to_encoded(username)
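# Illustrative behaviour (assumed from the checks above): any 3-150 character
# string is accepted, including unicode, and mapped to an encoded key-safe
# form; anything shorter or longer raises UsernameError.
#
#     _encode_username("alice")   # -> encoded, key-safe string
#     _encode_username("ab")      # raises UsernameError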
class UserAccount:
"""This class holds all information about a user's account,
e.g. their username, the sanitised username for the person
on the system, their account keys, status etc.
    This data can be serialised to and from json to allow
    easy saving and retrieval from an object store
"""
def __init__(self, username=None, user_uid=None, private_key=None, status=None):
"""Construct from the passed username"""
self._username = username
self._uid = user_uid
self._privkey = private_key
self._status = status
@staticmethod
def create(username, password, _service_uid=None, _service_public_key=None):
"""Create a new account with username 'username', which will
be secured using the passed password.
Note that this will create an account with a specified
user UID, meaning that different users can have the same
username. We identify the right user via the combination
of username, password and OTP code.
Normally the UID of the service, and the skeleton key
used to encrypt the backup password are obtained
directly from the service. However, when initialising
a new service we must pass these directly. In those
        cases, pass the values via _service_uid and
_service_public_key
This returns a tuple of the user_uid and OTP for the
newly-created account
"""
from Acquire.ObjectStore import create_uuid as _create_uuid
from Acquire.Crypto import PrivateKey as _PrivateKey
from Acquire.Crypto import PublicKey as _PublicKey
from Acquire.ObjectStore import ObjectStore as _ObjectStore
from Acquire.Service import get_service_account_bucket as _get_service_account_bucket
from Acquire.ObjectStore import bytes_to_string as _bytes_to_string
from Acquire.Identity import UserCredentials as _UserCredentials
from Acquire.ObjectStore import get_datetime_now_to_string as _get_datetime_now_to_string
if _service_public_key is None:
from Acquire.Service import get_this_service as _get_this_service
service_pubkey = _get_this_service().public_skeleton_key()
assert service_pubkey is not None
else:
service_pubkey = _service_public_key
if not isinstance(service_pubkey, _PublicKey):
raise TypeError("The service public key must be type PublicKey")
if _service_uid is None:
from Acquire.Service import get_this_service as _get_this_service
service_uid = _get_this_service(need_private_access=False).uid()
else:
service_uid = _service_uid
# create a UID for this new user
user_uid = _create_uuid()
# now create the primary password for this user and use
# this to encrypt the special keys for this user
privkey = _PrivateKey(name="user_secret_key %s %s" % (username, user_uid))
primary_password = _PrivateKey.random_passphrase()
bucket = _get_service_account_bucket()
# now create the credentials used to validate a login
otp = _UserCredentials.create(user_uid=user_uid, password=password, primary_password=primary_password)
# create the user account
user = UserAccount(username=username, user_uid=user_uid, private_key=privkey, status="active")
# now save a lookup from the username to this user_uid
# (many users can have the same username). Use this lookup
# to hold a recovery password for this account
recovery_password = _bytes_to_string(service_pubkey.encrypt(primary_password))
key = "%s/names/%s/%s" % (_user_root, user.encoded_name(), user_uid)
_ObjectStore.set_string_object(bucket=bucket, key=key, string_data=recovery_password)
# now save a lookup from the hashed username+password
# to the user_uid, so that we can
# quickly find matching user_uids (expect few people will have
# exactly the same username and password). This will
# save the exact time this username-password combination
# was set
encoded_password = _UserCredentials.hash(
username=username, password=password, service_uid=service_uid
)
key = "%s/passwords/%s/%s" % (_user_root, encoded_password, user_uid)
_ObjectStore.set_string_object(bucket=bucket, key=key, string_data=_get_datetime_now_to_string())
# finally(!) save the account itself to the object store
key = "%s/uids/%s" % (_user_root, user_uid)
data = user.to_data(passphrase=primary_password)
_ObjectStore.set_object_from_json(bucket=bucket, key=key, data=data)
# return the OTP and user_uid
return (user_uid, otp)
@staticmethod
def login(credentials, user_uid=None, remember_device=False):
"""Login to the session with specified 'short_uid' with the
user with passed 'username' and 'credentials',
optionally specifying the user_uid
"""
if user_uid is None:
# find all of the user_uids of accounts with this
# username+password combination
from Acquire.ObjectStore import ObjectStore as _ObjectStore
from Acquire.Service import get_service_account_bucket as _get_service_account_bucket
from Acquire.Client import Credentials as _Credentials
from Acquire.Identity import UserCredentials as _UserCredentials
from Acquire.Service import get_this_service as _get_this_service
if not isinstance(credentials, _Credentials):
raise TypeError("The credentials must be type Credentials")
bucket = _get_service_account_bucket()
encoded_password = _UserCredentials.hash(
username=credentials.username(), password=credentials.password()
)
prefix = "%s/passwords/%s/" % (_user_root, encoded_password)
try:
names = _ObjectStore.get_all_object_names(bucket=bucket, prefix=prefix)
except:
names = []
user_uids = []
for name in names:
user_uids.append(name.split("/")[-1])
else:
user_uids = [user_uid]
if len(user_uids) == 0:
from Acquire.Identity import UserValidationError
raise UserValidationError("No user with name '%s'" % credentials.username())
from Acquire.Identity import UserCredentials as _UserCredentials
return _UserCredentials.login(
credentials=credentials, user_uids=user_uids, remember_device=remember_device
)
def __str__(self):
return "UserAccount(name : %s)" % self._username
def name(self):
"""Return the name of this account"""
return self._username
def username(self):
"""Synonym for 'name'"""
return self.name()
def encoded_name(self):
"""Return the encoded (sanitised) username"""
return _encode_username(self._username)
def uid(self):
"""Return the globally unique ID for this account"""
return self._uid
def login_root_url(self):
"""Return the root URL used to log into this account"""
from Acquire.Service import get_this_service as _get_this_service
return _get_this_service().canonical_url()
def is_valid(self):
"""Return whether or not this is a valid account"""
return not (self._status is None)
def is_active(self):
"""Return whether or not this is an active account"""
if self._status is None:
return False
else:
return self._status == "active"
def public_key(self):
"""Return the lines of the public key for this account"""
return self._privkey.public_key()
def private_key(self):
"""Return the lines of the private key for this account"""
return self._privkey
def status(self):
"""Return the status for this account"""
if self._status is None:
return "invalid"
return self._status
def to_data(self, passphrase, mangleFunction=None):
"""Return a data representation of this object (dictionary)"""
if self._username is None:
return None
data = {}
data["username"] = self._username
data["status"] = self._status
data["uid"] = self._uid
data["private_key"] = self._privkey.to_data(passphrase=passphrase, mangleFunction=mangleFunction)
return data
@staticmethod
def from_data(data, passphrase, mangleFunction=None):
"""Return a UserAccount constructed from the passed
data (dictionary)
"""
user = UserAccount()
if data is not None and len(data) > 0:
from Acquire.Crypto import PrivateKey as _PrivateKey
user._username = data["username"]
user._status = data["status"]
user._uid = data["uid"]
user._privkey = _PrivateKey.from_data(
data=data["private_key"], passphrase=passphrase, mangleFunction=mangleFunction
)
return user
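# Usage sketch (added for illustration; the username and password are made up,
# and a configured service object-store bucket is required for create() to
# succeed):
def _example_create_account():
    # user_uid identifies the new account; otp is the one-time-password
    # secret the client must keep to complete future logins
    user_uid, otp = UserAccount.create(username="alice", password="Passw0rd!")
    return user_uid, otp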
| 37.74812
| 110
| 0.66587
| 9,292
| 0.925406
| 0
| 0
| 6,780
| 0.675232
| 0
| 0
| 3,403
| 0.33891
|
86073f9a281f5bb9b144352abdd430f4d907d4bc
| 3,337
|
py
|
Python
|
saharaclient/api/job_binaries.py
|
openstack/python-saharaclient
|
2f01b878a9e07bc712fae9c6c2c5f823bd986dd6
|
[
"Apache-2.0"
] | 34
|
2015-01-26T21:39:46.000Z
|
2021-01-16T17:30:25.000Z
|
saharaclient/api/job_binaries.py
|
openstack/python-saharaclient
|
2f01b878a9e07bc712fae9c6c2c5f823bd986dd6
|
[
"Apache-2.0"
] | null | null | null |
saharaclient/api/job_binaries.py
|
openstack/python-saharaclient
|
2f01b878a9e07bc712fae9c6c2c5f823bd986dd6
|
[
"Apache-2.0"
] | 15
|
2015-03-13T23:24:59.000Z
|
2017-06-22T12:15:46.000Z
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from saharaclient.api import base
class JobBinaries(base.Resource):
resource_name = 'Job Binary'
class JobBinariesManagerV1(base.ResourceManager):
resource_class = JobBinaries
version = 1.1
def create(self, name, url, description=None, extra=None, is_public=None,
is_protected=None):
"""Create a Job Binary.
:param dict extra: authentication info needed for some job binaries,
containing the keys `user` and `password` for job binary in Swift
or the keys `accesskey`, `secretkey`, and `endpoint` for job
binary in S3
"""
data = {
"name": name,
"url": url
}
self._copy_if_defined(data, description=description, extra=extra,
is_public=is_public, is_protected=is_protected)
return self._create('/job-binaries', data, 'job_binary')
def list(self, search_opts=None, limit=None, marker=None,
sort_by=None, reverse=None):
"""Get a list of Job Binaries."""
query = base.get_query_string(search_opts, limit=limit, marker=marker,
sort_by=sort_by, reverse=reverse)
url = "/job-binaries%s" % query
return self._page(url, 'binaries', limit)
def get(self, job_binary_id):
"""Get information about a Job Binary."""
return self._get('/job-binaries/%s' % job_binary_id, 'job_binary')
def delete(self, job_binary_id):
"""Delete a Job Binary."""
self._delete('/job-binaries/%s' % job_binary_id)
def get_file(self, job_binary_id):
"""Download a Job Binary."""
resp = self.api.get('/job-binaries/%s/data' % job_binary_id)
if resp.status_code != 200:
self._raise_api_exception(resp)
return resp.content
def update(self, job_binary_id, data):
"""Update Job Binary.
:param dict data: dict that contains fields that should be updated
with new values.
Fields that can be updated:
* name
* description
* url
* is_public
* is_protected
* extra - dict with the keys `user` and `password` for job binary
in Swift, or with the keys `accesskey`, `secretkey`, and `endpoint`
for job binary in S3
"""
if self.version >= 2:
UPDATE_FUNC = self._patch
else:
UPDATE_FUNC = self._update
return UPDATE_FUNC(
'/job-binaries/%s' % job_binary_id, data, 'job_binary')
class JobBinariesManagerV2(JobBinariesManagerV1):
version = 2
# NOTE(jfreud): keep this around for backwards compatibility
JobBinariesManager = JobBinariesManagerV1
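# Usage sketch (added for illustration; 'client' is assumed to be an
# authenticated saharaclient Client exposing one of the managers above as
# 'job_binaries', and the Swift URL and credentials are made up):
def _example_upload_and_describe(client):
    jb = client.job_binaries.create(
        name='wordcount.jar',
        url='swift://container/wordcount.jar',
        extra={'user': 'demo', 'password': 'secret'})
    client.job_binaries.update(jb.id, {'description': 'WordCount binary'})
    return client.job_binaries.get_file(jb.id)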
| 32.086538
| 78
| 0.631106
| 2,606
| 0.780941
| 0
| 0
| 0
| 0
| 0
| 0
| 1,686
| 0.505244
|
860766f5b7e396034ed70275ab698c4665993ebb
| 584
|
py
|
Python
|
tests/test_translator.py
|
Attsun1031/schematics
|
90dee53fd1d5c29f2c947bec6f5ffe5f74305ab1
|
[
"BSD-3-Clause"
] | 1,430
|
2015-01-01T19:22:19.000Z
|
2022-03-29T11:34:35.000Z
|
tests/test_translator.py
|
Attsun1031/schematics
|
90dee53fd1d5c29f2c947bec6f5ffe5f74305ab1
|
[
"BSD-3-Clause"
] | 360
|
2015-01-02T05:27:34.000Z
|
2022-03-18T14:08:27.000Z
|
tests/test_translator.py
|
Attsun1031/schematics
|
90dee53fd1d5c29f2c947bec6f5ffe5f74305ab1
|
[
"BSD-3-Clause"
] | 222
|
2015-01-07T20:07:02.000Z
|
2022-03-22T16:12:47.000Z
|
# -*- coding: utf-8 -*-
import pytest
def test_translator():
def translator(string):
translations = {'String value is too long.': 'Tamanho de texto muito grande.'}
return translations.get(string, string)
from schematics.translator import register_translator
register_translator(translator)
from schematics.types import StringType
from schematics.exceptions import ValidationError
with pytest.raises(ValidationError) as exc:
StringType(max_length=1).validate_length('Abc')
assert exc.value == ['Tamanho de texto muito grande.']
| 30.736842
| 86
| 0.72089
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 119
| 0.203767
|
860804e29db65321937c10951cae50769822d370
| 641
|
py
|
Python
|
1014 Trie Tree/test.py
|
SLAPaper/hihoCoder
|
3f64d678c5dd46db36345736eb56880fb2d2c5fe
|
[
"MIT"
] | null | null | null |
1014 Trie Tree/test.py
|
SLAPaper/hihoCoder
|
3f64d678c5dd46db36345736eb56880fb2d2c5fe
|
[
"MIT"
] | null | null | null |
1014 Trie Tree/test.py
|
SLAPaper/hihoCoder
|
3f64d678c5dd46db36345736eb56880fb2d2c5fe
|
[
"MIT"
] | null | null | null |
# generate 100k words and 100k queries to test the runtime
from main import TrieTree
import time
import random
vocal = list(range(26))
trie = TrieTree()
words = [[random.choice(vocal) for _ in range(random.randrange(1, 11))] for _ in range(100000)]
queries = [[random.choice(vocal) for _ in range(random.randrange(1, 11))] for _ in range(100000)]
begin = time.time()
for word in words:
trie.insert(word)
insert_end = time.time()
for query in queries:
trie.query(query)
end = time.time()
print("insert time used:", insert_end - begin, 's')
print("query time used:", end - insert_end, 's')
print("time used:", end - begin, 's')
| 22.103448
| 97
| 0.692668
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 113
| 0.176287
|
86081d580f0b29a7dc03878e4040b5668fd409d4
| 894
|
py
|
Python
|
tools/mo/openvino/tools/mo/front/onnx/mean_variance_normalization_ext.py
|
pazamelin/openvino
|
b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48
|
[
"Apache-2.0"
] | 1
|
2021-10-21T03:04:16.000Z
|
2021-10-21T03:04:16.000Z
|
tools/mo/openvino/tools/mo/front/onnx/mean_variance_normalization_ext.py
|
pazamelin/openvino
|
b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48
|
[
"Apache-2.0"
] | 58
|
2020-11-06T12:13:45.000Z
|
2022-03-28T13:20:11.000Z
|
tools/mo/openvino/tools/mo/front/onnx/mean_variance_normalization_ext.py
|
pazamelin/openvino
|
b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48
|
[
"Apache-2.0"
] | 2
|
2019-09-20T01:33:37.000Z
|
2019-09-20T08:42:11.000Z
|
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from openvino.tools.mo.ops.mvn import MVNOnnx
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
from openvino.tools.mo.front.extractor import FrontExtractorOp
from openvino.tools.mo.front.onnx.extractors.utils import onnx_attr
class MeanVarianceNormalizationExtractor(FrontExtractorOp):
op = 'MeanVarianceNormalization'
enabled = True
@classmethod
def extract(cls, node):
axes = onnx_attr(node, 'axes', 'ints',
default=int64_array([0, 2, 3]),
dst_type=lambda x: int64_array(x))
attrs = {
'eps': 1e-9,
'normalize_variance': 1,
'axes': axes,
'eps_mode': 'outside_sqrt',
}
MVNOnnx.update_node_stat(node, attrs)
return cls.enabled
| 30.827586
| 74
| 0.647651
| 556
| 0.621924
| 0
| 0
| 435
| 0.486577
| 0
| 0
| 174
| 0.194631
|
f7a5670ed8d1bfbab967804b7afd49109f337bfe
| 11,661
|
py
|
Python
|
enemy.py
|
KasiaWo/Rabbit_Bobble
|
89afbaa4f8b46e20ad33e9c410f50c85ddae747b
|
[
"MIT"
] | null | null | null |
enemy.py
|
KasiaWo/Rabbit_Bobble
|
89afbaa4f8b46e20ad33e9c410f50c85ddae747b
|
[
"MIT"
] | null | null | null |
enemy.py
|
KasiaWo/Rabbit_Bobble
|
89afbaa4f8b46e20ad33e9c410f50c85ddae747b
|
[
"MIT"
] | null | null | null |
"""
Module for managing enemies.
"""
import random
import constants as const
import pygame
import random
import platforms
from spritesheet_functions import SpriteSheet
class Enemy(pygame.sprite.Sprite):
# -- Methods
def __init__(self, x_cord, y_cord,level, x_speed=2, char_type=0):
""" Constructor function """
# Call the parent's constructor
super().__init__()
# Create an image of the block, and fill it with a color.
# This could also be an image loaded from the disk.
self.direction="R"
self.load_images()
self.image = self.standing_frames[0]
self.current_frame = 0
self.last_update = 0
self.rect = self.image.get_rect()
self.radius= 20
self.walking = False
self.jumping = False
self.type = char_type
        # Set a reference to the image rect.
self.rect = self.image.get_rect()
self.radius = 35
#
self.rect.x = x_cord
self.rect.y = y_cord
self.change_x = x_speed
self.change_y = 0
self.sign_direc = 1
# List of sprites we can bump against
self.level = level
self.platforms = level.platform_list
def load_images(self):
sprite_sheet = SpriteSheet("spritesheet_players.png")
self.standing_frames = [sprite_sheet.get_image(156, 101, 45,54,const.WHITE)]
for frame in self.standing_frames:
frame.set_colorkey(const.BLACK)
self.walk_frames_r = [sprite_sheet.get_image(156, 156, 45,54,const.BLACK),
sprite_sheet.get_image(115, 48, 45, 52,const.BLACK),
sprite_sheet.get_image(156, 101, 45, 54,const.BLACK)]
self.walk_frames_l = []
for frame in self.walk_frames_r:
frame.set_colorkey(const.BLACK)
self.walk_frames_l.append(pygame.transform.flip(frame, True, False))
self.jump_frame = sprite_sheet.get_image(156, 101, 45, 54,const.BLACK)
self.jump_frame.set_colorkey(const.BLACK)
def animate(self):
now = pygame.time.get_ticks()
if self.change_x != 0:
self.walking = True
else:
self.walking = False
# show walk animation
if self.walking:
if now - self.last_update > 400:
self.last_update = now
self.current_frame = (self.current_frame + 1) % len(self.walk_frames_l)
#bottom = self.rect.bottom
if self.sign_direc > 0:
self.image = self.walk_frames_r[self.current_frame]
else:
self.image = self.walk_frames_l[self.current_frame]
#self.rect.bottom = bottom
# show idle animation
if not self.jumping and not self.walking:
if now - self.last_update > 350:
self.last_update = now
self.current_frame = (self.current_frame + 1) % len(self.standing_frames)
#bottom = self.rect.bottom
self.image = self.standing_frames[self.current_frame]
#self.rect = self.image.get_rect()
#self.rect.bottom = bottom
self.mask = pygame.mask.from_surface(self.image)
        # Set a reference to the image rect.
#self.rect = self.image.get_rect()
def update(self):
self.animate()
self.calc_grav()
        # Clamp the enemy at the right edge of the screen and reverse direction
if self.rect.right > const.SCREEN_WIDTH:
self.rect.right = const.SCREEN_WIDTH
self.sign_direc = -self.sign_direc
        # Clamp the enemy at the left edge of the screen and reverse direction
if self.rect.left < 0:
self.rect.left = 0
self.sign_direc = -self.sign_direc
        # Clamp the enemy at the bottom of the screen
if self.rect.bottom > const.SCREEN_HEIGHT:
self.rect.bottom = const.SCREEN_HEIGHT
        # Clamp the enemy at the top of the screen
if self.rect.top < 0:
self.rect.top = 0
self.rect.x += self.sign_direc * self.change_x
        # Work out where the enemy is standing
        # Check if the enemy is on a platform
platform_hit_list = pygame.sprite.spritecollide(self, self.platforms, False)
self.rect.x += self.sign_direc * 1
        # Check whether there is another platform ahead
platform_hit_list_2 = pygame.sprite.spritecollide(self, self.platforms, False)
self.rect.y -= 2
self.rect.x -= self.sign_direc * 1
        # if the enemy is on exactly one platform, check whether it is at an edge
if platform_hit_list == platform_hit_list_2 and len(platform_hit_list)==1:
for block in platform_hit_list:
if self.sign_direc > 0:
if self.rect.right >= block.rect.right :
self.sign_direc = -self.sign_direc
#self.image = self.smaller_left
elif self.sign_direc < 0:
if self.rect.left <= block.rect.left :
self.sign_direc = -self.sign_direc
#self.image=self.smaller_right
#self.rect.top = block.rect.bottom
else:
self.sign_direc=self.sign_direc
for block in platform_hit_list:
if self.change_y > 0 and self.rect.bottom >= block.rect.top and self.rect.top <= block.rect.top:
self.rect.bottom = block.rect.top
self.change_y = 0
if self.type and random.uniform(0,1)<0.1 and len(platform_hit_list)>0:
self.change_y = -3
block_hit_list = pygame.sprite.spritecollide(self, self.level.platform_stone_list, False)
for block in block_hit_list :
# Reset our position based on the top/bottom of the object.
if self.change_y < 0 and self.rect.top <= block.rect.bottom and self.rect.bottom >= block.rect.bottom:
self.rect.top = block.rect.bottom +2
self.change_y = -self.change_y
block_hit_list = pygame.sprite.spritecollide(self, self.level.block_list, False)
if len(block_hit_list)>=1:
if len(block_hit_list)>=1:
block= block_hit_list[0]
if self.sign_direc > 0:
self.rect.right = block.rect.left
self.sign_direc = -self.sign_direc
elif self.sign_direc < 0:
# Otherwise if we are moving left, do the opposite.
self.rect.left = block.rect.right
self.sign_direc = -self.sign_direc
self.rect.y += self.change_y
def calc_grav(self):
""" Calculate effect of gravity. """
if self.change_y == 0:
self.change_y = 1
else:
self.change_y += .15
# See if we are on the ground.
if self.rect.y >= const.SCREEN_HEIGHT - self.rect.height and self.change_y >= 0:
self.change_y = 0
self.rect.y = const.SCREEN_HEIGHT - self.rect.height
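# Worked numbers for calc_grav (illustrative): starting from rest, the fall
# speed after n frames is 1 + 0.15*(n-1) pixels/frame, so a 100-pixel drop
# takes roughly 31 frames under this scheme.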
class Enemy_bubble(pygame.sprite.Sprite):
# -- Methods
def __init__(self, enemy):
""" Constructor function """
# Call the parent's constructor
super().__init__()
# Create an image of the block, and fill it with a color.
# This could also be an image loaded from the disk.
self.direction= enemy.direction
sprite_sheet_left = SpriteSheet("playerBlue_dead.png")
# Grab the image for this platform
self.image_left = sprite_sheet_left.get_image(0,
0,
45,
47, const.BLACK)
        # Set a reference to the image rect.
self.size = self.image_left.get_size()
self.smaller_left = pygame.transform.scale(self.image_left, (int(self.size[0]*0.7), int(self.size[1]*0.7)))
self.type=enemy.type
self.x_speed=enemy.change_x
bub_2 = pygame.image.load("bubble.png").convert()
bub = pygame.Surface([225, 225]).convert()
#bub_2.set_alpha(90)
# Copy the sprite from the large sheet onto the smaller image
bub.blit(bub_2, (0, 0))
bub=pygame.transform.scale(bub, (int(bub.get_size()[0]*0.2), int(bub.get_size()[1]*0.2)))
# Assuming black works as the transparent color
bub.set_colorkey(const.BLACK)
bub.set_alpha(150)
pygame.Surface.blit(bub,self.smaller_left, (6, 4))
#self.smaller_right.blit(bub,(-100,-100))
bub.set_colorkey(const.BLACK)
#bub.set_alpha(90)
self.image = bub
#self.image.set_alpha(90)
        # Set a reference to the image rect.
self.rect = self.image.get_rect()
self.radius = 35
#pygame.draw.circle(self.image, RED, self.rect.center , self.radius)
self.start_time = pygame.time.get_ticks()
self.time = self.start_time
self.rect.x = enemy.rect.x
self.rect.y = enemy.rect.y
self.platforms = enemy.platforms
self.change_y = -3
self.level = enemy.level
def update(self):
        # Clamp the bubble at the right edge of the screen
        if self.rect.right > const.SCREEN_WIDTH:
            self.rect.right = const.SCREEN_WIDTH
        # Clamp the bubble at the left edge of the screen
        if self.rect.left < 0:
            self.rect.left = 0
        # Clamp the bubble at the bottom of the screen
        if self.rect.bottom > const.SCREEN_HEIGHT:
            self.rect.bottom = const.SCREEN_HEIGHT
        # Clamp the bubble at the top of the screen
        if self.rect.top < 0:
            self.rect.top = 0
# life time
self.time += 1
block_hit_list = pygame.sprite.spritecollide(self, self.level.platform_stone_list, False)
for block in block_hit_list :
if self.change_y > 0 and self.rect.bottom >= block.rect.top and self.rect.top <= block.rect.top:
self.rect.bottom = block.rect.top -2
self.change_y = 0
elif self.change_y < 0 and self.rect.top <= block.rect.bottom and self.rect.bottom >= block.rect.bottom:
self.rect.top = block.rect.bottom +2
self.change_y = 0
self.rect.y += self.change_y
if self.time - self.start_time > 500:
enemy=Enemy(self.rect.x, self.rect.y, self.level,self.x_speed,self.type)
self.level.enemy_list.add(enemy)
self.level.active_sprite.add(enemy)
self.level.active_sprite.remove(self)
self.kill()
| 35.769939
| 116
| 0.54738
| 11,452
| 0.982077
| 0
| 0
| 0
| 0
| 0
| 0
| 2,166
| 0.185747
|
f7a6e933e409ba532f518e3e1b2e619a58f1715d
| 10,715
|
py
|
Python
|
schicluster/_hicluster_internal.py
|
zhoujt1994/scHiCluster
|
1f7e0cc5a56a357659a6b10b34053e6addbf30a5
|
[
"MIT"
] | 27
|
2019-07-10T23:17:33.000Z
|
2022-01-14T07:34:42.000Z
|
schicluster/_hicluster_internal.py
|
zhoujt1994/scHiCluster
|
1f7e0cc5a56a357659a6b10b34053e6addbf30a5
|
[
"MIT"
] | 4
|
2019-11-01T01:12:09.000Z
|
2022-03-29T11:24:35.000Z
|
schicluster/_hicluster_internal.py
|
zhoujt1994/scHiCluster
|
1f7e0cc5a56a357659a6b10b34053e6addbf30a5
|
[
"MIT"
] | 8
|
2019-12-24T13:54:11.000Z
|
2022-01-26T17:21:55.000Z
|
import argparse
import inspect
import logging
import sys
from .__main__ import setup_logging
log = logging.getLogger()
DESCRIPTION = """
hic-internal is used for automation and is not intended to be used by end users.
Use hicluster instead.
"""
EPILOG = ''
def impute_chromosome_internal_subparser(subparser):
parser = subparser.add_parser('impute-chromosome',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help="RWR imputation for one chromosome in one cell")
parser_req = parser.add_argument_group("Required inputs")
parser_req.add_argument(
"--scool_url",
type=str,
required=True
)
parser_req.add_argument(
"--chrom",
type=str,
required=True
)
parser_req.add_argument(
"--resolution",
type=int,
required=True
)
parser_req.add_argument(
"--output_path",
type=str,
required=True
)
parser.add_argument(
"--logscale",
type=bool,
default=True,
)
parser.add_argument(
"--pad",
type=int,
default=1
)
parser.add_argument(
"--std",
type=int,
default=1
)
parser.add_argument(
"--rp",
type=float,
default=0.5
)
parser.add_argument(
"--tol",
type=float,
default=0.01
)
parser.add_argument(
"--window_size",
type=int,
default=500000000
)
parser.add_argument(
"--step_size",
type=int,
default=10000000
)
parser.add_argument(
"--output_dist",
type=int,
default=500000000
)
parser.add_argument(
"--min_cutoff",
type=float,
default=0
)
return
def aggregate_chromosomes_internal_subparser(subparser):
parser = subparser.add_parser('aggregate-chromosomes',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help="Aggregate chromosome HDFs for one cell")
parser_req = parser.add_argument_group("Required inputs")
parser_req.add_argument(
"--chrom_size_path",
type=str,
required=True
)
parser_req.add_argument(
"--resolution",
type=int,
required=True
)
parser_req.add_argument(
"--input_dir",
type=str,
required=True
)
parser_req.add_argument(
"--output_path",
type=str,
required=True
)
parser_req.add_argument(
"--chrom_wildcard",
type=str,
default='{chrom}.hdf'
)
def calculate_loop_matrix_internal_subparser(subparser):
parser = subparser.add_parser('calculate-loop-matrix',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help="Calculate Loop Matrix E and T for single chromosome of one cell")
parser_req = parser.add_argument_group("Required inputs")
parser_req.add_argument(
"--cell_url",
type=str,
required=True
)
parser_req.add_argument(
"--chrom",
type=str,
required=True
)
parser_req.add_argument(
"--resolution",
type=int,
required=True
)
parser_req.add_argument(
"--output_prefix",
type=str,
required=True
)
parser.add_argument(
"--dist",
type=int,
default=10050000
)
parser.add_argument(
"--cap",
type=int,
default=5
)
parser.add_argument(
"--pad",
type=int,
default=5
)
parser.add_argument(
"--gap",
type=int,
default=2
)
parser.add_argument(
"--min_cutoff",
type=float,
default=1e-6
)
parser.add_argument('--log_e', dest='log_e', action='store_true',
help='Normalize E at log scale')
parser.set_defaults(log_e=False)
def merge_cell_impute_matrix_internal_subparser(subparser):
parser = subparser.add_parser('merge-cell-impute-matrix',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help="Merge Loop Matrix E and T from cells to group")
parser_req = parser.add_argument_group("Required inputs")
parser_req.add_argument(
"--cell_urls_path",
type=str,
required=True
)
parser_req.add_argument(
"--chrom",
type=str,
required=True
)
parser_req.add_argument(
"--output_prefix",
type=str,
required=True
)
return
def merge_cell_loop_bkg_internal_subparser(subparser):
parser = subparser.add_parser('merge-loop-matrix',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help="Merge Loop Matrix E and T from cells to group")
parser_req = parser.add_argument_group("Required inputs")
parser_req.add_argument(
"--output_dir",
type=str,
required=True
)
parser_req.add_argument(
"--output_prefix",
type=str,
required=True
)
parser_req.add_argument(
"--merge_type",
type=str,
required=True
)
def merge_group_chunks_internal_subparser(subparser):
parser = subparser.add_parser('merge-group-chunks',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help="Aggregate group chunks cool files to one scool file per group.")
parser_req = parser.add_argument_group("Required inputs")
parser_req.add_argument(
"--chrom_size_path",
type=str,
required=True
)
parser_req.add_argument(
"--resolution",
type=int,
required=True
)
parser_req.add_argument(
"--group",
type=str,
required=True
)
parser_req.add_argument(
"--output_dir",
type=str,
required=True
)
def merge_raw_scool_internal_subparser(subparser):
parser = subparser.add_parser('merge-raw-scool',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help="Merge single cell raw matrix by cluster stored in scool files.")
parser_req = parser.add_argument_group("Required inputs")
parser_req.add_argument(
"--chrom_size_path",
type=str,
required=True
)
parser_req.add_argument(
"--resolution",
type=int,
required=True
)
parser_req.add_argument(
"--cell_table_path",
type=str,
required=True
)
parser_req.add_argument(
"--output_dir",
type=str,
required=True
)
parser.add_argument(
"--cpu",
type=int,
default=1
)
def call_loop_internal_subparser(subparser):
parser = subparser.add_parser('call-loop',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help="Call loop using group matrix stored in scool file.")
parser_req = parser.add_argument_group("Required inputs")
parser_req.add_argument(
"--group_prefix",
type=str,
required=True
)
parser_req.add_argument(
"--resolution",
type=int,
required=True
)
parser_req.add_argument(
"--output_prefix",
type=str,
required=True
)
parser_req.add_argument(
"--thres_bl",
type=float,
default=1.33
)
parser_req.add_argument(
"--thres_donut",
type=float,
default=1.33
)
parser_req.add_argument(
"--thres_h",
type=float,
default=1.2
)
parser_req.add_argument(
"--thres_v",
type=float,
default=1.2
)
parser_req.add_argument(
"--fdr_thres",
type=float,
default=0.1
)
parser_req.add_argument(
"--dist_thres",
type=float,
default=20000
)
parser_req.add_argument(
"--size_thres",
type=float,
default=1
)
def internal_main():
parser = argparse.ArgumentParser(description=DESCRIPTION,
epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers(
title="functions",
dest="command",
metavar=""
)
# add subparsers
current_module = sys.modules[__name__]
    # collect all subparser-registering functions defined in this module
for name, register_subparser_func in inspect.getmembers(current_module, inspect.isfunction):
if 'internal_subparser' in name:
register_subparser_func(subparsers)
# initiate
args = None
if len(sys.argv) > 1:
args = parser.parse_args()
else:
# print out help
parser.parse_args(["-h"])
exit()
# set up logging
if not logging.root.handlers:
setup_logging(stdout=True,
quiet=False)
# execute command
args_vars = vars(args)
for k, v in args_vars.items():
        logging.debug('%s\t%s\t%s', k, v, type(v))
cur_command = args_vars.pop('command')
# Do real import here:
if cur_command == 'impute-chromosome':
from .impute.impute_chromosome import impute_chromosome as func
elif cur_command == 'aggregate-chromosomes':
from .cool.utilities import aggregate_chromosomes as func
elif cur_command == 'calculate-loop-matrix':
from .loop.loop_bkg import calculate_chrom_background_normalization as func
elif cur_command == 'merge-loop-matrix':
from .loop.merge_cell_to_group import merge_cells_for_single_chromosome as func
elif cur_command == 'merge-group-chunks':
from .loop.merge_cell_to_group import merge_group_chunks_to_group_cools as func
elif cur_command == 'merge-cell-impute-matrix':
from .impute.merge_cell_to_group import merge_cells_for_single_chromosome as func
elif cur_command == 'call-loop':
from .loop.loop_calling import call_loops as func
elif cur_command == 'merge-raw-scool':
from .loop.merge_raw_matrix import merge_raw_scool_by_cluster as func
else:
        log.debug(f'{cur_command} not known, check the if/else part of internal_main')
parser.parse_args(["-h"])
return
# run the command
func(**args_vars)
return
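# Illustrative invocation (the subcommand and flags come from the parsers
# above; the paths are made up):
#
#   hic-internal impute-chromosome \
#       --scool_url /data/cells.scool::/cells/cell_001 \
#       --chrom chr1 --resolution 10000 \
#       --output_path /data/impute/cell_001_chr1.hdf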
| 23.653422
| 105
| 0.583481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,997
| 0.186374
|
f7a809bbbe91eb7260be62d0dad3baf769a4cf97
| 5,013
|
py
|
Python
|
tests/test_host_resolver.py
|
mssaleh/aioatomapi
|
2b9f00fce993153c52595e09ecc80562574af62c
|
[
"MIT"
] | null | null | null |
tests/test_host_resolver.py
|
mssaleh/aioatomapi
|
2b9f00fce993153c52595e09ecc80562574af62c
|
[
"MIT"
] | 27
|
2021-10-13T17:17:38.000Z
|
2022-03-31T17:24:08.000Z
|
tests/test_host_resolver.py
|
mssaleh/aioatomapi
|
2b9f00fce993153c52595e09ecc80562574af62c
|
[
"MIT"
] | null | null | null |
import asyncio
import socket
import pytest
from mock import AsyncMock, MagicMock, patch
import aioatomapi.host_resolver as hr
from aioatomapi.core import APIConnectionError
@pytest.fixture
def async_zeroconf():
with patch("zeroconf.asyncio.AsyncZeroconf") as klass:
yield klass.return_value
@pytest.fixture
def addr_infos():
return [
hr.AddrInfo(
family=socket.AF_INET,
type=socket.SOCK_STREAM,
proto=socket.IPPROTO_TCP,
sockaddr=hr.IPv4Sockaddr(address="10.0.0.42", port=6052),
),
hr.AddrInfo(
family=socket.AF_INET6,
type=socket.SOCK_STREAM,
proto=socket.IPPROTO_TCP,
sockaddr=hr.IPv6Sockaddr(
address="2001:db8:85a3::8a2e:370:7334",
port=6052,
flowinfo=0,
scope_id=0,
),
),
]
@pytest.mark.asyncio
async def test_resolve_host_zeroconf(async_zeroconf, addr_infos):
info = MagicMock()
info.addresses_by_version.return_value = [
b"\n\x00\x00*",
b" \x01\r\xb8\x85\xa3\x00\x00\x00\x00\x8a.\x03ps4",
]
async_zeroconf.async_get_service_info = AsyncMock(return_value=info)
async_zeroconf.async_close = AsyncMock()
loop = asyncio.get_event_loop()
ret = await hr._async_resolve_host_zeroconf(loop, "asdf", 6052)
async_zeroconf.async_get_service_info.assert_called_once_with(
"_atomlib._tcp.local.", "asdf._atomlib._tcp.local.", 3000
)
async_zeroconf.async_close.assert_called_once_with()
assert ret == addr_infos
@pytest.mark.asyncio
async def test_resolve_host_zeroconf_empty(async_zeroconf):
async_zeroconf.async_get_service_info = AsyncMock(return_value=None)
async_zeroconf.async_close = AsyncMock()
loop = asyncio.get_event_loop()
ret = await hr._async_resolve_host_zeroconf(loop, "asdf.local", 6052)
assert ret == []
@pytest.mark.asyncio
async def test_resolve_host_getaddrinfo(addr_infos):
eventloop = AsyncMock()
eventloop.getaddrinfo.return_value = [
(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_TCP,
"canon1",
("10.0.0.42", 6052),
),
(
socket.AF_INET6,
socket.SOCK_STREAM,
socket.IPPROTO_TCP,
"canon2",
("2001:db8:85a3::8a2e:370:7334", 6052, 0, 0),
),
(-1, socket.SOCK_STREAM, socket.IPPROTO_TCP, "canon3", ("10.0.0.42", 6052)),
]
ret = await hr._async_resolve_host_getaddrinfo(eventloop, "example.com", 6052)
assert ret == addr_infos
@pytest.mark.asyncio
async def test_resolve_host_getaddrinfo_oserror():
eventloop = AsyncMock()
eventloop.getaddrinfo.side_effect = OSError()
with pytest.raises(APIConnectionError):
await hr._async_resolve_host_getaddrinfo(eventloop, "example.com", 6052)
@pytest.mark.asyncio
@patch("aioatomapi.host_resolver._async_resolve_host_zeroconf")
@patch("aioatomapi.host_resolver._async_resolve_host_getaddrinfo")
async def test_resolve_host_mdns(resolve_addr, resolve_zc, addr_infos):
resolve_zc.return_value = addr_infos
loop = asyncio.get_event_loop()
ret = await hr.async_resolve_host(loop, "example.local", 6052)
resolve_zc.assert_called_once_with(loop, "example", 6052, zeroconf_instance=None)
resolve_addr.assert_not_called()
assert ret == addr_infos[0]
@pytest.mark.asyncio
@patch("aioatomapi.host_resolver._async_resolve_host_zeroconf")
@patch("aioatomapi.host_resolver._async_resolve_host_getaddrinfo")
async def test_resolve_host_mdns_empty(resolve_addr, resolve_zc, addr_infos):
resolve_zc.return_value = []
resolve_addr.return_value = addr_infos
loop = asyncio.get_event_loop()
ret = await hr.async_resolve_host(loop, "example.local", 6052)
resolve_zc.assert_called_once_with(loop, "example", 6052, zeroconf_instance=None)
resolve_addr.assert_called_once_with(loop, "example.local", 6052)
assert ret == addr_infos[0]
@pytest.mark.asyncio
@patch("aioatomapi.host_resolver._async_resolve_host_zeroconf")
@patch("aioatomapi.host_resolver._async_resolve_host_getaddrinfo")
async def test_resolve_host_addrinfo(resolve_addr, resolve_zc, addr_infos):
resolve_addr.return_value = addr_infos
ret = await hr.async_resolve_host(None, "example.com", 6052)
resolve_zc.assert_not_called()
resolve_addr.assert_called_once_with(None, "example.com", 6052)
assert ret == addr_infos[0]
@pytest.mark.asyncio
@patch("aioatomapi.host_resolver._async_resolve_host_zeroconf")
@patch("aioatomapi.host_resolver._async_resolve_host_getaddrinfo")
async def test_resolve_host_addrinfo_empty(resolve_addr, resolve_zc, addr_infos):
resolve_addr.return_value = []
with pytest.raises(APIConnectionError):
await hr.async_resolve_host(None, "example.com", 6052)
resolve_zc.assert_not_called()
resolve_addr.assert_called_once_with(None, "example.com", 6052)
| 32.764706
| 85
| 0.710353
| 0
| 0
| 113
| 0.022541
| 4,808
| 0.959106
| 3,388
| 0.675843
| 873
| 0.174147
|
f7a952870293e9fc90ffc7d9d4a818a5a7c4f56d
| 85,241
|
py
|
Python
|
azure-mgmt-web/azure/mgmt/web/operations/diagnostics_operations.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2021-09-07T18:36:04.000Z
|
2021-09-07T18:36:04.000Z
|
azure-mgmt-web/azure/mgmt/web/operations/diagnostics_operations.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 2
|
2019-10-02T23:37:38.000Z
|
2020-10-02T01:17:31.000Z
|
azure-mgmt-web/azure/mgmt/web/operations/diagnostics_operations.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2019-06-17T22:18:23.000Z
|
2019-06-17T22:18:23.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from .. import models
class DiagnosticsOperations(object):
    """DiagnosticsOperations operations.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: API Version. Constant value: "2018-02-01".
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):

        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.api_version = "2018-02-01"

        self.config = config
def list_hosting_environment_detector_responses(
self, resource_group_name, name, custom_headers=None, raw=False, **operation_config):
"""List Hosting Environment Detector Responses.
List Hosting Environment Detector Responses.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param name: App Service Environment Name
:type name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of DetectorResponse
:rtype:
~azure.mgmt.web.models.DetectorResponsePaged[~azure.mgmt.web.models.DetectorResponse]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_hosting_environment_detector_responses.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.DetectorResponsePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DetectorResponsePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_hosting_environment_detector_responses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/hostingEnvironments/{name}/detectors'}
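# --- Usage sketch (not part of the generated class) -------------------------
# A minimal sketch of driving this operation through the public client,
# assuming the msrest-era WebSiteManagementClient from this package; the
# service-principal values, resource group, and ASE name are placeholders.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.web import WebSiteManagementClient

credentials = ServicePrincipalCredentials(
    client_id="<app-id>", secret="<secret>", tenant="<tenant-id>")
client = WebSiteManagementClient(credentials, "<subscription-id>")

# The DetectorResponsePaged return value lazily follows nextLink while it is
# iterated, so no explicit pagination handling is needed.
for detector in client.diagnostics.list_hosting_environment_detector_responses(
        "my-resource-group", "my-ase"):
    print(detector.name)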
def get_hosting_environment_detector_response(
self, resource_group_name, name, detector_name, start_time=None, end_time=None, time_grain=None, custom_headers=None, raw=False, **operation_config):
"""Get Hosting Environment Detector Response.
Get Hosting Environment Detector Response.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param name: App Service Environment Name
:type name: str
:param detector_name: Detector Resource Name
:type detector_name: str
:param start_time: Start Time
:type start_time: datetime
:param end_time: End Time
:type end_time: datetime
:param time_grain: Time Grain
:type time_grain: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DetectorResponse or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DetectorResponse or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.get_hosting_environment_detector_response.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'detectorName': self._serialize.url("detector_name", detector_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if start_time is not None:
query_parameters['startTime'] = self._serialize.query("start_time", start_time, 'iso-8601')
if end_time is not None:
query_parameters['endTime'] = self._serialize.query("end_time", end_time, 'iso-8601')
if time_grain is not None:
query_parameters['timeGrain'] = self._serialize.query("time_grain", time_grain, 'str', pattern=r'PT[1-9][0-9]+[SMH]')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DetectorResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_hosting_environment_detector_response.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/hostingEnvironments/{name}/detectors/{detectorName}'}
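# --- Usage sketch: time-windowed detector queries ---------------------------
# start_time and end_time are serialized as ISO-8601 (see the query
# construction above), so plain datetime objects are expected. The detector
# name and resource names are placeholders; client is built as in the
# earlier sketch.
from datetime import datetime, timedelta

end = datetime.utcnow()
response = client.diagnostics.get_hosting_environment_detector_response(
    "my-resource-group", "my-ase", "runtimeavailability",
    start_time=end - timedelta(hours=6), end_time=end)
print(response.name)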
def list_site_detector_responses(
self, resource_group_name, site_name, custom_headers=None, raw=False, **operation_config):
"""List Site Detector Responses.
List Site Detector Responses.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of DetectorResponse
:rtype:
~azure.mgmt.web.models.DetectorResponsePaged[~azure.mgmt.web.models.DetectorResponse]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_site_detector_responses.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.DetectorResponsePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DetectorResponsePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_site_detector_responses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/detectors'}
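# --- Paging mechanics sketch -------------------------------------------------
# A simplified, dependency-free illustration of the internal_paging pattern
# used by the list_* operations above: the Paged wrapper invokes the closure
# with next_link=None for the first page, then with each nextLink until the
# service stops returning one. The dict-based pages stand in for HTTP
# responses; the real DetectorResponsePaged deserializes models instead.
def fetch_all(get_page):
    items, next_link = [], None
    while True:
        page = get_page(next_link)  # analogous to internal_paging(next_link)
        items.extend(page["value"])
        next_link = page.get("nextLink")
        if not next_link:
            return items


_pages = {None: {"value": [1, 2], "nextLink": "page2"},
          "page2": {"value": [3]}}
assert fetch_all(lambda link: _pages[link]) == [1, 2, 3]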
def get_site_detector_response(
self, resource_group_name, site_name, detector_name, start_time=None, end_time=None, time_grain=None, custom_headers=None, raw=False, **operation_config):
"""Get site detector response.
Get site detector response.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param detector_name: Detector Resource Name
:type detector_name: str
:param start_time: Start Time
:type start_time: datetime
:param end_time: End Time
:type end_time: datetime
:param time_grain: Time Grain
:type time_grain: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DetectorResponse or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DetectorResponse or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.get_site_detector_response.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'detectorName': self._serialize.url("detector_name", detector_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if start_time is not None:
query_parameters['startTime'] = self._serialize.query("start_time", start_time, 'iso-8601')
if end_time is not None:
query_parameters['endTime'] = self._serialize.query("end_time", end_time, 'iso-8601')
if time_grain is not None:
query_parameters['timeGrain'] = self._serialize.query("time_grain", time_grain, 'str', pattern=r'PT[1-9][0-9]+[SMH]')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DetectorResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_site_detector_response.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/detectors/{detectorName}'}
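# --- Usage sketch: raw responses ---------------------------------------------
# With raw=True the operation returns msrest's ClientRawResponse instead of
# the bare model: .output holds the deserialized DetectorResponse and
# .response the underlying transport response. Names are placeholders;
# client is built as in the earlier sketch.
raw_result = client.diagnostics.get_site_detector_response(
    "my-resource-group", "my-webapp", "runtimeavailability", raw=True)
detector = raw_result.output               # deserialized DetectorResponse model
status = raw_result.response.status_code   # underlying HTTP status (200 here)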
def list_site_diagnostic_categories(
self, resource_group_name, site_name, custom_headers=None, raw=False, **operation_config):
"""Get Diagnostics Categories.
Get Diagnostics Categories.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of DiagnosticCategory
:rtype:
~azure.mgmt.web.models.DiagnosticCategoryPaged[~azure.mgmt.web.models.DiagnosticCategory]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_site_diagnostic_categories.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.DiagnosticCategoryPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DiagnosticCategoryPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_site_diagnostic_categories.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics'}
def get_site_diagnostic_category(
self, resource_group_name, site_name, diagnostic_category, custom_headers=None, raw=False, **operation_config):
"""Get Diagnostics Category.
Get Diagnostics Category.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Diagnostic Category
:type diagnostic_category: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DiagnosticCategory or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DiagnosticCategory or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.get_site_diagnostic_category.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DiagnosticCategory', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_site_diagnostic_category.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}'}
def list_site_analyses(
self, resource_group_name, site_name, diagnostic_category, custom_headers=None, raw=False, **operation_config):
"""Get Site Analyses.
Get Site Analyses.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Diagnostic Category
:type diagnostic_category: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of AnalysisDefinition
:rtype:
~azure.mgmt.web.models.AnalysisDefinitionPaged[~azure.mgmt.web.models.AnalysisDefinition]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_site_analyses.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.AnalysisDefinitionPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.AnalysisDefinitionPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_site_analyses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/analyses'}
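# --- Usage sketch: discovering categories, then analyses ----------------------
# The diagnostic_category argument comes from list_site_diagnostic_categories;
# since nested ARM resources may report a "site/category" style name, taking
# the last path segment is a defensive placeholder convention. client is
# built as in the earlier sketch.
for category in client.diagnostics.list_site_diagnostic_categories(
        "my-resource-group", "my-webapp"):
    short_name = category.name.split("/")[-1]
    for analysis in client.diagnostics.list_site_analyses(
            "my-resource-group", "my-webapp", short_name):
        print(short_name, "->", analysis.name)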
def get_site_analysis(
self, resource_group_name, site_name, diagnostic_category, analysis_name, custom_headers=None, raw=False, **operation_config):
"""Get Site Analysis.
Get Site Analysis.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Diagnostic Category
:type diagnostic_category: str
:param analysis_name: Analysis Name
:type analysis_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DiagnosticAnalysis or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DiagnosticAnalysis or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.get_site_analysis.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'analysisName': self._serialize.url("analysis_name", analysis_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DiagnosticAnalysis', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_site_analysis.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/analyses/{analysisName}'}
def execute_site_analysis(
self, resource_group_name, site_name, diagnostic_category, analysis_name, start_time=None, end_time=None, time_grain=None, custom_headers=None, raw=False, **operation_config):
"""Execute Analysis.
Execute Analysis.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Category Name
:type diagnostic_category: str
:param analysis_name: Analysis Resource Name
:type analysis_name: str
:param start_time: Start Time
:type start_time: datetime
:param end_time: End Time
:type end_time: datetime
:param time_grain: Time Grain
:type time_grain: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DiagnosticAnalysis or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DiagnosticAnalysis or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.execute_site_analysis.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'analysisName': self._serialize.url("analysis_name", analysis_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if start_time is not None:
query_parameters['startTime'] = self._serialize.query("start_time", start_time, 'iso-8601')
if end_time is not None:
query_parameters['endTime'] = self._serialize.query("end_time", end_time, 'iso-8601')
if time_grain is not None:
query_parameters['timeGrain'] = self._serialize.query("time_grain", time_grain, 'str', pattern=r'PT[1-9][0-9]+[SMH]')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DiagnosticAnalysis', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
execute_site_analysis.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/analyses/{analysisName}/execute'}
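# --- Usage sketch: executing an analysis ---------------------------------------
# Note the client-side time_grain pattern above, PT[1-9][0-9]+[SMH]: it
# requires at least two digits, so "PT15M" serializes while "PT1M" would be
# rejected before the request is sent. Category and analysis names are
# placeholders; client is built as in the earlier sketch.
from datetime import datetime, timedelta

end = datetime.utcnow()
analysis = client.diagnostics.execute_site_analysis(
    "my-resource-group", "my-webapp", "availability", "appanalysis",
    start_time=end - timedelta(hours=24), end_time=end, time_grain="PT15M")
print(analysis.name)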
def list_site_detectors(
self, resource_group_name, site_name, diagnostic_category, custom_headers=None, raw=False, **operation_config):
"""Get Detectors.
Get Detectors.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Diagnostic Category
:type diagnostic_category: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of DetectorDefinition
:rtype:
~azure.mgmt.web.models.DetectorDefinitionPaged[~azure.mgmt.web.models.DetectorDefinition]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_site_detectors.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.DetectorDefinitionPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DetectorDefinitionPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_site_detectors.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/detectors'}
def get_site_detector(
self, resource_group_name, site_name, diagnostic_category, detector_name, custom_headers=None, raw=False, **operation_config):
"""Get Detector.
Get Detector.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Diagnostic Category
:type diagnostic_category: str
:param detector_name: Detector Name
:type detector_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of DetectorDefinition
:rtype:
~azure.mgmt.web.models.DetectorDefinitionPaged[~azure.mgmt.web.models.DetectorDefinition]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.get_site_detector.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'detectorName': self._serialize.url("detector_name", detector_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.DetectorDefinitionPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DetectorDefinitionPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
get_site_detector.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/detectors/{detectorName}'}
def execute_site_detector(
self, resource_group_name, site_name, detector_name, diagnostic_category, start_time=None, end_time=None, time_grain=None, custom_headers=None, raw=False, **operation_config):
"""Execute Detector.
Execute Detector.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param detector_name: Detector Resource Name
:type detector_name: str
:param diagnostic_category: Category Name
:type diagnostic_category: str
:param start_time: Start Time
:type start_time: datetime
:param end_time: End Time
:type end_time: datetime
:param time_grain: Time Grain
:type time_grain: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DiagnosticDetectorResponse or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DiagnosticDetectorResponse or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.execute_site_detector.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'detectorName': self._serialize.url("detector_name", detector_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if start_time is not None:
query_parameters['startTime'] = self._serialize.query("start_time", start_time, 'iso-8601')
if end_time is not None:
query_parameters['endTime'] = self._serialize.query("end_time", end_time, 'iso-8601')
if time_grain is not None:
query_parameters['timeGrain'] = self._serialize.query("time_grain", time_grain, 'str', pattern=r'PT[1-9][0-9]+[SMH]')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DiagnosticDetectorResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
execute_site_detector.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/detectors/{detectorName}/execute'}
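# --- Usage sketch: handling service errors -------------------------------------
# Any non-200 status raises DefaultErrorResponseException (see above), so a
# targeted except keeps diagnostics failures distinct from programming
# errors. Note the argument order: detector_name precedes diagnostic_category
# for this operation. Names are placeholders; client is built as in the
# earlier sketch.
from azure.mgmt.web.models import DefaultErrorResponseException

try:
    result = client.diagnostics.execute_site_detector(
        "my-resource-group", "my-webapp", "sitecpuanalysis", "availability")
    print(result.name)
except DefaultErrorResponseException as err:
    print("diagnostics call failed:", err.message)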
def list_site_detector_responses_slot(
self, resource_group_name, site_name, slot, custom_headers=None, raw=False, **operation_config):
"""List Site Detector Responses.
List Site Detector Responses.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param slot: Slot Name
:type slot: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of DetectorResponse
:rtype:
~azure.mgmt.web.models.DetectorResponsePaged[~azure.mgmt.web.models.DetectorResponse]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_site_detector_responses_slot.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'slot': self._serialize.url("slot", slot, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.DetectorResponsePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DetectorResponsePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_site_detector_responses_slot.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/detectors'}
def get_site_detector_response_slot(
self, resource_group_name, site_name, detector_name, slot, start_time=None, end_time=None, time_grain=None, custom_headers=None, raw=False, **operation_config):
"""Get site detector response.
Get site detector response.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param detector_name: Detector Resource Name
:type detector_name: str
:param slot: Slot Name
:type slot: str
:param start_time: Start Time
:type start_time: datetime
:param end_time: End Time
:type end_time: datetime
:param time_grain: Time Grain
:type time_grain: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DetectorResponse or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DetectorResponse or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.get_site_detector_response_slot.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'detectorName': self._serialize.url("detector_name", detector_name, 'str'),
'slot': self._serialize.url("slot", slot, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if start_time is not None:
query_parameters['startTime'] = self._serialize.query("start_time", start_time, 'iso-8601')
if end_time is not None:
query_parameters['endTime'] = self._serialize.query("end_time", end_time, 'iso-8601')
if time_grain is not None:
query_parameters['timeGrain'] = self._serialize.query("time_grain", time_grain, 'str', pattern=r'PT[1-9][0-9]+[SMH]')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DetectorResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_site_detector_response_slot.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/detectors/{detectorName}'}
def list_site_diagnostic_categories_slot(
self, resource_group_name, site_name, slot, custom_headers=None, raw=False, **operation_config):
"""Get Diagnostics Categories.
Get Diagnostics Categories.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param slot: Slot Name
:type slot: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of DiagnosticCategory
:rtype:
~azure.mgmt.web.models.DiagnosticCategoryPaged[~azure.mgmt.web.models.DiagnosticCategory]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_site_diagnostic_categories_slot.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'slot': self._serialize.url("slot", slot, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.DiagnosticCategoryPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DiagnosticCategoryPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_site_diagnostic_categories_slot.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics'}
def get_site_diagnostic_category_slot(
self, resource_group_name, site_name, diagnostic_category, slot, custom_headers=None, raw=False, **operation_config):
"""Get Diagnostics Category.
Get Diagnostics Category.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Diagnostic Category
:type diagnostic_category: str
:param slot: Slot Name
:type slot: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DiagnosticCategory or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DiagnosticCategory or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.get_site_diagnostic_category_slot.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'slot': self._serialize.url("slot", slot, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DiagnosticCategory', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_site_diagnostic_category_slot.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}'}
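# --- Usage sketch: deployment-slot variants -------------------------------------
# Each operation above has a *_slot twin that scopes the call to a deployment
# slot via one extra argument. "staging" and the other names are
# placeholders; client is built as in the earlier sketch.
category = client.diagnostics.get_site_diagnostic_category_slot(
    "my-resource-group", "my-webapp", "availability", "staging")
print(category.name)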
def list_site_analyses_slot(
self, resource_group_name, site_name, diagnostic_category, slot, custom_headers=None, raw=False, **operation_config):
"""Get Site Analyses.
Get Site Analyses.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Diagnostic Category
:type diagnostic_category: str
:param slot: Slot Name
:type slot: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of AnalysisDefinition
:rtype:
~azure.mgmt.web.models.AnalysisDefinitionPaged[~azure.mgmt.web.models.AnalysisDefinition]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_site_analyses_slot.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'slot': self._serialize.url("slot", slot, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.AnalysisDefinitionPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.AnalysisDefinitionPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_site_analyses_slot.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/analyses'}
def get_site_analysis_slot(
self, resource_group_name, site_name, diagnostic_category, analysis_name, slot, custom_headers=None, raw=False, **operation_config):
"""Get Site Analysis.
Get Site Analysis.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Diagnostic Category
:type diagnostic_category: str
:param analysis_name: Analysis Name
:type analysis_name: str
:param slot: Slot Name
:type slot: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DiagnosticAnalysis or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DiagnosticAnalysis or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.get_site_analysis_slot.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'analysisName': self._serialize.url("analysis_name", analysis_name, 'str'),
'slot': self._serialize.url("slot", slot, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DiagnosticAnalysis', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_site_analysis_slot.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/analyses/{analysisName}'}
def execute_site_analysis_slot(
self, resource_group_name, site_name, diagnostic_category, analysis_name, slot, start_time=None, end_time=None, time_grain=None, custom_headers=None, raw=False, **operation_config):
"""Execute Analysis.
Execute Analysis.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Category Name
:type diagnostic_category: str
:param analysis_name: Analysis Resource Name
:type analysis_name: str
:param slot: Slot Name
:type slot: str
:param start_time: Start Time
:type start_time: datetime
:param end_time: End Time
:type end_time: datetime
:param time_grain: Time Grain
:type time_grain: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DiagnosticAnalysis or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DiagnosticAnalysis or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.execute_site_analysis_slot.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'analysisName': self._serialize.url("analysis_name", analysis_name, 'str'),
'slot': self._serialize.url("slot", slot, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if start_time is not None:
query_parameters['startTime'] = self._serialize.query("start_time", start_time, 'iso-8601')
if end_time is not None:
query_parameters['endTime'] = self._serialize.query("end_time", end_time, 'iso-8601')
if time_grain is not None:
query_parameters['timeGrain'] = self._serialize.query("time_grain", time_grain, 'str', pattern=r'PT[1-9][0-9]+[SMH]')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DiagnosticAnalysis', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
execute_site_analysis_slot.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/analyses/{analysisName}/execute'}
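    # A minimal usage sketch for running the analysis over an explicit window.
    # Note the time-grain pattern above ('PT[1-9][0-9]+[SMH]') requires at least
    # two digits, so 'PT15M' is accepted while 'PT1H' is not:
    #
    #     import datetime
    #     end = datetime.datetime.utcnow()
    #     result = client.diagnostics.execute_site_analysis_slot(
    #         'my-rg', 'my-site', 'availability', 'appanalysis', 'staging',
    #         start_time=end - datetime.timedelta(hours=6), end_time=end,
    #         time_grain='PT15M')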
def list_site_detectors_slot(
self, resource_group_name, site_name, diagnostic_category, slot, custom_headers=None, raw=False, **operation_config):
"""Get Detectors.
Get Detectors.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Diagnostic Category
:type diagnostic_category: str
:param slot: Slot Name
:type slot: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of DetectorDefinition
:rtype:
~azure.mgmt.web.models.DetectorDefinitionPaged[~azure.mgmt.web.models.DetectorDefinition]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_site_detectors_slot.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'slot': self._serialize.url("slot", slot, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.DetectorDefinitionPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DetectorDefinitionPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_site_detectors_slot.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/detectors'}
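    # A minimal usage sketch; the call returns a lazily-fetched paged collection,
    # so detectors stream in page by page as the iterator advances:
    #
    #     for detector in client.diagnostics.list_site_detectors_slot(
    #             'my-rg', 'my-site', 'availability', 'staging'):
    #         print(detector.name)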
def get_site_detector_slot(
self, resource_group_name, site_name, diagnostic_category, detector_name, slot, custom_headers=None, raw=False, **operation_config):
"""Get Detector.
Get Detector.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param diagnostic_category: Diagnostic Category
:type diagnostic_category: str
:param detector_name: Detector Name
:type detector_name: str
:param slot: Slot Name
:type slot: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of DetectorDefinition
:rtype:
~azure.mgmt.web.models.DetectorDefinitionPaged[~azure.mgmt.web.models.DetectorDefinition]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.get_site_detector_slot.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'detectorName': self._serialize.url("detector_name", detector_name, 'str'),
'slot': self._serialize.url("slot", slot, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.DetectorDefinitionPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DetectorDefinitionPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
get_site_detector_slot.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/detectors/{detectorName}'}
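    # A minimal usage sketch; note that even though a single detector is named,
    # the operation is modelled as paged (see the internal_paging helper above),
    # so the result is still iterated:
    #
    #     for detector in client.diagnostics.get_site_detector_slot(
    #             'my-rg', 'my-site', 'availability', 'runtimeavailability',
    #             'staging'):
    #         print(detector.display_name)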
def execute_site_detector_slot(
self, resource_group_name, site_name, detector_name, diagnostic_category, slot, start_time=None, end_time=None, time_grain=None, custom_headers=None, raw=False, **operation_config):
"""Execute Detector.
Execute Detector.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Site Name
:type site_name: str
:param detector_name: Detector Resource Name
:type detector_name: str
:param diagnostic_category: Category Name
:type diagnostic_category: str
:param slot: Slot Name
:type slot: str
:param start_time: Start Time
:type start_time: datetime
:param end_time: End Time
:type end_time: datetime
:param time_grain: Time Grain
:type time_grain: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DiagnosticDetectorResponse or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.web.models.DiagnosticDetectorResponse or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Construct URL
url = self.execute_site_detector_slot.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'detectorName': self._serialize.url("detector_name", detector_name, 'str'),
'diagnosticCategory': self._serialize.url("diagnostic_category", diagnostic_category, 'str'),
'slot': self._serialize.url("slot", slot, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if start_time is not None:
query_parameters['startTime'] = self._serialize.query("start_time", start_time, 'iso-8601')
if end_time is not None:
query_parameters['endTime'] = self._serialize.query("end_time", end_time, 'iso-8601')
if time_grain is not None:
query_parameters['timeGrain'] = self._serialize.query("time_grain", time_grain, 'str', pattern=r'PT[1-9][0-9]+[SMH]')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DiagnosticDetectorResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
execute_site_detector_slot.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/detectors/{detectorName}/execute'}
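    # A minimal usage sketch; unlike execute_site_analysis_slot, the detector
    # name precedes the diagnostic category in this signature, and the response
    # deserializes to DiagnosticDetectorResponse:
    #
    #     resp = client.diagnostics.execute_site_detector_slot(
    #         'my-rg', 'my-site', 'runtimeavailability', 'availability',
    #         'staging', time_grain='PT30M')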
| 49.472432 | 239 | 0.66407 | 84,683 | 0.993454 | 0 | 0 | 0 | 0 | 0 | 0 | 38,061 | 0.44651 |
f7a95c509ce50bcdd7048409ca9b8c7d9c279bfa | 432 | py | Python |
Dataset/Leetcode/valid/98/736.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | ["MIT"] | null | null | null |
Dataset/Leetcode/valid/98/736.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | ["MIT"] | null | null | null |
Dataset/Leetcode/valid/98/736.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | ["MIT"] | null | null | null |
class Solution:
    # LeetCode 98 ("Validate Binary Search Tree"); the method name is anonymized
    # to XXX by the UAST dataset. Iterative inorder traversal: a BST is valid
    # iff its inorder sequence is strictly increasing.
    def XXX(self, root: "TreeNode") -> bool:  # annotation quoted so TreeNode may be defined later
        stack = []
        cur = root
        last = float("-inf")  # value of the previously visited node
        while cur or stack:
            # walk down to the leftmost unvisited node
            while cur:
                stack.append(cur)
                cur = cur.left
            cur = stack.pop()
            if cur.val > last:  # inorder values must strictly increase
                last = cur.val
            else:
                return False
            cur = cur.right
        return True
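# A minimal self-test sketch. On LeetCode the judge supplies TreeNode; the usual
# definition is reproduced here so the snippet runs standalone:
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right

#   valid:   2      invalid:   5
#           / \                / \
#          1   3              1   4
#                                / \
#                               3   6
assert Solution().XXX(TreeNode(2, TreeNode(1), TreeNode(3))) is True
assert Solution().XXX(TreeNode(5, TreeNode(1), TreeNode(4, TreeNode(3), TreeNode(6)))) is False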
| 24 | 42 | 0.414352 | 429 | 0.993056 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0.013889 |
f7abc4036e6849052f1ad734c829603c8746cd22 | 237 | py | Python |
data/ck/check_data.py | jorgimello/meta-learning-fer | 793610ae8471f794a6837930d8bb51866c1f7c02 | ["MIT"] | 4 | 2020-10-10T03:33:15.000Z | 2022-01-17T08:00:32.000Z |
data/ck/check_data.py | jorgimello/meta-learning-facial-expression-recognition | 793610ae8471f794a6837930d8bb51866c1f7c02 | ["MIT"] | null | null | null |
data/ck/check_data.py | jorgimello/meta-learning-facial-expression-recognition | 793610ae8471f794a6837930d8bb51866c1f7c02 | ["MIT"] | null | null | null |
import numpy as np
import cv2

# Sanity-check the serialized CK+ test split: show each image with its label.
imgs = np.load('test_set_ck_extended_no_resize.npy')
lbls = np.load('test_labels_ck_extended_no_resize.npy')
for i in range(imgs.shape[0]):
    print(lbls[i])           # label of the image about to be shown
    cv2.imshow('img', imgs[i])
    cv2.waitKey(0)           # advance to the next image on any key press
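# A minimal setup sketch for smoke-testing the viewer loop above without the
# real CK+ arrays: run it once beforehand to fabricate a tiny test split. The
# 48x48 uint8 shape and the three labels are assumptions, not taken from the repo.
import numpy as np

np.save('test_set_ck_extended_no_resize.npy',
        np.random.randint(0, 256, size=(3, 48, 48), dtype=np.uint8))
np.save('test_labels_ck_extended_no_resize.npy', np.arange(3))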
| 21.545455 | 55 | 0.734177 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.337553 |
f7add4b7f65c543a8a0fd87ede46693f7cb004d9 | 773 | py | Python |
app/db/schemas/users.py | ergo-pad/paideia-api | 7ffc78366567c72722d107f06ad37aa7557b05be | ["MIT"] | null | null | null |
app/db/schemas/users.py | ergo-pad/paideia-api | 7ffc78366567c72722d107f06ad37aa7557b05be | ["MIT"] | null | null | null |
app/db/schemas/users.py | ergo-pad/paideia-api | 7ffc78366567c72722d107f06ad37aa7557b05be | ["MIT"] | null | null | null |
from pydantic import BaseModel
import typing as t


### SCHEMAS FOR USERS ###


class UserBase(BaseModel):
    alias: str
    primary_wallet_address_id: t.Optional[int]
    profile_img_url: t.Optional[str]
    is_active: bool = True
    is_superuser: bool = False


class UserOut(UserBase):
    pass


class UserCreate(UserBase):
    password: str

    class Config:
        orm_mode = True


class UserEdit(UserBase):
    password: t.Optional[str] = None

    class Config:
        orm_mode = True


class User(UserBase):
    id: int

    class Config:
        orm_mode = True


class CreateErgoAddress(BaseModel):
    user_id: int
    address: str
    is_smart_contract: bool


class ErgoAddress(CreateErgoAddress):
    id: int

    class Config:
        orm_mode = True
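# A minimal usage sketch (illustrative values only): validate an inbound payload
# with UserCreate, then hydrate a User from an ORM-style object, which is what
# orm_mode / pydantic v1's from_orm enables.
new_user = UserCreate(
    alias="alice",
    primary_wallet_address_id=1,
    profile_img_url=None,
    password="s3cret",
)
assert new_user.is_active and not new_user.is_superuser


class FakeOrmUser:
    # stand-in for a SQLAlchemy row; attribute names mirror the User schema
    id = 7
    alias = "alice"
    primary_wallet_address_id = 1
    profile_img_url = None
    is_active = True
    is_superuser = False


assert User.from_orm(FakeOrmUser()).id == 7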
| 14.865385 | 46 | 0.667529 | 674 | 0.871928 | 0 | 0 | 0 | 0 | 0 | 0 | 25 | 0.032342 |