| blob_id (string, 40 chars) | directory_id (string, 40 chars) | path (string, 3–281 chars) | content_id (string, 40 chars) | detected_licenses (list, 0–57 items) | license_type (2 classes) | repo_name (string, 6–116 chars) | snapshot_id (string, 40 chars) | revision_id (string, 40 chars) | branch_name (313 classes) | visit_date (timestamp[us]) | revision_date (timestamp[us]) | committer_date (timestamp[us]) | github_id (int64, 18.2k–668M, nullable) | star_events_count (int64, 0–102k) | fork_events_count (int64, 0–38.2k) | gha_license_id (17 classes) | gha_event_created_at (timestamp[us]) | gha_created_at (timestamp[us]) | gha_language (107 classes) | src_encoding (20 classes) | language (1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 4–6.02M) | extension (78 classes) | content (string, 2–6.02M chars) | authors (list, 1 item) | author (string, 0–175 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
259cd5eaaa20071850043e7d7215f3ce6aebb6c9
|
02447b317690827683dc329153e74f1599e4db92
|
/wazimap_ng/general/views.py
|
cf915a8c7c7477aca9df7290d0a6c06d7c62058d
|
[
"Apache-2.0"
] |
permissive
|
neoromantique/wazimap-ng
|
fc8ca6704851db8d9941d3bcb9c06e367c2e1e94
|
c19e9450655f5d404c60e2b4d214715ec8a0b1d9
|
refs/heads/master
| 2021-02-11T00:02:54.001820
| 2020-02-13T20:36:40
| 2020-02-13T20:36:40
| 244,431,358
| 0
| 0
|
Apache-2.0
| 2020-03-02T17:23:51
| 2020-03-02T17:23:50
| null |
UTF-8
|
Python
| false
| false
| 1,482
|
py
|
from rest_framework.response import Response
from rest_framework.decorators import api_view
from ..datasets import models as dataset_models
from ..datasets import views as dataset_views
from ..boundaries import models as boundaries_models
from ..boundaries import views as boundaries_views
from ..utils import cache_decorator
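# Builds one consolidated payload: profile data, the geography's boundary, its children, and a child layer per parent.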
@cache_decorator("consolidated_profile")
def consolidated_profile_helper(profile_id, code):
profile_js = dataset_views.profile_geography_data_helper(profile_id, code)
boundary_js = boundaries_views.geography_item_helper(code)
children_boundary_js = boundaries_views.geography_children_helper(code)
parent_layers = []
parents = profile_js["geography"]["parents"]
children_levels = [p["level"] for p in parents[1:]] + [profile_js["geography"]["level"]]
pairs = zip(parents, children_levels)
for parent, children_level in pairs:
layer = boundaries_views.geography_children_helper(parent["code"])
parent_layers.append(layer[children_level])
return ({
"profile": profile_js,
"boundary": boundary_js,
"children": children_boundary_js,
"parent_layers": parent_layers,
})
@api_view()
def consolidated_profile(request, profile_id, code):
js = consolidated_profile_helper(profile_id, code)
return Response(js)
@api_view()
def consolidated_profile_test(request, profile_id, code):
js = consolidated_profile_helper(profile_id, code)
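# the computed payload is discarded; this endpoint seemingly just exercises the helper (and warms its cache)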
return Response("test")
|
[
"adi@openup.org.za"
] |
adi@openup.org.za
|
5efc50ceb6731db4235259a51c5915dd70f4ba85
|
1288ef00c7f77aa75c8d3281592dee6024a6115e
|
/Project 5.2 randomwalk GOOD part1.py
|
1bed70a41dd3eba55571f6d790e2af0e1dc3398b
|
[] |
no_license
|
SONGjiaxiu/CS-111
|
32ee85c6fcd28ba2e062e282f6df51bf206f0970
|
b80e7d185745aeb1185300231c43c7a21004b996
|
refs/heads/master
| 2020-04-13T18:56:14.065602
| 2016-11-22T15:22:15
| 2016-11-22T15:22:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,088
|
py
|
""" Taylor Heilman
Haley Nugent
Escaape function"""
import math
import turtle
import random
import matplotlib.pyplot as mpl
tortoise=turtle.Turtle()
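# a single module-level turtle shared by every simulation and drawing routine below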
def distance(x1,x2,y1,y2):
""" funciton to find the distance between the turtle object and the origin
parameters:
x1: x cordinate of first point
x2: x cordinate of second point
y1: y cordinate of first point
y2: y cordinate of second point
return: distance between the two points
"""
d = math.sqrt((x2-x1)**2 + (y2-y1)**2) #distance formula
return (d)
def angle (x, y):
""" function which finds the angle of the turtle object
parameters:
x: x coordinate of object
y: y coordinate of object
return: angle of object
"""
if x == 0: #avoid dividing by zero
x = 0.001
angle = math.degrees(math.atan(y/x))
if angle < 0:
if y < 0:
angle = angle + 360 #quadrant IV
else:
angle = angle +180 #quadrant II
elif y < 0:
angle = angle + 180 #quadrant III
return angle
def escape(openingDegrees, tortoise, draw):
""" escape function which randomly moves and object until it find the
escape angle
parameters:
openingDegrees: the angle in degrees of the escape area
tortoise: the turtle object
draw: a true or false value, if you want to draw the simulation or not
return: steps taken to escape
"""
x = y = 0 # initialize (x, y) = (0, 0)
radius = 1 # moving in unit radius circle
stepLength = math.pi / 128 # std dev of each step
steps = 0
if draw:
scale = 300 # scale up drawing
setupWalls(tortoise, openingDegrees, scale, radius)
escaped = False # has particle escaped yet?
while not escaped:
preX = x #
preY = y
x=x+random.gauss(0,stepLength) #random movement for x value
y=y+random.gauss(0,stepLength) #random movement for y value
d = distance(0, x, 0, y) # call upon distance function
if d > 1: #if the object is on the perimeter of the circle
ang = angle(x,y)
if ang < 360 and ang > 360-openingDegrees: #the object found the escape area
escaped = True
else: #the object didn't find the escape area, return to previous spot
x = preX
y = preY
steps = steps + 1 #keep track of steps
if draw:
tortoise.goto(x * scale, y * scale) # move particle
if draw:
screen = tortoise.getscreen() # update screen to compensate
screen.update() # for higher tracer value
return steps
def setupWalls(tortoise, openingDegrees, scale, radius):
""" a function to create a circle with the desired radius and eescape angle
parameters:
tortoise: the turtle object
openingDegrees: the escape angle size
scale: the scale of the circle being drawn
radius: radius of the circle being drawn
return: none
"""
screen = tortoise.getscreen()
screen.mode('logo') # east is 0 degrees
screen.tracer(0) # speed up drawing
tortoise.up() # draw boundary with
tortoise.width(0.015 * scale) # shaded background
tortoise.goto(radius * scale, 0)
tortoise.down()
tortoise.pencolor('lightyellow') #color of the area of the circle
tortoise.fillcolor('lightyellow')
tortoise.begin_fill()
tortoise.circle(radius * scale)
tortoise.end_fill()
tortoise.pencolor('black') #color of the perimeter of the circle
tortoise.circle(radius * scale, 360 - openingDegrees)
tortoise.up()
tortoise.home()
tortoise.pencolor('blue') # particle is a blue circle
tortoise.fillcolor('blue')
tortoise.shape('circle')
tortoise.shapesize(0.75, 0.75)
tortoise.width(1) # set up for walk
tortoise.pencolor('green')
tortoise.speed(0)
tortoise.down() # comment this out to hide trail
def rwMonteCarlo(i, trials):
"""A Monte Carlo simulation to find the expected distance that a random walk ends up from the origin.
Parameters:
i: the step to increase the escape angle size
trials: the number of random walks
Return value: the average distance from the origin
"""
totalDistance = 0 #set distance = 0
for trial in range(trials):
openingDegrees = i + t
distance = escape(openingDegrees,tortoise,False) #call upon escape function
totalDistance = totalDistance + distance #keep track of total steps
return openingDegrees, totalDistance / trials #return the average of steps
def Caginalp(openingDegrees):
""" a function which returns the value of the Caginalp function
Parameters:
openingdegrees: size of the escape angle
Return: value of the Caginalp function"""
angle = math.sin(math.radians(openingDegrees) / 4) # math.radians replaces the 3.14/180 approximation
#if angle <= 0:
#angle = angle + math.pi
#print(angle)
return 1/2 - 2*math.log(angle)
def plotEscapeSteps(minOpening, maxOpening, steps, trials):
"""Plots the average distances traveled by random walks for certain escape angle sizes
Parameters:
MinOpening: minimum escape angle size
maxOpening: maximum escape angle size
steps: steps to increase from minimum angle
trials: amount of times run for each angle size
Return value: none
"""
averagedistance = [] # initial value of lists
widths = []
z = []
anglelist = []
for i in range(minOpening,maxOpening,steps): #for loop increases
totalDistance = 0
for j in range(trials):
openingDegrees = i + steps/trials * j
distance = escape(openingDegrees,tortoise,False) #stopping the drawing of the simulation
totalDistance = totalDistance + distance #adding up total steps
anglelist.append(openingDegrees)
t = Caginalp(openingDegrees) #calling upon Caginal function
z.append(t)
dist = totalDistance/trials * ((math.pi /128) ** 2)
averagedistance.append(dist)
widths.append(i) #appending desired values
mpl.plot(widths, averagedistance, label = 'Simulation') #plotting graphs of values
mpl.legend(loc = 'center right')
mpl.xlabel('Opening Widths') # labeling x and y axis
mpl.ylabel('Average Number of Steps')
mpl.plot(anglelist, z, label = 'Caginalp')
mpl.show()
def main():
plotEscapeSteps(10,190,10,5000) # main function containing the plot function
main()
|
[
"noreply@github.com"
] |
noreply@github.com
|
5bb104e3ff4c5531bec4dfe4204d4e626ef4dc32
|
e86897d5780bc80c0746f21233b6f93f88f8016a
|
/PythonCode_Practice2/function201~210.py
|
937a45542f6db0ddefa41fa331c55a91446500b8
|
[] |
no_license
|
alstndhffla/PythonCode_Practice
|
8e6e030b75d62e4363a1041087fe73138d9c8392
|
e664802b9a5353cda1d44e929330930be125d785
|
refs/heads/master
| 2023-04-14T00:17:56.830120
| 2021-04-13T04:55:37
| 2021-04-13T04:55:37
| 344,238,777
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 484
|
py
|
# 201
def print_coin():
print("비트코인")
# 202
print_coin()
# 203
"""
call it 100 times
"""
print("-----------")
for i in range(1, 101):
print_coin()
# 204
print("-----------")
def print_coins():
# for i in range(1, 101): # this also works
for i in range(100):
print_coin()
print_coins()
# 209
print("-----------")
"""
try predicting the output
"""
def message1():
print("A")
def message2():
print("B")
message1()
message2()
|
[
"alstndhffla@gmail.com"
] |
alstndhffla@gmail.com
|
d5c55ee89e6eaf5651c721d1759f24bff2d67de8
|
d35b4e99e584abb052f6c6ee8e633ff80bbda990
|
/backend/app/app/models/user.py
|
5c028df7d1bab02675ee481f9d38f4a3126ddb52
|
[] |
no_license
|
montionugera/card_game
|
09b0ecda5f7241ff01bd4981807f42c3c11e1592
|
9bd67e3c76258b598a5714e479982255088af700
|
refs/heads/master
| 2023-04-19T06:18:45.006573
| 2021-04-25T03:43:19
| 2021-04-25T03:43:19
| 361,054,128
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 769
|
py
|
from typing import TYPE_CHECKING
from sqlalchemy import Boolean, Column, Integer, String
from sqlalchemy.orm import relationship
from app.db.base_class import Base
if TYPE_CHECKING:
from .item import Item # noqa: F401
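# NOTE: no __tablename__ is set here; the project's Base class presumably derives it from the class name.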
class User(Base):
id = Column(Integer, primary_key=True, index=True)
full_name = Column(String, index=True)
email = Column(String, unique=True, index=True, nullable=False)
hashed_password = Column(String, nullable=False)
is_active = Column(Boolean(), default=True)
is_superuser = Column(Boolean(), default=False)
items = relationship("Item", back_populates="owner")
card_games = relationship("CardGame", back_populates="owner")
best_score = relationship("BestScore", back_populates="user", uselist=False)
|
[
"montionugera@gmail.com"
] |
montionugera@gmail.com
|
b69f0a2e498e5ca5ef4367126e3697b6e381c9fb
|
564546e079bef8f6610a3c4cdff30fb2cea802bd
|
/scripts/delete_posts.py
|
233f076cab9cb6de63cc11d0c72552efe5a6df91
|
[] |
no_license
|
santhi-nyros/Tastypie-django
|
399dd55019b75b4e55a1ffbc9b6a34bd13d548aa
|
55d48b5f14c248a4be07363d49ec157526d12994
|
refs/heads/master
| 2021-01-13T03:08:34.939942
| 2016-12-27T09:20:25
| 2016-12-27T09:20:25
| 77,442,604
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 317
|
py
|
# scripts/delete_all_questions.py
from datetime import timedelta
from django.utils import timezone
from api.models import Comment,Post
def run(*args):
print(list(args)) # Python 3 print()
if list(args):
comments = Comment.objects.filter(id__in=list(args))
print(comments)
comments.delete()
else:
print("empty list given.")
|
[
"santhi_nyros@yahoo.com"
] |
santhi_nyros@yahoo.com
|
073ead4492da0cd5742377f402f6dddb6d8260bc
|
4eb5aeef510f827e864fe7beaf2ee52647663872
|
/day14/删除文件.py
|
9ce80500d2170d9034934fb1151d352797eaef57
|
[] |
no_license
|
Sunshine-Queen/Test
|
5d7127cc356dc7b52a2e249ec0c609d601ef6951
|
d36186d04de7076675f682f993b75f0797d64f86
|
refs/heads/master
| 2022-12-21T21:04:54.773926
| 2019-04-10T12:05:09
| 2019-04-10T12:05:09
| 157,209,473
| 0
| 1
| null | 2022-12-17T21:45:53
| 2018-11-12T12:22:04
|
Python
|
UTF-8
|
Python
| false
| false
| 42
|
py
|
import os
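# removes the file relative to the current working directory; raises an error if it is missing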
os.remove("heihei[复件].txt")
|
[
"1053710934@qq.com"
] |
1053710934@qq.com
|
2b66f779ad34d216561b67a4a62e5d69750079e3
|
869d917ef14fb8e4bb899a192903dd1f64028d2b
|
/train/train_street_view_regression.py
|
faa00a2a48b49ee081de9cbc395048edd88abcef
|
[] |
no_license
|
andreiqv/rotnet_not_my
|
bbd7fadba9c2e000d324e931d4fddc95ad8e4e25
|
ce0ea3f80aba263ae5fc54549c5d3d571d02ef59
|
refs/heads/master
| 2020-04-26T11:00:21.724905
| 2019-03-02T22:50:31
| 2019-03-02T22:50:31
| 173,502,216
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,469
|
py
|
from __future__ import print_function
import os
import sys
from keras.callbacks import ModelCheckpoint, EarlyStopping, TensorBoard
from keras.applications.resnet50 import ResNet50
from keras.applications.imagenet_utils import preprocess_input
from keras.models import Model
from keras.layers import Dense, Flatten
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils import angle_error_regression, RotNetDataGenerator
from data.street_view import get_filenames as get_street_view_filenames
data_path = os.path.join('data', 'street_view')
train_filenames, test_filenames = get_street_view_filenames(data_path)
print(len(train_filenames), 'train samples')
print(len(test_filenames), 'test samples')
model_name = 'rotnet_street_view_resnet50_regression'
# input image shape
input_shape = (224, 224, 3)
# load base model
base_model = ResNet50(weights='imagenet', include_top=False,
input_shape=input_shape)
# append classification layer
x = base_model.output
x = Flatten()(x)
final_output = Dense(1, activation='sigmoid', name='fc1')(x)
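# a single sigmoid unit: the network regresses a rotation angle normalized to [0, 1] instead of classifying discrete angles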
# create the new model
model = Model(inputs=base_model.input, outputs=final_output)
model.summary()
# model compilation
model.compile(loss=angle_error_regression,
optimizer='adam')
# training parameters
batch_size = 16 # was 64
nb_epoch = 50
output_folder = 'models'
if not os.path.exists(output_folder):
os.makedirs(output_folder)
# callbacks
checkpointer = ModelCheckpoint(
filepath=os.path.join(output_folder, model_name + '.hdf5'),
save_best_only=True
)
early_stopping = EarlyStopping(patience=0)
tensorboard = TensorBoard()
# training loop
model.fit_generator(
RotNetDataGenerator(
train_filenames,
input_shape=input_shape,
batch_size=batch_size,
one_hot=False,
preprocess_func=preprocess_input,
crop_center=True,
crop_largest_rect=True,
shuffle=True
),
steps_per_epoch=len(train_filenames) // batch_size,
epochs=nb_epoch,
validation_data=RotNetDataGenerator(
test_filenames,
input_shape=input_shape,
batch_size=batch_size,
one_hot=False,
preprocess_func=preprocess_input,
crop_center=True,
crop_largest_rect=True
),
validation_steps=len(test_filenames) // batch_size,
callbacks=[checkpointer, early_stopping, tensorboard],
workers=10, # Keras 2 name; Keras 1 called this nb_worker
use_multiprocessing=True, # Keras 2 name; Keras 1 called this pickle_safe
verbose=1
)
|
[
"phxv@mail.ru"
] |
phxv@mail.ru
|
848896a09b323f63b826a5d7d1a101d443ee8e97
|
eba3c6eb8d5ae18f3bcbb39df9b391a3d4a40ebd
|
/src/pod.py
|
0c5a188691b12a8dcdfcdd05b25d3cc6f487bd37
|
[] |
no_license
|
aleefrank/VPP-Simulator
|
294381d5dafe6316dc21180683110081bfbb91ca
|
44f2663c905d6c6ecdd098d05a74a9d66fb31650
|
refs/heads/main
| 2023-03-23T15:50:31.444004
| 2021-03-13T11:11:19
| 2021-03-13T11:11:19
| 347,332,585
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,558
|
py
|
from src.enums import ModelResolveMethod
from src.load import LoadT3, LoadT2
from src.profile import Profile
from src.pv import PV
from src.wind import Wind
from src.pod_solver import Solver
from src.storage import SimpleStorage
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
sns.set()
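# A Pod groups generation/load/storage profiles; with more than one profile it delegates flexibility optimization to Solver.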
class Pod(object):
def __init__(self, init_list=None, profiles=None, name=None):
self.name = name
self.profiles = []
self.solver = None
self.resolve_method = None
self.to_optimize = False
if init_list is not None:
if isinstance(init_list, list):
self.set_profiles(init_list)
else:
self.set_profiles([init_list])
def set_name(self, name):
self.name = name
def get_composition(self):
lst = []
tmp = []
res = '['
for p in self.profiles:
lst.append(p.profile_type.value) # if p.profile_type.value not in lst else lst
for p in self.profiles:
if p.profile_type.value not in tmp:
tmp.append(p.profile_type.value)
res += ' ' + str(lst.count(p.profile_type.value)) + ' ' + p.profile_type.value + ','
res = res[:len(res) - 1]
res += ' ]'
return res
def add_profile(self, profile):
if isinstance(profile, Profile):
# Overwrite Storage profile if already exist
if isinstance(profile, SimpleStorage):
self.profiles = [p for p in self.profiles if not isinstance(p, SimpleStorage)]
self.profiles.append(profile)
if not self.to_optimize:
if len(self.profiles) > 1:
self.to_optimize = True
else:
raise Exception('{} passed argument is not of type Profile'.format(profile))
def set_profiles(self, profiles: [Profile]):
try:
for p in profiles:
self.add_profile(p)
if not self.to_optimize:
if len(self.profiles) > 1:
self.to_optimize = True
except:
raise Exception('Profiles is not iterable')
def resolve(self, model_resolve_method=ModelResolveMethod.MINIMIZE_AND_MAXIMIZE, print_results=False,
print_graphs=False, tee=False, pprint=False, per_allegra=False):
if not self.to_optimize:
return self
self.resolve_method = model_resolve_method
self.solver = None
self.solver = Solver(self.profiles)
self.solver.resolve(model_resolve_method, print_results, tee, pprint)
self.__fix_flexibility_bounds()
#self.__fix_baseline()
if print_graphs:
if model_resolve_method == ModelResolveMethod.MINIMIZE:
self.print_graph('minimized')
elif model_resolve_method == ModelResolveMethod.MAXIMIZE:
self.print_graph('maximized')
elif model_resolve_method == ModelResolveMethod.MINIMIZE_AND_MAXIMIZE:
self.print_graph('minimized')
self.print_graph('maximized')
self.print_graph_2()
if per_allegra:
self.graphs_for_allegra()
return self
# Minimized Flexibility value cannot exceed Maximized Flexibility value
def __fix_flexibility_bounds(self):
self.solver.results['maximized']['grid'] = [max(x, y) for x, y in
zip(self.solver.results['minimized']['grid'],
self.solver.results['maximized']['grid'])]
# Baseline value cannot exceed Flexibility bounds (misscalculated because SimpleStorage has no baseline)
def __fix_baseline(self):
print(self.solver.results['baseline'])
self.solver.results['baseline'] = [i if b > i else b for i, b in zip(self.solver.results['maximized']['grid'],
self.solver.results['baseline'])]
self.solver.results['baseline'] = [i if b < i else b for i, b in zip(self.solver.results['minimized']['grid'],
self.solver.results['baseline'])]
def needs_local_optimization(self):
return self.to_optimize
def get_flexibility(self, method):
if not self.needs_local_optimization():
p = self.profiles[0]
return p.get_flexibility(method)
else:
if self.solver is None:
print('This Pod has to be optimized. Run \'resolve()\' to get the flexibility.')
else:
return self.solver.results[method]['grid']
def get_cost(self, method):
if not self.needs_local_optimization():
p = self.profiles[0]
return [0] * 96 # p.get_costs(method) TODO
else:
if self.solver is None:
print('Before getting the costs you have to call \'resolve()\' and resolve the optimization.')
else:
return self.solver.results[method]['cost']
def get_opt_time(self, method):
if not self.needs_local_optimization():
return '-' # no time
else:
if self.solver is None:
print('Before getting the costs you have to call \'resolve()\' and resolve the optimization.')
else:
return self.solver.results[method]['time']
def get_costs_old(self):
if self.solver is None:
print('Before getting the costs you have to call \'resolve()\' and resolve the optimization.')
else:
if self.resolve_method == ModelResolveMethod.MINIMIZE:
return self.solver.results[self.resolve_method]['cost']
elif self.resolve_method == ModelResolveMethod.MAXIMIZE:
return self.solver.results[self.resolve_method]['cost']
elif self.resolve_method == ModelResolveMethod.MINIMIZE_AND_MAXIMIZE:
return [self.solver.results['minimized']['cost'], self.solver.results['maximized']['cost']]
def print_graph(self, method):
fig, ax = plt.subplots(figsize=(10, 7))
ax.set(xlabel='Timestamp (t)', ylabel='Active Power (W)',
title='Model Results - {} - {}'.format(method, self.solver.results[method]['solution_value']))
plt.xticks(range(0, 96, 5))
if self.solver.data['n_load_t1'] > 0:
ax.plot(sum(l.profile for l in [p for p in self.solver.l1_array]), label='L1-Total')
if self.solver.data['n_load_t2'] > 0:
ax.plot(sum(l.profile for l in [p for p in self.solver.l2_array]), label='L2-Total')
ax.plot(sum(l.profile for l in [p for p in self.solver.l2_array]) + np.array(
self.solver.results[method]['load_t2_shift']).sum(axis=0), label='L2-Shifted')
if self.solver.data['n_load_t3'] > 0:
ax.plot(sum(l.profile for l in [p for p in self.solver.l3_array]), label='L3-Total')
ax.plot(sum(l.profile for l in [p for p in self.solver.l3_array]) + np.array(
self.solver.results[method]['load_t3_shift']).sum(axis=0), label='L3-Shifted')
if self.solver.data['n_chp'] > 0:
ax.plot(sum(chp.profile for chp in [p for p in self.solver.chp_array]), label='CHP-Total')
ax.plot(sum(chp.profile for chp in [p for p in self.solver.chp_array]) + np.array(
self.solver.results[method]['chp_shift']).sum(axis=0), label='CHP-Shifted')
if self.solver.data['n_pv'] > 0:
ax.plot(sum(l.profile for l in [p for p in self.solver.pv_array]), label='PV-Total')
ax.plot(sum(l.profile for l in [p for p in self.solver.pv_array]) + np.array(
self.solver.results[method]['pv_shift']).sum(axis=0), label='PV-Shifted')
if self.solver.data['n_wind'] > 0:
ax.plot(sum(l.profile for l in [p for p in self.solver.wind_array]), label='WIND-Total')
ax.plot(sum(l.profile for l in [p for p in self.solver.wind_array]) + np.array(
self.solver.results[method]['wind_shift']).sum(axis=0), label='WIND-Shifted')
if self.solver.data['storage'] > 0:
ax.plot(self.solver.results[method]['storage_charge'], label='Storage')
# ax.plot(self.sum_shifted(method), label='Shifted')
ax.plot(self.sum_baseline(), label='Total Baseline')
ax.plot(self.solver.results[method]['grid'], label='Grid')
plt.legend(bbox_to_anchor=(1, 1), loc=1, borderaxespad=0.3)
plt.show()
def print_graph_2(self):
fig, ax = plt.subplots(figsize=(10, 7))
ax.set(xlabel='Timestamp (t)', ylabel='Active Power (W)', title='Model Grid Results')
plt.xticks(range(0, 96, 5))
ax.plot(self.solver.results['minimized']['grid'], label='Grid (minimized)')
ax.plot(self.solver.results['baseline'], label='Total Baseline')
ax.plot(self.solver.results['maximized']['grid'], label='Grid (maximized)')
plt.legend(bbox_to_anchor=(1, 1), loc=1, borderaxespad=0.3)
plt.show()
def graphs_for_allegra(self):
for index, p in enumerate(self.profiles):
fig, ax = plt.subplots(figsize=(10, 7))
plt.xticks(range(0, 96, 5))
if isinstance(p, PV):
ax.set(xlabel='Timestamp (t)', ylabel='Active Power (W)', title='PV Optimized')
ax.plot(p.profile, label=str(str(p.profile_type) + str(index)))
ax.plot(p.profile + self.solver.results['minimized']['pv_shift'][index], label='minimized')
ax.plot(p.profile + self.solver.results['maximized']['pv_shift'][index], label='maximized')
if isinstance(p, Wind):
ax.set(xlabel='Timestamp (t)', ylabel='Active Power (W)', title='Wind Optimized')
ax.plot(p.profile, label=str(str(p.profile_type) + str(index)))
ax.plot(p.profile + self.solver.results['minimized']['wind_shift'][0], label='minimized')
ax.plot(p.profile + self.solver.results['maximized']['wind_shift'][0], label='maximized')
if isinstance(p, SimpleStorage):
ax.set(xlabel='Timestamp (t)', ylabel='Active Power (W)', title='Storage Optimized')
ax.plot(self.solver.results['minimized']['storage_charge'], label='minimized')
ax.plot(self.solver.results['maximized']['storage_charge'], label='maximized')
if isinstance(p, LoadT2):
ax.set(xlabel='Timestamp (t)', ylabel='Active Power (W)', title='L2 Optimized')
ax.plot(p.profile, label=str(str(p.profile_type) + str(index)))
ax.plot(p.profile + self.solver.results['minimized']['load_t2_shift'][0], label='minimized')
ax.plot(p.profile + self.solver.results['maximized']['load_t2_shift'][0], label='maximized')
if isinstance(p, LoadT3):
ax.set(xlabel='Timestamp (t)', ylabel='Active Power (W)', title='L3 Optimized')
ax.plot(p.profile, label=str(str(p.profile_type) + str(index)))
ax.plot(p.profile + self.solver.results['minimized']['load_t3_shift'][0], label='minimized')
ax.plot(p.profile + self.solver.results['maximized']['load_t3_shift'][0], label='maximized')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.show()
def sum_shifted(self, method):
shifted_list = []
if self.solver.data['n_load_t1'] > 0:
shifted_list.append(
sum(l.profile for l in [p for p in self.solver.l1_array])) # L1-Total - because l1 not shiftable
if self.solver.data['n_load_t2'] > 0:
shifted_list.append(sum(l.profile for l in [p for p in self.solver.l2_array]) + np.array(
self.solver.results[method]['load_t2_shift']).sum(axis=0)) # L2-Shifted
if self.solver.data['n_load_t3'] > 0:
shifted_list.append(sum(l.profile for l in [p for p in self.solver.l3_array]) + np.array(
self.solver.results[method]['load_t3_shift']).sum(axis=0)) # L3-Shifted
if self.solver.data['n_chp'] > 0:
shifted_list.append(sum(chp.profile for chp in [p for p in self.solver.chp_array]) + np.array(
self.solver.results[method]['chp_shift']).sum(axis=0)) # CHP-Shifted
if self.solver.data['n_pv'] > 0:
shifted_list.append(sum(l.profile for l in [p for p in self.solver.pv_array]) - np.array(
self.solver.results[method]['pv_shift']).sum(axis=0)) # PV-Shifted
if self.solver.data['n_wind'] > 0:
shifted_list.append(sum(l.profile for l in [p for p in self.solver.wind_array]) - np.array(
self.solver.results[method]['wind_shift']).sum(axis=0)) # WIND-Shifted
# if self.solver.data['storage'] > 0:
# shifted_list.append([-1*x for x in self.solver.results[method]['storage_charge']]) # Storage
shifted_total = [sum(x) for x in zip(*shifted_list)]
return shifted_total
def sum_baseline(self):
baseline_list = [p.profile for p in self.profiles]
baseline_total = [sum(x) for x in zip(*baseline_list)]
return baseline_total
|
[
"alessandro.f95@live.it"
] |
alessandro.f95@live.it
|
1a8c0082d7e1b3c37b764106167cf493af3e10c5
|
9188138de9e8b4e46750476de0e72472087ce4bf
|
/Student/mysql.py
|
c5493b626daea2b8bff8fe5082afa981ce18aecd
|
[] |
no_license
|
Rjsetter/LearningPython
|
91493a7b0faf02301d54ba0fb73a812611c35aa7
|
74c672e270f2280699aacd480cd13f0ebd60a40a
|
refs/heads/master
| 2020-03-23T09:24:21.913141
| 2018-12-17T10:20:31
| 2018-12-17T10:20:31
| 141,385,712
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,978
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Datetime: 2018-7-28 20:14
Author: 叶强
Version: MySQL-backed student management system v1.0
"""
import pymysql, time
import datetime
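# each helper below opens a fresh connection and closes it in its finally block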
def connect_db():
return pymysql.connect(host="127.0.0.1",
port = 3306,
user = "root",
password = "mysql",
database = "Python"
)
def insert(sql):
"""插入操作"""
con = connect_db()
cursor = con.cursor()
try:
cursor.execute(sql)
con.commit()
except:
con.rollback()
# logging.exception("Insert operation error")
raise
finally:
cursor.close()
con.close()
def insert2student():
sid = input("请输入学生的学号:")
sname = input("请输入学生的姓名:")
sage = input("请输入学生的年龄:")
sgender = input("请输入学生的性别:")
sphone = input("请输入学生的电话:")
sql = """insert into student_(sid,sname,sage,sgender,sphone)values
('%s','%s','%d','%s','%d')"""%(sid, sname, int(sage),sgender,int(sphone))
confirm = input("确认添加吗?(yes/no):")
if confirm == "yes":
insert(sql)
def search(sql):
"""多出复用"""
"""接收表名,获取表的所有信息,返回含有表内所有信息的元组对象"""
con = connect_db()
cur = con.cursor()
try:
cur.execute(sql)
# fetch all rows of the result set; each row is a tuple, so a nested tuple comes back
content = cur.fetchall()
except:
#logging.exception("Search operation error")
raise
finally:
cur.close()
con.close()
return content
def del_info(sql):
"""删除信息"""
con = connect_db()
cur = con.cursor()
try:
cur.execute(sql)
con.commit()
print("delete success")
except:
#logging.exception("Search operation error")
print("delete error")
raise
finally:
cur.close()
con.close()
def update(sql_update):
"""更新学生信息"""
con = connect_db()
cur = con.cursor()
try:
cur.execute(sql_update)
con.commit()
print("update success")
except:
#logging.exception("Search operation error")
print("update failed")
raise
finally:
cur.close()
con.close()
def show_student(sql):
# search() returns a nested tuple
Tuple = search(sql)
if Tuple:
print("+++++++++++++++++++++++++++++++++++++++++++++++++")
print("| 学号 | 姓名 | 年龄| 性别 | 电话 |")
print("+++++++++++++++++++++++++++++++++++++++++++++++++")
for tuple_ in Tuple:
# print(tuple_)
print("|{0:^11}| {1:\u3000^ 4}| {2:^4}| {3:^4}| {4:^11}|".format(tuple_[1],tuple_[2],tuple_[3],tuple_[4],tuple_[5]))
print("+++++++++++++++++++++++++++++++++++++++++++++++++")
else:
print("目前学生库里没有学生")
def show_all_student():
"""打印所有学生信息"""
sql = "select * from student_"
show_student(sql)
def search_student():
"""按您选择的方式查询学生"""
flag = True
while flag:
print("-"*20)
print("您想如何查询?")
print("1.学号")
print("2.姓名")
print("3.打印所有学生信息")
print("4.退出")
print("*****功能待添加*****")
print("-"*40)
select = input("请输入想查询的方式:")
if select == '1':
num = input("请输入要查询学号:")
sql = "select * from student_ where sid = %s"%num
show_student(sql)
elif select == '2':
name = str(input("请输入要查询名字:"))
sql = "select * from student_ where sname = '"+name+"'"
show_student(sql)
elif select == '3':
show_all_student()
elif select == '4':
print("查询结束,欢迎再次使用!")
flag = False
else:
print("请输入正确的选择!")
# cosmetic: wait for Enter before showing the next screen
print("操作结束,回车进入下一页面")
fl = input()
if fl == '\n':
pass
def manage_student():
"""管理学生 增、删、改"""
flag = True
while flag:
print("+"*20)
print("学生管理界面")
print("1.增加学生")
print("2.删除学生")
print("3.修改学生")
print("4.退出学生管理界面")
print("+"*20)
select = input("请输入你的选择:")
if select == '1':
insert2student()
elif select == '2':
show_all_student()
sid_ = input("请输入要删除的学生的学号:")
sql_ = 'DELETE FROM student_ WHERE sid = %s'%sid_
del_info(sql_)
elif select == '3':
show_all_student()
stu_id = input("请输入你想要修改的学生的学号:")
sid = input("请输入学生的学号:")
sname = input("请输入学生的姓名:")
sage = input("请输入学生的年龄:")
sgender = input("请输入学生的性别:")
sphone = input("请输入学生的电话:")
sql_update = "update student_ set sid = %s,sname = '"%(sid)+sname+"'"+",sage = %d, sgender = '"%(int(sage))+sgender+"'"+", sphone = %d where sid = %s"%(int(sphone),stu_id)
update(sql_update)
elif select == '4':
print("管理结束,退出管理系统")
flag = False
else:
print("请输入正确的选择!")
print("+"*25)
print("操作结束,回车进入下一页面")
print("+"*25)
fl = input()
if fl == '\n':
pass
def menu():
"""用户可视窗口,即主菜单"""
datetime_dt = datetime.datetime.today() # 获取当前日期和时间
datetime_str = datetime_dt.strftime("%Y-%m-%d %H:%M:%S") # 格式化日期时间
print("+++++++++++++++++++++++++++++++++++++++++++++++")
print("+ 学生管理系统2.0(mysql版本) +")
print("+++++++++++++++++++++++++++++++++++++++++++++++")
print("+\t 1.管理学生 +")
print("+\t 2.查看学生 +")
print("+\t 3.学生选课 +")
print("+\t 4.学生选课情况 +")
print("+\t 5.查改课程 +")
print("+\t 6.打分程序 +")
print("+\t 7.查分 +")
print("+\t 8.退出系统 +")
print("+++++++++++++++++++++++++++++++++++++++++++++++")
print("+{0: ^45}+".format(datetime_str))
print("+++++++++++++++++++++++++++++++++++++++++++++++")
if __name__ == '__main__':
Flag = True
while Flag:
menu()
select = input("您想要进行什么操作?")
print("------------------------------")
if select == '1':
manage_student()
elif select == '2':
search_student()
# addCourse()
elif select == '3':
pass
elif select == '4':
"""打印学生选课信息"""
pass
# showStu_Course()
elif select == '5':
pass
# checkCourse()
# dealCourse()
elif select == '6':
"""给学生的课程打分"""
pass
# makeScore()
elif select == '7':
pass
# showScore()
elif select == '8':
Flag = False
print("谢谢您使用我们的学生系统!欢迎再次使用~")
else:
print("请输入正确的选择!")
|
[
"1039339929@qq.com"
] |
1039339929@qq.com
|
794f3abbe9fca363cc78d142151697bd2f6917bf
|
9bf5d10a4644ebb4b788233cae2e82ad9515d433
|
/Bank/settings.py
|
c7b4ff3d587217be41a204660a47977d685de06f
|
[] |
no_license
|
Mohitkashyap123/Bank
|
2ea3d808406d1fa06a7a0e9e2a005d45dc53810f
|
8f6d3b509a6945ba7eaecedc0fe5287bc769c8f6
|
refs/heads/master
| 2022-12-17T03:39:29.581970
| 2020-09-26T07:04:53
| 2020-09-26T07:04:53
| 276,882,798
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,121
|
py
|
"""
Django settings for Bank project.
Generated by 'django-admin startproject' using Django 3.0.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'x^1e=5%0gik&6=5c!9a$%7t^dztyz-p0^n+*&qv7!b30h+20w!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'info.apps.InfoConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Bank.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Bank.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
|
[
"mohitkashyap656@gmail.com"
] |
mohitkashyap656@gmail.com
|
fe0a66dc75df9851dd1126510d15ca89dad1ec91
|
bcb4da89ab0513c095158ffc7a9a1b6a6bdfd435
|
/app/routes.py
|
5daa74e8a7400c72eb6c7749c3d639d877b51db4
|
[] |
no_license
|
marino2009/Flask
|
bec55230e0cba0c289b8252e2e92e528b5b9fdee
|
b94b1e7107e32124b21ce4edb3891332927d42eb
|
refs/heads/master
| 2020-04-18T13:01:28.987967
| 2019-01-30T01:48:36
| 2019-01-30T01:48:36
| 167,551,318
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,947
|
py
|
from flask import render_template, flash, redirect, url_for, request
from app import app, db
from app.forms import LoginForm, RegistrationForm, EditProfileForm
from flask_login import current_user, login_user, logout_user, login_required
from app.models import User
from werkzeug.urls import url_parse
from datetime import datetime
@app.route('/')
@app.route('/index')
@login_required
def index():
user = {'username': 'Marino'}
posts = [
{
'author': {'username': 'John'},
'body': 'Beautiful day in Portland435465!'
},
{
'author': {'username': 'Susan'},
'body': 'The Avengers movie was so cool!'
},
]
return render_template('index.html', title='Home', user=user, posts=posts)
@app.route('/login', methods=['GET', 'POST'])
def login():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.username.data).first()
if user is None or not user.check_password(form.password.data):
flash('Invalid username or password')
return redirect(url_for('login'))
login_user(user, remember=form.remember_me.data)
next_page = request.args.get('next')
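# only follow relative targets: a non-empty netloc means an absolute URL, which would allow an open redirect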
if not next_page or url_parse(next_page).netloc != '':
next_page = url_for('index')
return redirect(next_page)
return render_template('login.html', title='Sign In', form=form)
@app.route('/logout')
def logout():
logout_user()
return redirect(url_for('index'))
@app.route('/register', methods=['GET', 'POST'])
def register():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = RegistrationForm()
if form.validate_on_submit():
user = User(username=form.username.data, email=form.email.data)
user.set_password(form.password.data)
db.session.add(user)
db.session.commit()
flash('Congratulations, you are now a registered user!')
return redirect(url_for('login'))
return render_template('register.html', title='Register', form=form)
@app.route('/user/<username>')
@login_required
def user(username):
user = User.query.filter_by(username=username).first_or_404()
posts = [
{'author': user, 'body': 'Test post #1'},
{'author': user, 'body': 'Test post #2'}
]
return render_template('user.html', user=user, posts=posts)
@app.before_request
def before_request():
if current_user.is_authenticated:
current_user.last_seen = datetime.utcnow()
db.session.commit()
@app.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm()
if form.validate_on_submit():
current_user.username = form.username.data
current_user.about_me = form.about_me.data
db.session.commit()
flash('Your changes have been saved.')
return redirect(url_for('edit_profile'))
elif request.method == 'GET':
form.username.data = current_user.username
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', title='Edit Profile', form=form)
|
[
"45363000+marino2009@users.noreply.github.com"
] |
45363000+marino2009@users.noreply.github.com
|
87c7524501017490341a86012b5d7364f04aacde
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_54/78.py
|
1e0afea1344679e1079ae74d8bb54a891e5ad167
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 956
|
py
|
def gcd(a,b):
while (b != 0):
c = a%b
a = b
b = c
return a
def get_gcd(line):
g = line[0]
cnt = len(line)
for i in range(1,cnt):
g = gcd(g,line[i])
return g
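# the answer is the smallest non-negative wait that makes the first value a multiple of the gcd of all gaps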
def solve(line):
N = int(line.pop(0))
for i in range(0,N):
line[i] = int(line[i])
line.sort()
diffs = list()
for i in range(0,N-1):
diff = line[i+1] - line[i]
diffs.append(diff)
g = pg = get_gcd(diffs)
if g < line[0]:
g = line[0] // pg * pg # integer division (keeps the Python 2 semantics under Python 3)
if line[0] % pg != 0:
g += pg
ans = g - line[0]
return ans
AnsT = ""
myfile = open("B.in")
T = int(myfile.readline())
for i in range(0,T):
line = myfile.readline()
line = line.split("\n")
print(i)
ans = solve(line[0].split(" "))
AnsT = AnsT + "Case #"+ str(i+1) +": "+str(ans) + "\n"
outfile = open("B.out","w")
outfile.write(AnsT)
outfile.close()
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
fdb8fc4c86a750baa500c7ee03cbb74671b28f35
|
ebd5c4632bb5f85c9e3311fd70f6f1bf92fae53f
|
/Sourcem8/pirates/minigame/RepairGlobals.py
|
8027e62d79153e2436b77a14e3c56012b7f68cec
|
[] |
no_license
|
BrandonAlex/Pirates-Online-Retribution
|
7f881a64ec74e595aaf62e78a39375d2d51f4d2e
|
980b7448f798e255eecfb6bd2ebb67b299b27dd7
|
refs/heads/master
| 2020-04-02T14:22:28.626453
| 2018-10-24T15:33:17
| 2018-10-24T15:33:17
| 154,521,816
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,950
|
py
|
from pandac.PandaModules import Vec3, Vec4, Point3
class VariableContainer:
def __init__(self):
pass
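# plain namespaces holding the tuning constants for the at-sea repair minigames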
AI = VariableContainer()
AI.goldRewardRange = (15, 35)
AI.goldRewardMultiplier = [
(14.0, 3.0),
(18.0, 2.5),
(24.0, 2.0),
(36.0, 1.6000000000000001),
(52.0, 1.3),
(72.0, 1.1499999999999999)]
AI.repairRewardRange = (5000, 1000)
AI.grapeshotEffectCooldown = 2.0
AI.grapeshotEffectProbability = 0.5
AI.kickedTimestampLife = 60.0 * 60.0
AI.inactiveClientKickTime = 60.0 * 2.0 + 2.0
AI.numTimesKickedBeforeBlacklisted = 3
AI.maxPlayersPerBench = 5
AI.baseRepairAmount = 0.5
AI.maxRepairCount = 30
AI.reductionAtFullRepair = 0.5
AI.maxCombatCount = 20
AI.reductionAtFullCombat = 0.5
AI.critGrapeshotCombatDebuff = 3
AI.grapeshotCombatDebuff = 3
AI.regularCombatDebuff = 1
AI.totalDifficulty = AI.maxRepairCount + AI.maxCombatCount
AI.difficultyIncreasePoint = AI.totalDifficulty / 10.0
AI.repairDebuffPerModelClass = {
1: 1.0,
2: 1.0,
3: 1.0,
11: 1.0,
12: 1.0,
13: 1.0,
21: 1.0,
22: 1.0,
23: 1.0,
24: 1.0,
25: 1.0,
26: 1.0,
27: 1.0 }
AI.sailRepairPercent = 0.14999999999999999
AI.armorRepairPercent = 0.14999999999999999
AI.hpRepairPercent = 0.40000000000000002
AI.hpTertiaryDecay = 0.0
Common = VariableContainer()
Common.guiShakeCooldownTime = 2.0
Common.youWinPos = {
'careening': (-0.12, 0.0, 0.22),
'pumping': (0.0, 0.0, 0.14999999999999999),
'sawing': (0.0, 0.0, 0.14999999999999999),
'bracing': (0.0, 0.0, 0.22),
'hammering': (0.0, 0.0, 0.38),
'pitching': (0.0, 0.0, 0.22) }
Common.scorePos = {
'careening': (-0.12, 0.0, 0.089999999999999997),
'pumping': (0.0, 0.0, 0.02),
'sawing': (0.0, 0.0, 0.02),
'bracing': (0.0, 0.0, 0.089999999999999997),
'hammering': (0.0, 0.0, 0.25),
'pitching': (0.0, 0.0, 0.089999999999999997) }
Common.speedThresholds = {
'careening': [
(5.0, 15.0),
(10.0, 30.0),
(20.0, 90.0)],
'pumping': [
(10.0, 13.0),
(20.0, 40.0),
(40.0, 90.0)],
'sawing': [
(6.0, 9.0),
(12.0, 18.0),
(30.0, 45.0)],
'bracing': [
(5.0, 15.0),
(30.0, 45.0),
(90.0, 180.0)],
'hammering': [
(5.0, 10.0),
(10.0, 20.0),
(20.0, 40.0)],
'pitching': [
(8.0, 16.0),
(16.0, 32.0),
(32.0, 64.0)] }
Careening = VariableContainer()
Careening.barnacleCountRange = (15, 30)
Careening.superScrubMultiplier = 4.0
Careening.superScrubDecreaseRate = 0.40000000000000002
Careening.superScrubIncreaseRate = 0.80000000000000004
Careening.barnacleHPRange = (30, 70)
Careening.barnacleHPScaleRange = (1.0, 3.0)
Careening.xRange = (-0.61499999999999999, 0.375)
Careening.yRange = (-0.16500000000000001, 0.51500000000000001)
Careening.barnacleRadius = 0.040000000000000001
Careening.mossPercentage = 0.75
Careening.mossPosVariance = 0.01
Careening.mossEdgeRestrictionAmount = 0.10000000000000001
Careening.showBarnacleHP = False
Pumping = VariableContainer()
Pumping.pumpPowerRange = (0.059999999999999998, 0.02)
Pumping.hitRange = (0.17999999999999999, 0.17999999999999999)
Pumping.barStartRange = (1.2, 1.0)
Pumping.barSpeedMin = 2.0
Pumping.barSpeedMax = 0.29999999999999999
Pumping.barSpeedIncrease = 1.25
Pumping.barSpeedDecrease = 0.80000000000000004
Pumping.chainMultiplier = 0.080000000000000002
Sawing = VariableContainer()
Sawing.difficultySets = ((3, 3, 1, 1), (3, 1, 1, 2), (1, 2, 1, 2), (3, 1, 2, 2), (2, 2, 1, 2), (3, 2, 1, 4), (2, 4, 3, 2), (4, 2, 1, 2), (4, 1, 1, 5), (2, 2, 4, 5))
Sawing.waypointRange = (0.080000000000000002, 0.080000000000000002, 0.080000000000000002, 0.11, 0.10000000000000001)
Sawing.sawlineColor = Vec4(0.75, 0.75, 0.75, 0.69999999999999996)
Sawing.sawlineLineThickness = 4.0
Sawing.sawlineLinespawnDist = 0.02
Sawing.testWaypointDelta = 0.040000000000000001
Sawing.playSawingSoundDelta = 0.10000000000000001
Sawing.totalPoints = 20.0
Sawing.pointsPerBoard = 7.0
Sawing.pointsLostForZone1 = 4.0
Sawing.pointsLostForZone2 = 1.0
Sawing.cutColor = (0.29999999999999999, 0.29999999999999999, 0.29999999999999999, 1.0)
Sawing.zone1Color = (0.75, 0.75, 0.75, 1.0)
Sawing.zone2Color = (0.75, 0.75, 0.75, 1.0)
Sawing.sawTurnSpeed = 1000
Sawing.newBoardAnimTime = 0.25
Sawing.splitBoardAnimTime = 0.5
Sawing.activeBoardPosition = (0.0, 0.0, 0.10000000000000001)
Sawing.boardYDist = 1.3
from RepairGridPiece import GOAL_HORIZ_1, GOAL_HORIZ_2, GOAL_VERT_1
Bracing = VariableContainer()
Bracing.difficultyLevels = ((8, (GOAL_HORIZ_1,)), (7, (GOAL_HORIZ_1,)), (6, (GOAL_HORIZ_1,)), (7, (GOAL_HORIZ_1, GOAL_VERT_1)), (6, (GOAL_HORIZ_1, GOAL_VERT_1)), (5, (GOAL_HORIZ_1, GOAL_VERT_1)), (4, (GOAL_HORIZ_1, GOAL_VERT_1)), (5, (GOAL_HORIZ_1, GOAL_HORIZ_2)), (4, (GOAL_HORIZ_1, GOAL_HORIZ_2)), (3, (GOAL_HORIZ_1, GOAL_HORIZ_2)))
Bracing.moveTime = 0.080000000000000002
Bracing.fadeTime = 0.14999999999999999
Bracing.movePieceThreshold = 0.080000000000000002
Bracing.pushPieceThreshold = 0.01
Bracing.repairTimeframe = 20
Hammering = VariableContainer()
Hammering.reticleScaleRange = (0.20000000000000001, 1.0)
Hammering.reticleScaleRate = 1.0
Hammering.recoveryTime = 4.0
Hammering.nailCountRange = (4, 8)
Hammering.rankingThresholds = (5, 4, 3, 2, 1)
Hammering.hitForgiveness = 0.10000000000000001
Hammering.useReticleColor = True
Pitching = VariableContainer()
Pitching.leakScaleRange = (0.10000000000000001, 0.27500000000000002)
Pitching.spawnDelayRange = (0.5, 0.10000000000000001, 2.0, 1.0)
Pitching.leakCountRange = (16, 32)
Pitching.maxLeaksRange = (2, 5)
Pitching.useReticle = True
Pitching.ratingGive = 0
REPAIR_AT_SEA_REWARD_RATING = [
0,
1,
1,
1.5,
2.0]
REPAIR_AT_SEA_GAME_MULTIPLIER = [
20,
60,
200,
40,
20]
def getAtSeaRepairRating(rating, gameType):
if rating > 4 or rating < 0:
rating = 0
return REPAIR_AT_SEA_REWARD_RATING[rating] * REPAIR_AT_SEA_GAME_MULTIPLIER[gameType]
|
[
"brandoncarden12345@gmail.com"
] |
brandoncarden12345@gmail.com
|
3e7c790c56f14ea782d02ca44526b8f07db60168
|
f76bdfd886ce116fdfeea408d7251142ed73d7c4
|
/dash/_validate.py
|
76047242a6a08c325706708be192ed92f0639e4a
|
[
"MIT"
] |
permissive
|
pikhovkin/dj-plotly-dash
|
73a4a679472eddfbb56c44ca054040b64b6a57a4
|
25efb612ead04bf3564c25b994dc633929eec457
|
refs/heads/master
| 2023-08-10T22:08:30.363654
| 2022-05-03T12:23:56
| 2022-05-03T12:23:56
| 151,003,269
| 53
| 16
|
MIT
| 2023-09-04T20:56:53
| 2018-09-30T20:18:22
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 12,126
|
py
|
import collections
import re
from .development.base_component import Component
from . import exceptions
from ._utils import patch_collections_abc, _strings, stringify_id
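# checks that callback arguments are Output/Input/State objects declared in that order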
def validate_callback(output, inputs, state, extra_args, types):
is_multi = isinstance(output, (list, tuple))
outputs = output if is_multi else [output]
Input, Output, State = types
if extra_args:
if not isinstance(extra_args[0], (Output, Input, State)):
raise exceptions.IncorrectTypeException(
"""
Callback arguments must be `Output`, `Input`, or `State` objects,
optionally wrapped in a list or tuple. We found (possibly after
unwrapping a list or tuple):
{}
""".format(
repr(extra_args[0])
)
)
raise exceptions.IncorrectTypeException(
"""
In a callback definition, you must provide all Outputs first,
then all Inputs, then all States. After this item:
{}
we found this item next:
{}
""".format(
repr((outputs + inputs + state)[-1]), repr(extra_args[0])
)
)
for args in [outputs, inputs, state]:
for arg in args:
validate_callback_arg(arg)
def validate_callback_arg(arg):
if not isinstance(getattr(arg, "component_property", None), _strings):
raise exceptions.IncorrectTypeException(
"""
component_property must be a string, found {!r}
""".format(
arg.component_property
)
)
if hasattr(arg, "component_event"):
raise exceptions.NonExistentEventException(
"""
Events have been removed.
Use the associated property instead.
"""
)
if isinstance(arg.component_id, dict):
validate_id_dict(arg)
elif isinstance(arg.component_id, _strings):
validate_id_string(arg)
else:
raise exceptions.IncorrectTypeException(
"""
component_id must be a string or dict, found {!r}
""".format(
arg.component_id
)
)
def validate_id_dict(arg):
arg_id = arg.component_id
for k in arg_id:
# Need to keep key type validation on the Python side, since
# non-string keys will be converted to strings in json.dumps and may
# cause unwanted collisions
if not isinstance(k, _strings):
raise exceptions.IncorrectTypeException(
"""
Wildcard ID keys must be non-empty strings,
found {!r} in id {!r}
""".format(
k, arg_id
)
)
def validate_id_string(arg):
arg_id = arg.component_id
invalid_chars = ".{"
invalid_found = [x for x in invalid_chars if x in arg_id]
if invalid_found:
raise exceptions.InvalidComponentIdError(
"""
The element `{}` contains `{}` in its ID.
Characters `{}` are not allowed in IDs.
""".format(
arg_id, "`, `".join(invalid_found), "`, `".join(invalid_chars)
)
)
def validate_multi_return(outputs_list, output_value, callback_id):
if not isinstance(output_value, (list, tuple)):
raise exceptions.InvalidCallbackReturnValue(
"""
The callback {} is a multi-output.
Expected the output type to be a list or tuple but got:
{}.
""".format(
callback_id, repr(output_value)
)
)
if len(output_value) != len(outputs_list):
raise exceptions.InvalidCallbackReturnValue(
"""
Invalid number of output values for {}.
Expected {}, got {}
""".format(
callback_id, len(outputs_list), len(output_value)
)
)
for i, outi in enumerate(outputs_list):
if isinstance(outi, list):
vi = output_value[i]
if not isinstance(vi, (list, tuple)):
raise exceptions.InvalidCallbackReturnValue(
"""
The callback {} output {} is a wildcard multi-output.
Expected the output type to be a list or tuple but got:
{}.
output spec: {}
""".format(
callback_id, i, repr(vi), repr(outi)
)
)
if len(vi) != len(outi):
raise exceptions.InvalidCallbackReturnValue(
"""
Invalid number of output values for {} item {}.
Expected {}, got {}
output spec: {}
output value: {}
""".format(
callback_id, i, len(vi), len(outi), repr(outi), repr(vi)
)
)
def fail_callback_output(output_value, output):
valid = _strings + (dict, int, float, type(None), Component)
def _raise_invalid(bad_val, outer_val, path, index=None, toplevel=False):
bad_type = type(bad_val).__name__
outer_id = (
"(id={:s})".format(outer_val.id) if getattr(outer_val, "id", False) else ""
)
outer_type = type(outer_val).__name__
if toplevel:
location = """
The value in question is either the only value returned,
or is in the top level of the returned list,
"""
else:
index_string = "[*]" if index is None else "[{:d}]".format(index)
location = """
The value in question is located at
{} {} {}
{},
""".format(
index_string, outer_type, outer_id, path
)
raise exceptions.InvalidCallbackReturnValue(
"""
The callback for `{output}`
returned a {object:s} having type `{type}`
which is not JSON serializable.
{location}
and has string representation
`{bad_val}`
In general, Dash properties can only be
dash components, strings, dictionaries, numbers, None,
or lists of those.
""".format(
output=repr(output),
object="tree with one value" if not toplevel else "value",
type=bad_type,
location=location,
bad_val=bad_val,
)
)
def _value_is_valid(val):
return isinstance(val, valid)
def _validate_value(val, index=None):
# val is a Component
if isinstance(val, Component):
# pylint: disable=protected-access
for p, j in val._traverse_with_paths():
# check each component value in the tree
if not _value_is_valid(j):
_raise_invalid(bad_val=j, outer_val=val, path=p, index=index)
# Children that are not of type Component or
# list/tuple not returned by traverse
child = getattr(j, "children", None)
if not isinstance(child, (tuple, patch_collections_abc("MutableSequence"))): # collections.MutableSequence was removed in Python 3.10
if child and not _value_is_valid(child):
_raise_invalid(
bad_val=child,
outer_val=val,
path=p + "\n" + "[*] " + type(child).__name__,
index=index,
)
# Also check the child of val, as it will not be returned
child = getattr(val, "children", None)
if not isinstance(child, (tuple, patch_collections_abc("MutableSequence"))): # collections.MutableSequence was removed in Python 3.10
if child and not _value_is_valid(child):
_raise_invalid(
bad_val=child,
outer_val=val,
path=type(child).__name__,
index=index,
)
# val is not a Component, but is at the top level of tree
elif not _value_is_valid(val):
_raise_invalid(
bad_val=val,
outer_val=type(val).__name__,
path="",
index=index,
toplevel=True,
)
if isinstance(output_value, list):
for i, val in enumerate(output_value):
_validate_value(val, index=i)
else:
_validate_value(output_value)
# if we got this far, raise a generic JSON error
raise exceptions.InvalidCallbackReturnValue(
"""
The callback for property `{property:s}` of component `{id:s}`
returned a value which is not JSON serializable.
In general, Dash properties can only be dash components, strings,
dictionaries, numbers, None, or lists of those.
""".format(
property=output.component_property, id=output.component_id
)
)
def check_obsolete(kwargs):
for key in kwargs:
if key in ["components_cache_max_age", "static_folder"]:
raise exceptions.ObsoleteKwargException(
"""
{} is no longer a valid keyword argument in Dash since v1.0.
See https://dash.plotly.com for details.
""".format(
key
)
)
# any other kwarg mimic the built-in exception
raise TypeError("Dash() got an unexpected keyword argument '" + key + "'")
def validate_js_path(registered_paths, package_name, path_in_package_dist):
if package_name not in registered_paths:
raise exceptions.DependencyException(
"""
Error loading dependency. "{}" is not a registered library.
Registered libraries are:
{}
""".format(
package_name, list(registered_paths.keys())
)
)
if path_in_package_dist not in registered_paths[package_name]:
raise exceptions.DependencyException(
"""
"{}" is registered but the path requested is not valid.
The path requested: "{}"
List of registered paths: {}
""".format(
package_name, path_in_package_dist, registered_paths
)
)
def validate_index(name, checks, index):
missing = [i for check, i in checks if not re.compile(check).search(index)]
if missing:
plural = "s" if len(missing) > 1 else ""
raise exceptions.InvalidIndexException(
"Missing item{pl} {items} in {name}.".format(
items=", ".join(missing), pl=plural, name=name
)
)
def validate_layout_type(value):
if not isinstance(value, (Component, patch_collections_abc("Callable"))):
raise exceptions.NoLayoutException(
"Layout must be a dash component "
"or a function that returns a dash component."
)
def validate_layout(layout, layout_value):
if layout is None:
raise exceptions.NoLayoutException(
"""
The layout was `None` at the time that `run_server` was called.
Make sure to set the `layout` attribute of your application
before running the server.
"""
)
layout_id = stringify_id(getattr(layout_value, "id", None))
component_ids = {layout_id} if layout_id else set()
for component in layout_value._traverse(): # pylint: disable=protected-access
component_id = stringify_id(getattr(component, "id", None))
if component_id and component_id in component_ids:
raise exceptions.DuplicateIdError(
"""
Duplicate component id found in the initial layout: `{}`
""".format(
component_id
)
)
component_ids.add(component_id)
|
[
"pikhovkin@gmail.com"
] |
pikhovkin@gmail.com
|
bce6368fc8a866dd4bff9c0a271687bdaea848c1
|
5e014f95b49f376b34d20760c41f09bdca094247
|
/flask_ide/auth/models.py
|
2fe1fcdca8701cfe3cf45972adb5b95603c108eb
|
[] |
no_license
|
jstacoder/flask-ide
|
34ae304c211c7b263f37b2fcf0660ae76053c0a2
|
3890756c094b4b7872bad7d915e764e3e32dcb2d
|
refs/heads/master
| 2023-02-12T11:22:24.412680
| 2020-07-20T17:21:55
| 2020-07-20T17:21:55
| 29,079,246
| 50
| 10
| null | 2023-02-02T07:17:40
| 2015-01-11T02:51:35
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,478
|
py
|
from flask_xxl.basemodels import BaseMixin
from flask import url_for
from LoginUtils import encrypt_password, check_password
from sqlalchemy.ext.declarative import declared_attr
#import sqlalchemy to global namespace
from sqlalchemy import (
UnicodeText,func,Enum,UniqueConstraint,DateTime,Text,Column,Integer,
ForeignKey,Boolean,String,Table
)
from sqlalchemy.orm import relationship, backref
class UnknownUser(object):
is_unknown = True
class Role(BaseMixin):
__tablename__ = 'roles'
name = Column(String(255))
can_view = Column(Boolean,default=True,nullable=False)
can_add = Column(Boolean,default=False,nullable=False)
can_edit = Column(Boolean,default=False,nullable=False)
can_delete = Column(Boolean,default=False,nullable=False)
class User(BaseMixin):
__tablename__ = 'users'
first_name = Column(String(255),default="")
last_name = Column(String(255),default="")
email = Column(String(255),nullable=False,unique=True)
role_id = Column(Integer,ForeignKey('roles.id'))
role = relationship('Role',backref=backref(
'users',lazy='dynamic'))
add_date = Column(DateTime,default=func.now())
_pw_hash = Column(UnicodeText,nullable=False)
age = Column(Integer)
def __init__(self,*args,**kwargs):
if 'first_name' in kwargs:
self.first_name = kwargs.pop('first_name')
if 'last_name' in kwargs:
self.last_name = kwargs.pop('last_name')
if 'email' in kwargs:
self.email = kwargs.pop('email')
if 'role' in kwargs:
self.role = kwargs.pop('role')
if 'role_id' in kwargs:
self.role_id = kwargs.pop('role_id')
if 'password' in kwargs:
self.password = kwargs.pop('password')
def _to_json(self):
import json
return json.dumps(
{
'first_name':self.first_name,
'last_name':self.last_name,
'email':self.email,
'age':self.age,
'date_added':self.add_date,
}
)
@declared_attr
def __table_args__(cls):
return (UniqueConstraint('email','first_name','last_name'),{})
@property
def is_unknown(self):
return False
def check_password(self, pw):
return check_password(pw,self._pw_hash)
@classmethod
def get_by_email(cls, email):
return cls.query().filter_by(email=email).first()
    @property
    def password(self):
        # Write-only property: the raw password is never readable.
        return 'private'
@password.setter
def password(self,pw):
self._pw_hash = encrypt_password(pw)
@property
def full_name(self):
return '{} {}'.format(self.first_name.title(),self.last_name.title())
@property
def name(self):
return str(self.first_name)
def __str__(self):
if self.first_name != "":
rtn = self.full_name
else:
rtn = self.email
return rtn
def __repr__(self):
        return 'User<{} {}>'.format(self.email,self.first_name)
def _get_absolute_url(self):
return url_for('member.profile',member_id=str(int(self.id)))
@property
def absolute_url(self):
return str(self._get_absolute_url())
def _get_edit_url(self):
return '#'
@property
def edit_url(self):
return str(self._get_edit_url())
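# Illustrative usage sketch (assumes a configured engine/session; not part of the
# original file):
#
#     u = User(email="a@example.com", password="secret")  # setter stores only the hash
#     u.check_password("secret")  # -> True
#     u.password                  # -> 'private'; the raw value is never readable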
|
[
"kyle@level2designs.com"
] |
kyle@level2designs.com
|
b4fbc50150b419753f6d6e59d82190c93aac1e3f
|
ef31e4986b524f73a1cc7300327f28d0e7fd1960
|
/venv/Scripts/pyi-makespec-script.py
|
b5f8e8f7834bff66fba68d49892f0987cf07502d
|
[] |
no_license
|
aurvandel/pdfcombine
|
a5348150ef8f5649c09c689f5f5966a045cd49b5
|
7900faa2c823005adc46ee1f434cebf575632d24
|
refs/heads/master
| 2023-04-15T08:13:26.170775
| 2019-06-05T16:22:51
| 2019-06-05T16:22:51
| 362,185,595
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 450
|
py
|
#!C:\Users\pwatkin1\PycharmProjects\pdfcombine\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'PyInstaller==3.4','console_scripts','pyi-makespec'
__requires__ = 'PyInstaller==3.4'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('PyInstaller==3.4', 'console_scripts', 'pyi-makespec')()
)
|
[
"parkergw@gmail.com"
] |
parkergw@gmail.com
|
d5b659372a216b999b788a1e5dbe6d3852e2a1f3
|
474525154a4e1d48ef5242d1f44164d05399b145
|
/tensorflow_probability/python/experimental/distributions/mvn_precision_factor_linop_test.py
|
47676d4d6f31be7ebf0b5ac98d233982286579c7
|
[
"Apache-2.0"
] |
permissive
|
svshivapuja/probability
|
9855737790f74a39169688fbfec9671deef804d9
|
af7ccb22d972329633530c3b754ed1f49472f6a7
|
refs/heads/main
| 2023-07-17T04:14:53.703622
| 2021-08-30T17:47:06
| 2021-08-30T17:47:06
| 400,983,015
| 1
| 0
|
Apache-2.0
| 2021-08-29T07:51:29
| 2021-08-29T07:51:29
| null |
UTF-8
|
Python
| false
| false
| 8,157
|
py
|
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for tensorflow_probability.python.experimental.distributions.mvn_precision_factor_linop."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import test_combinations
from tensorflow_probability.python.internal import test_util
tfd = tfp.distributions
tfd_e = tfp.experimental.distributions
@test_util.test_all_tf_execution_regimes
class MVNPrecisionFactorLinOpTest(test_util.TestCase):
def _random_constant_spd_linop(
self,
event_size,
batch_shape=(),
conditioning=1.2,
dtype=np.float32,
):
"""Randomly generate a constant SPD LinearOperator."""
# The larger conditioning is, the better posed the matrix is.
# With conditioning = 1, it will be on the edge of singular, and likely
# numerically singular if event_size is large enough.
# Conditioning on the small side is best, since then the matrix is not so
# diagonally dominant, and we therefore test use of transpositions better.
assert conditioning >= 1
scale_wishart = tfd.WishartLinearOperator(
df=dtype(conditioning * event_size),
scale=tf.linalg.LinearOperatorIdentity(event_size, dtype=dtype),
input_output_cholesky=False,
)
# Make sure to evaluate here. This ensures that the linear operator is a
# constant rather than a random operator.
matrix = self.evaluate(
scale_wishart.sample(batch_shape, seed=test_util.test_seed()))
return tf.linalg.LinearOperatorFullMatrix(
matrix, is_positive_definite=True, is_self_adjoint=True)
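  # For example (illustrative, not part of the original tests):
  # self._random_constant_spd_linop(3, batch_shape=(2,)) returns a constant
  # LinearOperatorFullMatrix wrapping a (2, 3, 3) batch of SPD matrices.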
@test_combinations.generate(
test_combinations.combine(
use_loc=[True, False],
use_precision=[True, False],
event_size=[3],
batch_shape=[(), (2,)],
n_samples=[5000],
dtype=[np.float32, np.float64],
),
)
def test_log_prob_and_sample(
self,
use_loc,
use_precision,
event_size,
batch_shape,
dtype,
n_samples,
):
cov = self._random_constant_spd_linop(
event_size, batch_shape=batch_shape, dtype=dtype)
precision = cov.inverse()
precision_factor = precision.cholesky()
# Make sure to evaluate here, else you'll have a random loc vector!
if use_loc:
loc = self.evaluate(
tf.random.normal(
batch_shape + (event_size,),
dtype=dtype,
seed=test_util.test_seed()))
else:
loc = None
mvn_scale = tfd.MultivariateNormalTriL(
loc=loc, scale_tril=cov.cholesky().to_dense())
mvn_precision = tfd_e.MultivariateNormalPrecisionFactorLinearOperator(
loc=loc,
precision_factor=precision_factor,
precision=precision if use_precision else None,
)
point = tf.random.normal(
batch_shape + (event_size,), dtype=dtype, seed=test_util.test_seed())
mvn_scale_log_prob, mvn_precision_log_prob = self.evaluate(
[mvn_scale.log_prob(point),
mvn_precision.log_prob(point)])
self.assertAllClose(
mvn_scale_log_prob, mvn_precision_log_prob, atol=5e-4, rtol=5e-4)
batch_point = tf.random.normal(
(2,) + batch_shape + (event_size,),
dtype=dtype,
seed=test_util.test_seed())
mvn_scale_log_prob, mvn_precision_log_prob = self.evaluate(
[mvn_scale.log_prob(batch_point),
mvn_precision.log_prob(batch_point)])
self.assertAllClose(
mvn_scale_log_prob, mvn_precision_log_prob, atol=5e-4, rtol=5e-4)
samples = mvn_precision.sample(n_samples, seed=test_util.test_seed())
arrs = self.evaluate({
'stddev': tf.sqrt(cov.diag_part()),
'var': cov.diag_part(),
'cov': cov.to_dense(),
'sample_mean': tf.reduce_mean(samples, axis=0),
'sample_var': tfp.stats.variance(samples, sample_axis=0),
'sample_cov': tfp.stats.covariance(samples, sample_axis=0),
})
self.assertAllClose(
arrs['sample_mean'],
loc if loc is not None else np.zeros_like(arrs['cov'][..., 0]),
atol=5 * np.max(arrs['stddev']) / np.sqrt(n_samples))
self.assertAllClose(
arrs['sample_var'],
arrs['var'],
atol=5 * np.sqrt(2) * np.max(arrs['var']) / np.sqrt(n_samples))
self.assertAllClose(
arrs['sample_cov'],
arrs['cov'],
atol=5 * np.sqrt(2) * np.max(arrs['var']) / np.sqrt(n_samples))
def test_dynamic_shape(self):
x = tf.Variable(ps.ones([7, 3]), shape=[7, None])
self.evaluate(x.initializer)
# Check that the shape is actually `None`.
if not tf.executing_eagerly():
last_shape = x.shape[-1]
if last_shape is not None: # This is a `tf.Dimension` in tf1.
last_shape = last_shape.value
self.assertIsNone(last_shape)
dynamic_dist = tfd_e.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag(tf.ones_like(x)))
static_dist = tfd_e.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag(tf.ones([7, 3])))
in_ = tf.zeros([7, 3])
self.assertAllClose(self.evaluate(dynamic_dist.log_prob(in_)),
static_dist.log_prob(in_))
@test_combinations.generate(
test_combinations.combine(
batch_shape=[(), (2,)],
dtype=[np.float32, np.float64],
),
)
def test_mean_and_mode(self, batch_shape, dtype):
event_size = 3
cov = self._random_constant_spd_linop(
event_size, batch_shape=batch_shape, dtype=dtype)
precision_factor = cov.inverse().cholesky()
# Make sure to evaluate here, else you'll have a random loc vector!
loc = self.evaluate(
tf.random.normal(
batch_shape + (event_size,),
dtype=dtype,
seed=test_util.test_seed()))
mvn_precision = tfd_e.MultivariateNormalPrecisionFactorLinearOperator(
loc=loc,
precision_factor=precision_factor)
self.assertAllClose(mvn_precision.mean(), loc)
self.assertAllClose(mvn_precision.mode(), loc)
@test_combinations.generate(
test_combinations.combine(
batch_shape=[(), (2,)],
use_precision=[True, False],
dtype=[np.float32, np.float64],
),
)
def test_cov_var_stddev(self, batch_shape, use_precision, dtype):
event_size = 3
cov = self._random_constant_spd_linop(
event_size, batch_shape=batch_shape, dtype=dtype)
precision = cov.inverse()
precision_factor = precision.cholesky()
# Make sure to evaluate here, else you'll have a random loc vector!
loc = self.evaluate(
tf.random.normal(
batch_shape + (event_size,),
dtype=dtype,
seed=test_util.test_seed()))
mvn_precision = tfd_e.MultivariateNormalPrecisionFactorLinearOperator(
loc=loc,
precision_factor=precision_factor,
precision=precision if use_precision else None)
self.assertAllClose(mvn_precision.covariance(), cov.to_dense(), atol=1e-4)
self.assertAllClose(mvn_precision.variance(), cov.diag_part(), atol=1e-4)
self.assertAllClose(mvn_precision.stddev(), tf.sqrt(cov.diag_part()),
atol=1e-5)
if __name__ == '__main__':
test_util.main()
|
[
"gardener@tensorflow.org"
] |
gardener@tensorflow.org
|
a98dbf4c9d9e1172ec1e655a3091b12b5af3dea1
|
795cdbb2192e069a808e59d9a8c098637e738ccb
|
/app/upload_file.py
|
785974f435d3b9b36c82957c375bf85a7a246e8c
|
[
"MIT"
] |
permissive
|
terrainthesky-hub/Human_Rights_Asylum_Seekers
|
ccc09b72caac0e4684b04a7bc10b7699d3e2833e
|
5f2715c0fc4d2b48922d37527fa3860a20e9c72d
|
refs/heads/main
| 2023-06-22T20:50:07.487285
| 2021-07-12T22:26:39
| 2021-07-12T22:26:39
| 385,399,231
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,692
|
py
|
# All Required Imports
from fastapi import APIRouter
from fastapi import FastAPI, File, UploadFile
import pandas
import urllib.request
import logging
import boto3
from botocore.config import Config
import shutil
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Callable
from botocore.exceptions import ClientError
import sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from dotenv import load_dotenv
import os
import aiofile
from starlette.responses import FileResponse
from fnmatch import fnmatch
# loads secret credentials
load_dotenv()
# Connect POST requests to the API router
router = APIRouter()
# parse case_url and scrape relevant data off of it
def case_urls(url):
    # the case url tells the web layer the name of the document to view in the S3 bucket
    case_url = url
    index = url[-19:-4].find('-')
    hearing_date = url[(len(url)-19):-4]
    hearing_date = hearing_date[index+1:]
    decision_date = hearing_date
    index = url.find('-')
    indexend = url.find(hearing_date)
    a = url[indexend-8:indexend-1].find('-') + 1
    department = url[indexend-8+a:indexend-1]
    b = url.find(department)
    c = url[:b].find('-') + 1
    urlforloop = url[c:indexend-9+a]
    l = []
    for i in range(7, len(urlforloop)):
        if url[i:i+1].find('-') == -1 and url[i+2:i+3].isnumeric():
            l.append(i)
    h = min(l) - 10
    case_id = urlforloop[h:]
    t = urlforloop.find(case_id)
    refugee = urlforloop[:t+1]
    return case_id, case_url, hearing_date, decision_date, department, refugee
def save_upload_file(upload_file: UploadFile, destination: Path) -> None:
try:
with destination.open("wb") as buffer:
shutil.copyfileobj(upload_file.file, buffer)
finally:
upload_file.file.close()
def save_upload_file_tmp(upload_file: UploadFile) -> Path:
try:
suffix = Path(upload_file.filename).suffix
with NamedTemporaryFile(delete=False, suffix=suffix) as tmp:
shutil.copyfileobj(upload_file.file, tmp)
tmp_path = Path(tmp.name)
finally:
upload_file.file.close()
return tmp_path
def handle_upload_file(
upload_file: UploadFile, handler: Callable[[Path], None]
) -> None:
tmp_path = save_upload_file_tmp(upload_file)
try:
handler(tmp_path) # Do something with the saved temp file
finally:
tmp_path.unlink() # Delete the temp file
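# Illustrative usage sketch (hypothetical handler; not part of the original file):
#
#     handle_upload_file(upload, lambda tmp: print(tmp.stat().st_size))
#
# The handler receives the temporary Path, and the temp file is deleted afterwards.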
# file uploaders
@router.post("/upload/pdf")
async def pdf(file: UploadFile = File(...)):
filename = file.filename
if len(filename) >= 1:
        # add these variables to tables; scrapes all the data we need from the initial upload
case_id, case_url,hearing_date, decision_date, department, refugee = case_urls(file.filename)
# helper functions to handle file correctly
#save_upload_file_tmp(file)
#handle_upload_file(file, tmp_path)
        # Uploads the file to S3 and downloads it to the scripts folder
path = 'app/'+file.filename
key = os.getenv('access_key')
secret_access_key = os.getenv('secret_access_key')
with open(path, 'wb') as file_object:
shutil.copyfileobj(file.file, file_object)
s3 = boto3.resource(
's3',
aws_access_key_id = key,
aws_secret_access_key = secret_access_key,
config = Config(signature_version ='s3v4')
)
data = open(path, 'rb')
s3.Bucket('hrf-asylum-dsa-documents').put_object(Key='pdf/'+file.filename, Body=data)
        # scripts to scrape the pdf into free text and get the judge name
        # script to delete the pdf after it has been scraped
for dirpath, dirnames, filenames in os.walk(os.curdir):
for file in filenames:
if fnmatch(file, '*.pdf'):
os.remove(os.path.join(dirpath, file))
return {"filename": case_url,
"case_id" : case_id,
"case_url" : case_url,
"hearing_date" : hearing_date,
"decision_date" : decision_date,
"department": department,
"refugee": refugee,
"s3": "Viewable"}
# deals with data from csv
def csv_data(df):
return ""
# This route is not working yet, so don't include it
# @router.post("/upload/file")
async def not_pdf(file: UploadFile = File(...)):
#if len(file.filename) >= 1:
    # add these variables to tables
#df = pd.read_csv(file)
#varibles = csv_data(df)
return {"filename": file.filename}
# This route is not working yet, so don't include it
# @router.post("/connect/db")
async def get_db() -> sqlalchemy.engine.base.Connection:
"""Get a SQLAlchemy database connection.
    grabbed this from group B because our database was not working and nobody connected the scripts and tables together
Uses this environment variable if it exists:
DATABASE_URL=dialect://user:password@host/dbname
Otherwise uses a SQLite database for initial local development.
"""
database_url = os.getenv('DATABASE_URL')
engine = sqlalchemy.create_engine(database_url)
connection = engine.connect()
session_local = sessionmaker(autocommit=False, autoflush=False, bind=engine)()
try:
yield connection
finally:
connection.close()
# the postgres database has never worked yet
# load_dotenv()
# database_url = os.getenv('DATABASE_URL')
# engine = sqlalchemy.create_engine(database_url, pool_pre_ping=True)
# connection = engine.connect()
# session_local = sessionmaker(autocommit=False, autoflush=False, bind=engine)()
Base = declarative_base()
|
[
"lesley.t.rich@gmail.com"
] |
lesley.t.rich@gmail.com
|
4fe043e3715e5b8438e6d887a92c5386dfd3f625
|
2044867096d52f2737102681dbb1baf6b05eeb97
|
/Robot sense Localization.py
|
256c0425096c8aaeed3e4ba1a4add37ba9af80cc
|
[] |
no_license
|
ahmedfarid98/Robot_Localization
|
4a42fbb81d83cdcca69a5d47ed2e8b1634343d81
|
4b618eef94e9e7dbb4785feb3dbfc4a34da3d791
|
refs/heads/master
| 2022-11-15T14:28:22.447170
| 2020-07-13T00:12:20
| 2020-07-13T00:12:20
| 277,865,945
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,555
|
py
|
############################### Robot Localization #########################
################# Example 1 ####################
#Modify the code below so that the function sense, which takes p and Z as inputs, will output the NON-normalized probability distribution, q,
#after multiplying the entries in p by pHit or pMiss according to the color in the corresponding cell in world.
#### Then Normalize the values in q and get the final posterior Probability.
p=[0.2, 0.2, 0.2, 0.2, 0.2] ### prior probabilities are equal at the beginning, before any measurement
world=['green', 'red', 'red', 'green', 'green']
Z='red'
pHit = 0.6 ## if the sensed color matches the cell's color, multiply its probability by 0.6
pMiss = 0.2 ## if it does not match, multiply its probability by 0.2
def sense(p, Z):
q=[]
for i in range(len(p)):
        hit = (Z == world[i])  # hit is 1 (True) when measurement Z matches this cell's color, else 0
        q.append(p[i]*(hit*pHit + (1-hit)*pMiss))  # weight the prior by pHit on a match, pMiss otherwise (unnormalized)
# To normalize probabilities
s=sum(q)
for i in range(len(p)):
q[i]=q[i]/s
return q
print(sense(p,Z))
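# With the values above, q before normalization is
# [0.04, 0.12, 0.12, 0.04, 0.04] (sum 0.36), so the printed posterior is
# approximately [0.111, 0.333, 0.333, 0.111, 0.111].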
############################
############## Example 2 ####################
''' For Multiple Measurement
#measurements = ['red' , 'green']
for k in range(len(measurements)):
p=sense(p,measurements[k])
print(p)
'''
#####################################################################
|
[
"noreply@github.com"
] |
noreply@github.com
|
9b041b73b4058ed94e12ca2a03153ad4b7767547
|
3f911aca38f91e56890f5034b31ed81edb31b000
|
/protein/FDR 구현실습/test.py
|
2f29ade236cf8f8f9d1525d161b6fe892a63d725
|
[] |
no_license
|
sochic2/kis
|
5dd83fd474176981f49cde967f49763405ed27b3
|
3ab07710c987110224b3fad0cb1ce3a0d6df6d1a
|
refs/heads/master
| 2022-11-07T00:58:37.427148
| 2020-06-18T14:37:01
| 2020-06-18T14:37:01
| 257,481,038
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 33
|
py
|
a = 'abcdefg'
b = a[0:3]
print(b)
|
[
"netzzang12@gmail.com"
] |
netzzang12@gmail.com
|
046ae5d3523c40583815218388fe2c2f3b49b05b
|
df0903d95faa22bd37ea661ba706776d384ab0bd
|
/scripts/propagate/merger.py
|
4892333b046c16ea3904661db9531c0ea8556cce
|
[] |
no_license
|
tschmorleiz/simman
|
50ce0cbf7f6695a09fa45a6dcd8c655ba9febb63
|
c589b1a9fc429ee33e46e730253a42bd10fa95b3
|
refs/heads/master
| 2016-09-06T12:05:59.870641
| 2015-02-16T11:48:38
| 2015-02-16T11:48:38
| 30,798,863
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,625
|
py
|
#!/usr/bin/python -O
################################################################################
################################################################################
#
# State-Based Text Merging Algorithm
# For 6.033 Design Project 2
# TA: Katherine Fang
# 9 May 2012
#
# Stephan Boyer
# Ami Patel
# Vo Thanh Minh Tue
#
# Description:
#
# Attempts to automatically perform a three-way merge.
# Prints the result to standard output.
#
# This is a proof of concept. In a real system, we would
# write at least the diff algorithm in a faster language.
# This implementation quickly slows down for large files,
# unless the fast diff approximation is used (see below).
#
# For more information, see:
# http://www.stephanboyer.com/post/26/3-way-text-merging-algorithm
#
# Usage:
#
# merger.py ancestor_file alice_file bob_file
#
################################################################################
################################################################################
################################################################################
################################################################################
# String Diffing
################################################################################
################################################################################
# represents a change from one string to another
class Change:
pass
# represents adding <text> to string b
class Insert(Change):
def __init__(self, text, pos_a, range_b):
self.text = text
self.pos_a = pos_a
self.range_b = range_b
def __repr__(self):
return "Insert(\"" + str(self.text) + "\", " + str(self.pos_a) + ", " + str(self.range_b) + ")"
# represents deleting <text> from string b
class Delete(Change):
def __init__(self, text, range_a, pos_b):
self.text = text
self.range_a = range_a
self.pos_b = pos_b
def __repr__(self):
return "Delete(\"" + str(self.text) + "\", " + str(self.range_a) + ", " + str(self.pos_b) + ")"
# takes 2 indexable objects (e.g. strings or lists)
# returns a list of Change objects (Delete or Insert)
# guaranteed to produce an optimal diff
def str_diff(a, b):
ls = len(a)
lf = len(b)
memo = {}
def min_diff(si, fi):
if (si, fi) in memo:
return memo[(si, fi)]
ans = []
if si == ls and fi == lf:
ans = []
elif si < ls and fi == lf:
ans = []
for i in range(si, ls):
ans.append((i, "d"))
elif fi < lf and si == ls:
ans = []
for j in range(fi, lf):
ans.append((si, "i", b[j]))
elif a[si] == b[fi]:
ans = min_diff(si + 1, fi + 1)
else:
alts = [(min_diff(si + 1, fi), (si, "d")), (min_diff(si, fi + 1), (si, "i", b[fi]))]
best = min(alts, key=lambda t: len(t[0]))
ans = [best[1]] + best[0]
memo[(si, fi)] = ans
return ans
diff = sorted(min_diff(0, 0), key=lambda x: x[0])
changes = []
pos_diff = 0
offset_b = 0
while pos_diff < len(diff):
length = 0
pos_a_old = diff[pos_diff][0]
while pos_diff < len(diff) and diff[pos_diff][1] == "i":
if diff[pos_diff][0] != pos_a_old:
break
length += 1
pos_diff += 1
if length > 0:
pos_a = pos_a_old
range_b_0 = pos_a_old + offset_b
range_b_1 = pos_a_old + offset_b + length
changes.append(Insert(b[range_b_0:range_b_1], pos_a, (range_b_0, range_b_1)))
offset_b += length
if pos_diff >= len(diff):
break
length = 0
pos_a_old = diff[pos_diff][0]
while pos_diff < len(diff) and diff[pos_diff][1] == "d":
if diff[pos_diff][0] != pos_a_old + length:
break
length += 1
pos_diff += 1
if length > 0:
range_a_0 = pos_a_old
range_a_1 = pos_a_old + length
pos_b = pos_a_old + offset_b
changes.append(Delete(a[range_a_0:range_a_1], (range_a_0, range_a_1), pos_b))
offset_b -= length
return changes
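# A small worked example (illustrative, not part of the original file):
#
#     str_diff("ab", "axb")  # -> [Insert("x", 1, (1, 2))]
#
# i.e. turning "ab" into "axb" is a single insertion of "x" at position 1.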
"""
# Here is an alternative version of the str_diff(a, b) function.
# Unlike the version above, it is NOT guaranteed to produce optimal
# diffs. Diffs that are not optimal can sometimes produce unexpected
# results. However, this version is much faster.
import difflib
# takes 2 indexable objects (e.g. strings or lists)
# returns a list of Change objects (Delete or Insert)
# not guaranteed to produce an optimal diff
def str_diff(a, b):
d = difflib.Differ()
diff = list(d.compare(a, b))
changes = []
pos_a = 0
pos_b = 0
pos_diff = 0
while pos_diff < len(diff):
while pos_diff < len(diff) and diff[pos_diff][0] == " ":
pos_diff += 1
pos_a += 1
pos_b += 1
while pos_diff < len(diff) and diff[pos_diff][0] == "?":
pos_diff += 1
length = 0
range_b_0 = pos_b
while pos_diff < len(diff) and diff[pos_diff][0] == "+":
length += 1
pos_diff += 1
pos_b += 1
if length > 0:
changes.append(Insert(b[range_b_0:pos_b], pos_a, (range_b_0, pos_b)))
        length = 0
range_a_0 = pos_a
while pos_diff < len(diff) and diff[pos_diff][0] == "-":
length += 1
pos_diff += 1
pos_a += 1
if length > 0:
changes.append(Delete(a[range_a_0:pos_a], (range_a_0, pos_a), pos_b))
return changes
"""
################################################################################
################################################################################
# Levenshtein Distance
################################################################################
################################################################################
# compute the Levenshtein distance between two strings
def levenshtein(a, b):
d = {}
for i in range(len(a) + 1):
d[(i, 0)] = i
for j in range(len(b) + 1):
d[(0, j)] = j
for j in range(1, len(b) + 1):
for i in range(1, len(a) + 1):
if a[i - 1] == b[j - 1]:
d[(i, j)] = d[(i - 1, j - 1)]
else:
d[(i, j)] = min([d[(i - 1, j)], d[(i, j - 1)], d[(i - 1, j - 1)]]) + 1
return d[len(a), len(b)]
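# Quick sanity check (illustrative, not part of the original file):
#
#     levenshtein("kitten", "sitting")  # -> 3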
################################################################################
################################################################################
# Finding Move Actions
################################################################################
################################################################################
# the maximum normalized distance (0-1) between two strings for them to be considered the same
# for the purposes of finding Move actions
MAX_MOVE_DIST = 0.2
# the minimum number of items that can be considered a Move action
MIN_MOVE_LENGTH = 10
# represents moving <text_a> in range <range_a> to <text_b> in range <range_b>
class Move(Change):
def __init__(self, text_a, range_a, pos_a, text_b, range_b, pos_b, first):
self.text_a = text_a
self.range_a = range_a
self.pos_a = pos_a
self.text_b = text_b
self.range_b = range_b
self.pos_b = pos_b
self.first = first
def __repr__(self):
return "Move(\"" + str(self.text_a) + "\", " + str(self.range_a) + ", " + str(self.pos_a) + ", \"" + str(self.text_b) + "\", " + str(self.range_b) + ", " + str(self.pos_b) + ", " + str(self.first) + ")"
# find Move actions in a list of Change objects (mutates the input list).
# a Move action comes from an Insert-Delete pair where the strings differ
# by less than MAX_MOVE_DIST in terms of normalized Levenshtein distance
def find_moves(diff, first):
indices_to_delete = []
for i in range(len(diff)):
if isinstance(diff[i], Delete):
for j in range(len(diff)):
if isinstance(diff[j], Insert):
if not (i in indices_to_delete) and not (j in indices_to_delete):
normalized_dist = float(levenshtein(diff[i].text, diff[j].text)) / max(len(diff[i].text), len(diff[j].text))
if normalized_dist <= MAX_MOVE_DIST and max(len(diff[i].text), len(diff[j].text)) >= MIN_MOVE_LENGTH:
indices_to_delete.append(i)
indices_to_delete.append(j)
diff.append(Move(diff[i].text, diff[i].range_a, diff[j].pos_a, diff[j].text, diff[j].range_b, diff[i].pos_b, first))
indices_to_delete.sort()
indices_to_delete.reverse()
for i in indices_to_delete:
diff.pop(i)
################################################################################
################################################################################
# Text Merging
################################################################################
################################################################################
# represents a list of merge conflicts
class MergeConflictList(Exception):
def __init__(self, conflicts):
self.conflicts = conflicts
    def __repr__(self):
        return repr(self.conflicts)
# takes indexable objects (e.g. strings or lists) a, b and their common ancestor
# returns the merged document
def merge(ancestor, a, b):
# compute the diffs from the common ancestor
diff_a = str_diff(ancestor, a)
diff_b = str_diff(ancestor, b)
# find Move actions
find_moves(diff_a, True)
find_moves(diff_b, False)
# find conflicts and automatically resolve them where possible
conflicts = []
indices_to_delete_a = []
indices_to_delete_b = []
len_diff_a = len(diff_a)
len_diff_b = len(diff_b)
for i in range(len_diff_a):
for j in range(len_diff_b):
if j in indices_to_delete_b:
continue
if isinstance(diff_a[i], Delete) and isinstance(diff_b[j], Delete):
# if two Delete actions overlap, take the union of their ranges
if (diff_b[j].range_a[0] >= diff_a[i].range_a[0] and diff_b[j].range_a[0] < diff_a[i].range_a[1]) or \
(diff_b[j].range_a[1] >= diff_a[i].range_a[0] and diff_b[j].range_a[1] < diff_a[i].range_a[1]) or \
(diff_b[j].range_a[0] < diff_a[i].range_a[0] and diff_b[j].range_a[1] > diff_a[i].range_a[1]):
diff_a[i].range_a = (min(diff_a[i].range_a[0], diff_b[j].range_a[0]), max(diff_a[i].range_a[1], diff_b[j].range_a[1]))
indices_to_delete_b.append(j)
if isinstance(diff_a[i], Delete) and isinstance(diff_b[j], Insert):
# Insert actions inside the range of Delete actions collide
if diff_b[j].pos_a > diff_a[i].range_a[0] and diff_b[j].pos_a < diff_a[i].range_a[1]:
conflicts.append("A is deleting text that B is inserting into.")
if isinstance(diff_a[i], Delete) and isinstance(diff_b[j], Move):
                # Delete actions that overlap with but are not fully contained within Move sources collide
if diff_a[i].range_a[0] >= diff_b[j].range_a[0] and diff_a[i].range_a[1] <= diff_b[j].range_a[1]:
pass
elif diff_a[i].range_a[0] >= diff_b[j].range_a[0] and diff_a[i].range_a[0] < diff_b[j].range_a[1]:
conflicts.append("B is moving only part of some text that A is deleting.")
elif diff_a[i].range_a[1] >= diff_b[j].range_a[0] and diff_a[i].range_a[1] < diff_b[j].range_a[1]:
conflicts.append("B is moving only part of some text that A is deleting.")
elif diff_a[i].range_a[0] < diff_b[j].range_a[0] and diff_a[i].range_a[1] > diff_b[j].range_a[1]:
conflicts.append("A is deleting text that B is moving.")
# Move destinations inside the range of Delete actions collide
if diff_b[j].pos_a > diff_a[i].range_a[0] and diff_b[j].pos_a < diff_a[i].range_a[1]:
conflicts.append("A is deleting text that B is moving text into.")
if isinstance(diff_a[i], Insert) and isinstance(diff_b[j], Delete):
# Insert actions inside the range of Delete actions collide
if diff_a[i].pos_a > diff_b[j].range_a[0] and diff_a[i].pos_a < diff_b[j].range_a[1]:
conflicts.append("B is deleting text that A is inserting into.")
if isinstance(diff_a[i], Insert) and isinstance(diff_b[j], Insert):
# Insert actions at the same position collide unless the inserted text is the same
if diff_a[i].pos_a == diff_b[j].pos_a:
if diff_a[i].text == diff_b[j].text:
indices_to_delete_b.append(j)
else:
conflicts.append("A and B are inserting text at the same location.")
if isinstance(diff_a[i], Insert) and isinstance(diff_b[j], Move):
# Insert actions at the same location as Move destinations collide unless the text is the same
if diff_a[i].pos_a == diff_b[j].pos_a:
if diff_a[i].text == diff_b[j].text_b:
indices_to_delete_a.append(i)
else:
conflicts.append("A is inserting text at the same location that B is moving text to.")
if isinstance(diff_a[i], Move) and isinstance(diff_b[j], Delete):
                # Delete actions that overlap with but are not fully contained within Move actions collide
if diff_b[j].range_a[0] >= diff_a[i].range_a[0] and diff_b[j].range_a[1] <= diff_a[i].range_a[1]:
pass
elif diff_b[j].range_a[0] >= diff_a[i].range_a[0] and diff_b[j].range_a[0] < diff_a[i].range_a[1]:
conflicts.append("A is moving only part of some text that B is deleting.")
elif diff_b[j].range_a[1] >= diff_a[i].range_a[0] and diff_b[j].range_a[1] < diff_a[i].range_a[1]:
conflicts.append("A is moving only part of some text that B is deleting.")
elif diff_b[j].range_a[0] < diff_a[i].range_a[0] and diff_b[j].range_a[1] > diff_a[i].range_a[1]:
conflicts.append("B is deleting text that A is moving.")
if isinstance(diff_a[i], Move) and isinstance(diff_b[j], Insert):
# Insert actions at the same location as Move destinations collide unless the text is the same
if diff_b[j].pos_a == diff_a[i].pos_a:
if diff_b[j].text == diff_a[i].text_b:
indices_to_delete_b.append(j)
else:
conflicts.append("B is inserting text at the same location that A is moving text to.")
if isinstance(diff_a[i], Move) and isinstance(diff_b[j], Move):
                # Move actions collide if their source ranges overlap unless one is fully contained in the other
if diff_b[j].range_a[0] >= diff_a[i].range_a[0] and diff_b[j].range_a[1] <= diff_a[i].range_a[1]:
pass
elif diff_b[j].range_a[0] >= diff_a[i].range_a[0] and diff_b[j].range_a[0] < diff_a[i].range_a[1]:
conflicts.append("A text move by A overlaps with a text move by B.")
elif diff_b[j].range_a[1] >= diff_a[i].range_a[0] and diff_b[j].range_a[1] < diff_a[i].range_a[1]:
conflicts.append("A text move by A overlaps with a text move by B.")
elif diff_b[j].range_a[0] < diff_a[i].range_a[0] and diff_b[j].range_a[1] > diff_a[i].range_a[1]:
pass
# Move actions collide if their destination positions are the same
if diff_a[i].pos_a == diff_b[j].pos_a:
conflicts.append("A and B are moving text to the same location.")
indices_to_delete_a.sort()
indices_to_delete_a.reverse()
for i in indices_to_delete_a:
diff_a.pop(i)
indices_to_delete_b.sort()
indices_to_delete_b.reverse()
for i in indices_to_delete_b:
diff_b.pop(i)
# throw an error if there are conflicts
if len(conflicts) > 0:
return {'merge': None, 'conflicts': conflicts}
# sort the actions by position in the common ancestor
def sort_key(action):
if isinstance(action, Delete):
return action.range_a[0]
if isinstance(action, Insert):
return action.pos_a
actions = sorted(diff_a + diff_b, key=sort_key)
# compute offset lists
offset_changes_ab = []
for i in range(len(actions)):
if isinstance(actions[i], Delete):
offset_changes_ab.append((actions[i].range_a[0], actions[i].range_a[0] - actions[i].range_a[1]))
if isinstance(actions[i], Insert):
offset_changes_ab.append((actions[i].pos_a, len(actions[i].text)))
offset_changes_a = []
for i in range(len(diff_a)):
if isinstance(diff_a[i], Delete):
offset_changes_a.append((diff_a[i].range_a[0], diff_a[i].range_a[0] - diff_a[i].range_a[1]))
if isinstance(diff_a[i], Insert):
offset_changes_a.append((diff_a[i].pos_a, len(diff_a[i].text)))
if isinstance(diff_a[i], Move):
offset_changes_a.append((diff_a[i].range_a[0], diff_a[i].range_a[0] - diff_a[i].range_a[1]))
offset_changes_a.append((diff_a[i].pos_a, len(diff_a[i].text_a)))
offset_changes_b = []
for i in range(len(diff_b)):
if isinstance(diff_b[i], Delete):
offset_changes_b.append((diff_b[i].range_a[0], diff_b[i].range_a[0] - diff_b[i].range_a[1]))
if isinstance(diff_b[i], Insert):
offset_changes_b.append((diff_b[i].pos_a, len(diff_b[i].text)))
if isinstance(diff_b[i], Move):
offset_changes_b.append((diff_b[i].range_a[0], diff_b[i].range_a[0] - diff_b[i].range_a[1]))
offset_changes_b.append((diff_b[i].pos_a, len(diff_b[i].text_a)))
# compute the preliminary merge
preliminary_merge = ancestor[:]
pos_offset = 0
for i in range(len(actions)):
if isinstance(actions[i], Delete):
preliminary_merge = preliminary_merge[:actions[i].range_a[0] + pos_offset] + preliminary_merge[actions[i].range_a[1] + pos_offset:]
pos_offset += actions[i].range_a[0] - actions[i].range_a[1]
offset_changes_ab.append((actions[i].range_a[0], actions[i].range_a[0] - actions[i].range_a[1]))
if isinstance(actions[i], Insert):
preliminary_merge = preliminary_merge[:actions[i].pos_a + pos_offset] + actions[i].text + preliminary_merge[actions[i].pos_a + pos_offset:]
pos_offset += len(actions[i].text)
offset_changes_ab.append((actions[i].pos_a, len(actions[i].text)))
# perform the "delete" part of the moves
for i in range(len(actions)):
if isinstance(actions[i], Move):
range_a0 = actions[i].range_a[0]
range_a1 = actions[i].range_a[1]
for offset_pair in offset_changes_ab:
if offset_pair[0] <= actions[i].range_a[0]:
range_a0 += offset_pair[1]
if offset_pair[0] <= actions[i].range_a[1]:
range_a1 += offset_pair[1]
offset_changes_ab.append((actions[i].range_a[0], actions[i].range_a[0] - actions[i].range_a[1]))
preliminary_merge = preliminary_merge[:range_a0] + preliminary_merge[range_a1:]
# perform the "add" part of the moves
for i in range(len(actions)):
if isinstance(actions[i], Move):
pos_a = actions[i].pos_a
for offset_pair in offset_changes_ab:
if offset_pair[0] <= actions[i].pos_a:
pos_a += offset_pair[1]
text_ancestor = actions[i].text_a
if actions[i].first:
text_a = actions[i].text_b
range_a0 = actions[i].range_a[0]
range_a1 = actions[i].range_a[1]
for offset_pair in offset_changes_b:
if offset_pair[0] <= actions[i].range_a[0]:
range_a0 += offset_pair[1]
if offset_pair[0] <= actions[i].range_a[1]:
range_a1 += offset_pair[1]
text_b = b[range_a0:range_a1]
else:
text_b = actions[i].text_b
range_a0 = actions[i].range_a[0]
range_a1 = actions[i].range_a[1]
for offset_pair in offset_changes_a:
if offset_pair[0] <= actions[i].range_a[0]:
range_a0 += offset_pair[1]
if offset_pair[0] <= actions[i].range_a[1]:
range_a1 += offset_pair[1]
text_a = a[range_a0:range_a1]
            text = merge(text_ancestor, text_a, text_b)['merge']
offset_changes_ab.append((actions[i].pos_a, len(text)))
preliminary_merge = preliminary_merge[:pos_a] + text + preliminary_merge[pos_a:]
return {'merge': preliminary_merge, 'conflicts': []}
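# Illustrative example (not part of the original file): two non-conflicting edits
# are combined, e.g.
#
#     merge("the cat sat", "the fat cat sat", "the cat sat down")
#     # -> {'merge': "the fat cat sat down", 'conflicts': []}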
|
[
"tschmorleiz@googlemail.com"
] |
tschmorleiz@googlemail.com
|
a8b5ba72724f1f79ee0c9186191a238c4b463315
|
27a4208c86693ea1da9abd485e69f5db89c9dcf9
|
/tests/irteusgl9-29_slime2-22_color.py
|
8c9f0c5764c6af10ffccd86fd7e914fbf0538957
|
[
"BSD-3-Clause"
] |
permissive
|
Affonso-Gui/euslime
|
b70eec1766680cfbbb6fd94e22d52eb3494c4002
|
cc457db9b839073fe95d01e93325afbe1f8fa996
|
refs/heads/devel
| 2023-01-23T21:59:58.582923
| 2022-12-22T10:34:07
| 2022-12-22T10:34:07
| 481,096,158
| 0
| 0
|
BSD-3-Clause
| 2022-05-12T07:52:30
| 2022-04-13T06:19:20
|
Python
|
UTF-8
|
Python
| false
| false
| 84
|
py
|
from irteusgl import irteusgl
class irteusgl_color(irteusgl):
USE_COLOR = True
|
[
"guilherme.c.affonso@gmail.com"
] |
guilherme.c.affonso@gmail.com
|
ff1fa756c47a62759082ec1444f5f81912834726
|
7d81a7bba996c4257e47f039ab2524be0fe4bea4
|
/theawesomeprice/first/MyAwesomeSite/PageScrape/migrations/0016_auto_20190612_2155.py
|
b009a44ddaf20d15af55d6720da5f461bd5ef174
|
[] |
no_license
|
KushSondhi/theawesomeprice
|
38a43f9586a69636446de0b5d055db7a657073d9
|
cddc36bf8a64db14dd801865b1c4a61f38c780c6
|
refs/heads/master
| 2022-11-05T14:08:27.967107
| 2020-07-05T00:30:35
| 2020-07-05T00:30:35
| 269,604,086
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 621
|
py
|
# Generated by Django 2.2b1 on 2019-06-12 16:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('PageScrape', '0015_auto_20190612_2153'),
]
operations = [
migrations.AlterField(
model_name='shopclues',
name='prod_specs_left',
field=models.CharField(default=None, max_length=8000, null=True),
),
migrations.AlterField(
model_name='shopclues',
name='prod_specs_right',
field=models.CharField(default=None, max_length=8000, null=True),
),
]
|
[
"root@Kush.theawesomeprice.tk"
] |
root@Kush.theawesomeprice.tk
|
e7264e259de6aa5df6e149d3921ca82ae4e1a383
|
8937741694a4be26baf0f9166c4d71ce56a0b53e
|
/npb/pitcher_stats.py
|
187a7cbb8eaa588a9f90e52c600a08182ea2f63e
|
[
"MIT"
] |
permissive
|
amacbee/xp2015_baseball_tools
|
ad83b51f4929f654db4ee45312d295b8e0856d80
|
9f1e07901145176b92cdc6136c49b7f9d3625f2e
|
refs/heads/master
| 2021-01-21T08:54:33.668145
| 2015-09-15T15:54:07
| 2015-09-15T15:54:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,657
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Shinichi Nakagawa'
from npb.data_source import DataSource
from baseball.stats import Stats
class PitcherStats(DataSource):
def _calc_ip(self, ip, delimiter='\xa0'):
"""
        Innings calculation
:param ip: (str)inning pitched
:param delimiter: default=no break space
:return: (float)ip
"""
ips = ip.split(delimiter)
if len(ips) == 1:
return float(ips[0])
else:
# 1/3 = 0.333, 2/3 = 0.666
if ips[1] == '1/3':
return float(ips[0]) + 0.333
elif ips[1] == '2/3':
return float(ips[0]) + 0.666
else:
                raise Exception('Irregular innings value: {ip}'.format(ip=ip))
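    # For example (illustrative): self._calc_ip('6\xa01/3') returns 6.333 and,
    # with delimiter=' ', self._calc_ip('6 2/3') returns 6.666.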
def get_baseballdata_row(self, row, config_path):
"""
        Output a row of data
        :param row: row data from the scraping result
        :param config_path: definition name in the config
:return: dict
"""
config_pitcher = self.config[config_path]
_stats = row
        # Split the rank and the name
rank, name = row['name'].split(':')
_stats['name'] = name
        # Recalculate innings (from fractional to decimal notation)
key_ip, type_ip = DataSource.get_column_and_data_type(
config_pitcher[PitcherStats.KEY_FORMAT.format(index=35)]
)
_stats[key_ip] = self._calc_ip(row['ip'],delimiter=' ')
        # Player name (team name)
key_name, type_name = DataSource.get_column_and_data_type(
config_pitcher[PitcherStats.KEY_FORMAT.format(index=36)]
)
_stats[key_name] = DataSource.get_player_name_and_team(name, row['team'])
        # Adam Dunn rate allowed (pitcher)
key_dunn, type_dunn = DataSource.get_column_and_data_type(
config_pitcher[PitcherStats.KEY_FORMAT.format(index=37)]
)
_stats[key_dunn] = Stats.adam_dunn_pitcher(row['hr'], row['bb'], row['hbp'], row['so'], row['bf'])
        # Rank
key_rank, type_rank = DataSource.get_column_and_data_type(
config_pitcher[PitcherStats.KEY_FORMAT.format(index=38)]
)
_stats[key_rank] = int(rank)
return _stats
def get_row(self, row, config_path):
"""
        Output a row of data
        :param row: row data from the scraping result
        :param config_path: definition name in the config
:return: dict
"""
config_pitcher = self.config[config_path]
_stats = row
        # Recalculate innings (from fractional to decimal notation)
key_ip, type_ip = DataSource.get_column_and_data_type(
config_pitcher[PitcherStats.KEY_FORMAT.format(index=29)]
)
_stats[key_ip] = self._calc_ip(row['ip'])
        # Player name (team name)
key_name, type_name = DataSource.get_column_and_data_type(
config_pitcher[PitcherStats.KEY_FORMAT.format(index=24)]
)
_stats[key_name] = DataSource.get_player_name_and_team(row['name'], row['team'])
# BB/9
key_bb9, type_bb9 = DataSource.get_column_and_data_type(
config_pitcher[PitcherStats.KEY_FORMAT.format(index=25)]
)
_stats[key_bb9] = Stats.bb9(row['bb'], _stats['calc_ip'])
# SO/9
key_so9, type_so9 = DataSource.get_column_and_data_type(
config_pitcher[PitcherStats.KEY_FORMAT.format(index=26)]
)
_stats[key_so9] = Stats.so9(row['so'], _stats['calc_ip'])
# HR/9
key_hr9, type_hr9 = DataSource.get_column_and_data_type(
config_pitcher[PitcherStats.KEY_FORMAT.format(index=27)]
)
_stats[key_hr9] = Stats.so9(row['hr'], _stats['calc_ip'])
return _stats
def get(self, ):
"""
        Fetch the pitcher stats and emit them
        :return: dict
"""
return self.get_yahoo_japan_baseball('stats_pitcher_url', 'NpbPlSt mb10', 'pitcher', 23)
def get_baseballdata(self, ):
"""
        Fetch the pitcher stats and emit them (baseballdata)
        :return: dict
"""
return super(PitcherStats, self).get_baseballdata('stats_pitcher_baseballdata_url', 'responsive', 'pitcher_baseballdata', 35)
if __name__ == '__main__':
st = PitcherStats(config_file='../config.ini')
stats = st.get()
stats2 = st.get_baseballdata()
st.excel(stats, filename=r'npb_pitcher_stats.xlsx', output_dir='../output')
st.excel(stats2, filename=r'npb_pitcher_stats_baseballdata.xlsx', output_dir='../output')
|
[
"spirits.is.my.rader@gmail.com"
] |
spirits.is.my.rader@gmail.com
|
bd374ed841b18e22b1108b9e8b2c12dac786d446
|
971e0efcc68b8f7cfb1040c38008426f7bcf9d2e
|
/tests/artificial/transf_Anscombe/trend_MovingMedian/cycle_30/ar_12/test_artificial_128_Anscombe_MovingMedian_30_12_100.py
|
ccc8235516ad5f149b1dacaabb8d05d4860cb57f
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
antoinecarme/pyaf
|
a105d172c2e7544f8d580d75f28b751351dd83b6
|
b12db77cb3fa9292e774b2b33db8ce732647c35e
|
refs/heads/master
| 2023-09-01T09:30:59.967219
| 2023-07-28T20:15:53
| 2023-07-28T20:15:53
| 70,790,978
| 457
| 77
|
BSD-3-Clause
| 2023-03-08T21:45:40
| 2016-10-13T09:30:30
|
Python
|
UTF-8
|
Python
| false
| false
| 269
|
py
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 30, transform = "Anscombe", sigma = 0.0, exog_count = 100, ar_order = 12);
|
[
"antoine.carme@laposte.net"
] |
antoine.carme@laposte.net
|
c1d9e970d2aaa6133afb7698b6b7cd8027825d57
|
bdc14f0cd470219fd62bd94d2025cccf553fd905
|
/news/serializers.py
|
a3f7eeaca3c237ade0efe48adc55640633e6c5f6
|
[] |
no_license
|
mhsniranmanesh/sporthub-core
|
b12fb4f4dc180b7fb6b3808c488952623c9364ff
|
57ef2caead9e7346a6b02e0d7df0f9b0bb8e5d2e
|
refs/heads/master
| 2020-04-18T07:41:12.531413
| 2019-01-27T20:37:11
| 2019-01-27T20:37:11
| 167,367,077
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 385
|
py
|
from rest_framework import serializers
from news.models import News, NewsTag
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = NewsTag
fields = ['name']
class NewsGetRecentSerializer(serializers.ModelSerializer):
tag = TagSerializer()
class Meta:
model = News
fields = ('uuid', 'title', 'body', 'tag','date_created')
|
[
"mhsn.iranmanesh@gmail.com"
] |
mhsn.iranmanesh@gmail.com
|
c629e0e16b3e985e068c51df8646c388991624fe
|
193247e2d09b2ceb003a07630b93b671a88aaddd
|
/reference-implementations/air-c2-cop/AirC2Weather/Tools/MultidimensionSupplementalTools/Scripts/mds/tools/get_variable_statistics_over_dimension.py
|
e7a2e4598f3ba4900ee4b06accb210a6b2c51ecd
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
Esri/defense-solutions-proofs-of-concept
|
5044c062e0bca762b95bb7dbe6aa7b27170923f1
|
b0d796e0c36f8ee3a17dfe57b19f8de930b08414
|
refs/heads/master
| 2023-08-28T10:18:33.364318
| 2022-10-26T15:27:46
| 2022-10-26T15:27:46
| 102,737,392
| 15
| 21
|
Apache-2.0
| 2022-09-30T18:11:37
| 2017-09-07T13:00:26
|
Java
|
UTF-8
|
Python
| false
| false
| 11,190
|
py
|
# -*- coding: utf-8 -*-
import arcpy
import mds
import mds.messages
import numpy
import netCDF4
import os.path
#
# LIMITATIONS:
# > Attributes:
# Attribute values are copied wholesale from the original variable. Hence,
# if these values describe the values in the new variable, i.e. as with
# valid_range, actual_range, unpacked_range, they will be incorrect and should
# be manually altered. This affects all statistics types, but is only
# problematic with the RANGE, STD, SUM, and VARIANCE.
#
class GetVariableStatisticsOverDimension(object):
def __init__(self):
"""Define the tool (tool name is the name of the class)."""
self.label = "Get Variable Statistics Over Dimension"
self.description = "Calculates statistics for a variable in a " + \
"multidimensional dataset, such as netCDF or HDF, over a specified" + \
"dimension. "
self.canRunInBackground = False
# Statistics choices
statistics_numpy = {'MAXIMUM':'max', \
'MEAN':'mean', \
'MINIMUM':'min', \
'RANGE':'ptp', \
'STD':'std', \
'SUM':'sum', \
'VARIANCE':'var'}
# List of dictionaries of statistics
# Sublist elements indices:
# 0: object
# 1: dictionary defined by 'displayname':'methodname'
# where object.methodname() is valid and displayname is what is
# shown to the user
self.statistics = [[numpy.ma, statistics_numpy]]
self.default_statistic = "MEAN"
def getParameterInfo(self):
"""Define parameter definitions"""
parameters = []
# Input parameter
parameters.append(arcpy.Parameter(
displayName="Input File or URL String",
name="in_file",
datatype=["DEFile","GPString"],
parameterType="Required",
direction="Input"))
# Variable parameter
parameters.append(arcpy.Parameter(
displayName="Variable",
name="variable",
datatype="GPString",
parameterType="Required",
direction="Input"))
parameters[-1].parameterDependencies = [parameters[-2].name]
# Dimension parameter
parameters.append(arcpy.Parameter(
displayName="Dimension",
name="dimension",
datatype="GPString",
parameterType="Required",
direction="Input"))
parameters[-1].parameterDependencies = [parameters[-2].name]
# Output parameter
parameters.append(arcpy.Parameter(
displayName="Output netCDF File",
name="out_netcdf_file",
datatype="DEFile",
multiValue=False,
parameterType="Required",
direction="Output"))
# Output variable parameter
parameters.append(arcpy.Parameter(
displayName="Output Variable Name",
name="out_variable",
datatype="GPString",
multiValue=False,
parameterType="Optional",
direction="Output"))
# Type parameter
parameters.append(arcpy.Parameter(
displayName="Statistic Type",
name="statistic_type",
datatype="GPString",
parameterType="Optional",
direction="Input"))
parameters[-1].filter.type = "ValueList"
parameters[-1].filter.list = sorted([key for stat in \
self.statistics for key in stat[1].keys()])
parameters[-1].value = self.default_statistic
return parameters
def isLicensed(self):
"""Set whether tool is licensed to execute."""
return True
def updateParameters(self, parameters):
"""Modify the values and properties of parameters before internal
validation is performed. This method is called whenever a parameter
has been changed."""
return
def updateMessages(self, parameters):
"""Modify the messages created by internal validation for each tool
parameter. This method is called after internal validation."""
input_parameter = parameters[0]
variable_parameter = parameters[1]
dimension_parameter = parameters[2]
output_parameter = parameters[3]
output_var_parameter = parameters[4]
type_parameter = parameters[5]
dataset = None
# Open dataset and populate variable names
if input_parameter.value is not None:
try:
dataset = mds.netcdf.Dataset(input_parameter.valueAsText, '')
except RuntimeError, exception:
if "No such file or directory" in str(exception) or \
"Invalid argument" in str(exception):
input_parameter.setErrorMessage(
mds.messages.INPUT_DATASET_DOES_NOT_RESOLVE_TO_FILENAME.format(
input_parameter.valueAsText))
elif "Malformed or inaccessible DAP DDS" in str(exception):
input_parameter.setErrorMessage(
mds.messages.INPUT_DATASET_URL_MALFORMED.format(
input_parameter.valueAsText))
else:
input_parameter.setErrorMessage(
mds.messages.INPUT_DATASET_GENERIC_ERROR.format(
input_parameter.valueAsText, str(exception)))
except Exception, exception:
input_parameter.setErrorMessage(
mds.messages.INPUT_DATASET_GENERIC_ERROR.format(
input_parameter.valueAsText, str(exception)))
if dataset is not None:
# Fill variable list
variable_parameter.filter.type = "ValueList"
variable_parameter.filter.list = list(dataset.variable_names())
else:
# Clear variable list if no input specified
variable_parameter.filter.type = "ValueList"
variable_parameter.filter.list = []
variable_parameter.value = ""
# Clear dimension list if no input specified
dimension_parameter.filter.type = "ValueList"
dimension_parameter.filter.list = []
dimension_parameter.value = ""
# Update dimension list
if (variable_parameter.value is not None) and (dataset is not None):
# Fill dimensions list
dimension_parameter.filter.type = "ValueList"
dimension_parameter.filter.list = list(
dataset.variable_dimension_names(variable_parameter.valueAsText))
else:
# Clear dimension list if no input specified
dimension_parameter.filter.type = "ValueList"
dimension_parameter.filter.list = []
dimension_parameter.value = ""
# Ensure an output variable name is entered
if (output_var_parameter.altered) and (output_var_parameter.value is None):
output_var_parameter.setErrorMessage(
'%s: Must input a variable name.' % output_var_parameter.name)
# Ensure output variable name is not the same as an existing variable's
if (output_var_parameter.value is not None) and \
(dataset is not None) and (output_var_parameter.value in \
dataset.variable_names()):
output_var_parameter.setErrorMessage(
'%s: Name cannot be the same as that of an existing variable.' \
% output_var_parameter.name)
# Populate a default output variable name and update it with changes
# to other parameters as long as the user hasn't modified it themself
if (variable_parameter.value is not None) and \
(dimension_parameter.value is not None) and \
(not output_var_parameter.altered):
if type_parameter.value is None:
output_var_parameter.value = variable_parameter.value + \
"_MEAN" + dimension_parameter.value
else:
output_var_parameter.value = variable_parameter.value + \
"_" + type_parameter.value + dimension_parameter.value
# Ensure output file has a .nc extension
if output_parameter.value is not None:
output_filename = output_parameter.valueAsText
if os.path.splitext(output_filename)[1] != ".nc":
output_parameter.setErrorMessage(
mds.messages.OUTPUT_FILE_EXTENSION_MUST_BE_NC)
return
# ---------------------------------------------------------
# Statistics
def calculate_statistic(self, variable, dimension, statistic):
# Apply statistic
for stat in self.statistics:
if statistic in stat[1]:
func = getattr(stat[0], stat[1][statistic])
break
else:
# Default
func = getattr(numpy.ma, 'mean')
return func(variable, axis=dimension)
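    # For example (illustrative, not part of the original tool):
    # self.calculate_statistic(data, 0, 'MAXIMUM') is equivalent to
    # numpy.ma.max(data, axis=0), per the statistics mapping built in __init__.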
# ---------------------------------------------------------
def execute(self, parameters, messages):
"""The source code of the tool."""
input_parameter = parameters[0]
variable_parameter = parameters[1]
dimension_parameter = parameters[2]
output_parameter = parameters[3]
output_var_parameter = parameters[4]
type_parameter = parameters[5]
dataset_name = input_parameter.valueAsText
# Open dataset
try:
dataset = mds.netcdf.Dataset(dataset_name,'')
except RuntimeError, exception:
# Handle errors not detected by updateMessages.
messages.addErrorMessage(str(exception))
raise arcpy.ExecuteError
# Variable of interest
var1 = dataset.variable(variable_parameter.valueAsText)
# Dimension of interest
dim1 = var1.dimensions.index(dimension_parameter.valueAsText)
# Perform statistic
result1 = self.calculate_statistic(var1[:], dim1, \
type_parameter.valueAsText)
# Collect output dataset information
output_dims = list(dataset.variable_dimension_names(
variable_parameter.valueAsText))
output_dims.remove(dimension_parameter.valueAsText)
output_dims = tuple(output_dims)
output_filename = output_parameter.valueAsText
output_name = output_var_parameter.valueAsText
# Create new dataset
dataset.xcopy(dataset.data_variable_names(), output_filename)
# Create new variable in dataset
with netCDF4.Dataset(output_filename, mode="a") as newdataset:
newvar = newdataset.createVariable(output_name, var1.dtype, \
output_dims)
for attribute_name in var1.ncattrs():
newvar.setncattr(attribute_name, var1.getncattr(attribute_name))
newvar[:] = result1
# Output new variable name
arcpy.SetParameter(5, output_name)
return
|
[
"jbayles@esri.com"
] |
jbayles@esri.com
|
3fa07e5008b46020f7867d26769152465c99df3f
|
07ffe8db66fbd50f87315df34074e20b3ce67f0e
|
/about/models.py
|
80a8e89e5bba77662e330b6c74d3a6e0a8d8a48a
|
[] |
no_license
|
jakiiii/jtro-ecommerce
|
9acc6d37797e409a79921358958e50d66f20a0b4
|
e6e5ae04c7756e99f862634ad21f1d3877b501ab
|
refs/heads/master
| 2023-01-22T09:44:47.891286
| 2020-12-01T23:32:19
| 2020-12-01T23:32:19
| 316,202,084
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 488
|
py
|
from django.db import models
from ckeditor_uploader.fields import RichTextUploadingField
from jtro_ecommerce.utils import upload_image_path
class About(models.Model):
title = models.CharField(max_length=150)
image = models.ImageField(upload_to=upload_image_path, null=True, blank=True)
description = RichTextUploadingField()
timestamp = models.DateField(auto_now_add=True)
update = models.DateField(auto_now=True)
def __str__(self):
return "ABOUT US"
|
[
"me.jaki@outlook.com"
] |
me.jaki@outlook.com
|
767c0f1bf81724fc490d700d2e61919694707e07
|
823dd69093200d01995c4067ed1ec87194246d40
|
/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py
|
8d356f570ac7130a9f2e26e6ec371238fe0143bf
|
[
"Apache-2.0"
] |
permissive
|
plamut/python-bigquery-reservation
|
910c0a5bf70f82968f3db91f3ef1d18270a84548
|
27b256440b2565369c900cd4728e38676f82fcfe
|
refs/heads/master
| 2023-07-15T05:17:15.137418
| 2021-08-13T15:28:12
| 2021-08-13T15:28:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 238,549
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.bigquery_reservation_v1.services.reservation_service import (
ReservationServiceAsyncClient,
)
from google.cloud.bigquery_reservation_v1.services.reservation_service import (
ReservationServiceClient,
)
from google.cloud.bigquery_reservation_v1.services.reservation_service import pagers
from google.cloud.bigquery_reservation_v1.services.reservation_service import transports
from google.cloud.bigquery_reservation_v1.services.reservation_service.transports.base import (
_GOOGLE_AUTH_VERSION,
)
from google.cloud.bigquery_reservation_v1.types import reservation
from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation
from google.oauth2 import service_account
from google.protobuf import any_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from google.rpc import status_pb2 # type: ignore
import google.auth
# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
# through google-api-core:
# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
reason="This test requires google-auth >= 1.25.0",
)
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert ReservationServiceClient._get_default_mtls_endpoint(None) is None
assert (
ReservationServiceClient._get_default_mtls_endpoint(api_endpoint)
== api_mtls_endpoint
)
assert (
ReservationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
ReservationServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
ReservationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert (
ReservationServiceClient._get_default_mtls_endpoint(non_googleapi)
== non_googleapi
)
@pytest.mark.parametrize(
"client_class", [ReservationServiceClient, ReservationServiceAsyncClient,]
)
def test_reservation_service_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "bigqueryreservation.googleapis.com:443"
@pytest.mark.parametrize(
"transport_class,transport_name",
[
(transports.ReservationServiceGrpcTransport, "grpc"),
(transports.ReservationServiceGrpcAsyncIOTransport, "grpc_asyncio"),
],
)
def test_reservation_service_client_service_account_always_use_jwt(
transport_class, transport_name
):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=False)
use_jwt.assert_not_called()
@pytest.mark.parametrize(
"client_class", [ReservationServiceClient, ReservationServiceAsyncClient,]
)
def test_reservation_service_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "bigqueryreservation.googleapis.com:443"
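# A minimal usage sketch (hypothetical key path): building a client from a
# service-account key file instead of application-default credentials, i.e.
# the classmethod the test above exercises with a mocked factory.
def make_client_from_key(key_path="/path/to/key.json"):
    return ReservationServiceClient.from_service_account_file(key_path)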
def test_reservation_service_client_get_transport_class():
transport = ReservationServiceClient.get_transport_class()
available_transports = [
transports.ReservationServiceGrpcTransport,
]
assert transport in available_transports
transport = ReservationServiceClient.get_transport_class("grpc")
assert transport == transports.ReservationServiceGrpcTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"),
(
ReservationServiceAsyncClient,
transports.ReservationServiceGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
@mock.patch.object(
ReservationServiceClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(ReservationServiceClient),
)
@mock.patch.object(
ReservationServiceAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(ReservationServiceAsyncClient),
)
def test_reservation_service_client_client_options(
client_class, transport_class, transport_name
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(ReservationServiceClient, "get_transport_class") as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(ReservationServiceClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(
ReservationServiceClient,
transports.ReservationServiceGrpcTransport,
"grpc",
"true",
),
(
ReservationServiceAsyncClient,
transports.ReservationServiceGrpcAsyncIOTransport,
"grpc_asyncio",
"true",
),
(
ReservationServiceClient,
transports.ReservationServiceGrpcTransport,
"grpc",
"false",
),
(
ReservationServiceAsyncClient,
transports.ReservationServiceGrpcAsyncIOTransport,
"grpc_asyncio",
"false",
),
],
)
@mock.patch.object(
ReservationServiceClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(ReservationServiceClient),
)
@mock.patch.object(
ReservationServiceAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(ReservationServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_reservation_service_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"),
(
ReservationServiceAsyncClient,
transports.ReservationServiceGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_reservation_service_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"),
(
ReservationServiceAsyncClient,
transports.ReservationServiceGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_reservation_service_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
def test_reservation_service_client_client_options_from_dict():
with mock.patch(
"google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceGrpcTransport.__init__"
) as grpc_transport:
grpc_transport.return_value = None
client = ReservationServiceClient(
client_options={"api_endpoint": "squid.clam.whelk"}
)
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
def test_create_reservation(
transport: str = "grpc", request_type=gcbr_reservation.CreateReservationRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbr_reservation.Reservation(
name="name_value", slot_capacity=1391, ignore_idle_slots=True,
)
response = client.create_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == gcbr_reservation.CreateReservationRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gcbr_reservation.Reservation)
assert response.name == "name_value"
assert response.slot_capacity == 1391
assert response.ignore_idle_slots is True
def test_create_reservation_from_dict():
test_create_reservation(request_type=dict)
def test_create_reservation_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_reservation), "__call__"
) as call:
client.create_reservation()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == gcbr_reservation.CreateReservationRequest()
@pytest.mark.asyncio
async def test_create_reservation_async(
transport: str = "grpc_asyncio",
request_type=gcbr_reservation.CreateReservationRequest,
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcbr_reservation.Reservation(
name="name_value", slot_capacity=1391, ignore_idle_slots=True,
)
)
response = await client.create_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == gcbr_reservation.CreateReservationRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gcbr_reservation.Reservation)
assert response.name == "name_value"
assert response.slot_capacity == 1391
assert response.ignore_idle_slots is True
@pytest.mark.asyncio
async def test_create_reservation_async_from_dict():
await test_create_reservation_async(request_type=dict)
def test_create_reservation_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcbr_reservation.CreateReservationRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_reservation), "__call__"
) as call:
call.return_value = gcbr_reservation.Reservation()
client.create_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_reservation_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcbr_reservation.CreateReservationRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_reservation), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcbr_reservation.Reservation()
)
await client.create_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_reservation_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbr_reservation.Reservation()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_reservation(
parent="parent_value",
reservation=gcbr_reservation.Reservation(name="name_value"),
reservation_id="reservation_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].reservation == gcbr_reservation.Reservation(name="name_value")
assert args[0].reservation_id == "reservation_id_value"
def test_create_reservation_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_reservation(
gcbr_reservation.CreateReservationRequest(),
parent="parent_value",
reservation=gcbr_reservation.Reservation(name="name_value"),
reservation_id="reservation_id_value",
)
@pytest.mark.asyncio
async def test_create_reservation_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbr_reservation.Reservation()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcbr_reservation.Reservation()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_reservation(
parent="parent_value",
reservation=gcbr_reservation.Reservation(name="name_value"),
reservation_id="reservation_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].reservation == gcbr_reservation.Reservation(name="name_value")
assert args[0].reservation_id == "reservation_id_value"
@pytest.mark.asyncio
async def test_create_reservation_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_reservation(
gcbr_reservation.CreateReservationRequest(),
parent="parent_value",
reservation=gcbr_reservation.Reservation(name="name_value"),
reservation_id="reservation_id_value",
)
def test_list_reservations(
transport: str = "grpc", request_type=reservation.ListReservationsRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.ListReservationsResponse(
next_page_token="next_page_token_value",
)
response = client.list_reservations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.ListReservationsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListReservationsPager)
assert response.next_page_token == "next_page_token_value"
def test_list_reservations_from_dict():
test_list_reservations(request_type=dict)
def test_list_reservations_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations), "__call__"
) as call:
client.list_reservations()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.ListReservationsRequest()
@pytest.mark.asyncio
async def test_list_reservations_async(
transport: str = "grpc_asyncio", request_type=reservation.ListReservationsRequest
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.ListReservationsResponse(
next_page_token="next_page_token_value",
)
)
response = await client.list_reservations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.ListReservationsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListReservationsAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_reservations_async_from_dict():
await test_list_reservations_async(request_type=dict)
def test_list_reservations_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.ListReservationsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations), "__call__"
) as call:
call.return_value = reservation.ListReservationsResponse()
client.list_reservations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_reservations_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.ListReservationsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.ListReservationsResponse()
)
await client.list_reservations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_reservations_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.ListReservationsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_reservations(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
def test_list_reservations_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_reservations(
reservation.ListReservationsRequest(), parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_reservations_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.ListReservationsResponse()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.ListReservationsResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_reservations(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_reservations_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_reservations(
reservation.ListReservationsRequest(), parent="parent_value",
)
def test_list_reservations_pager():
    client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.ListReservationsResponse(
reservations=[
reservation.Reservation(),
reservation.Reservation(),
reservation.Reservation(),
],
next_page_token="abc",
),
reservation.ListReservationsResponse(
reservations=[], next_page_token="def",
),
reservation.ListReservationsResponse(
reservations=[reservation.Reservation(),], next_page_token="ghi",
),
reservation.ListReservationsResponse(
reservations=[reservation.Reservation(), reservation.Reservation(),],
),
RuntimeError,
)
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
pager = client.list_reservations(request={})
assert pager._metadata == metadata
        results = list(pager)
assert len(results) == 6
assert all(isinstance(i, reservation.Reservation) for i in results)
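# A minimal usage sketch (hypothetical parent resource): iterating the pager
# returned by list_reservations transparently fetches successive pages, which
# is exactly the behavior the pager tests above and below exercise.
def print_all_reservations(parent="projects/my-project/locations/US"):
    client = ReservationServiceClient()  # uses ambient credentials
    for res in client.list_reservations(parent=parent):
        print(res.name, res.slot_capacity)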
def test_list_reservations_pages():
    client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.ListReservationsResponse(
reservations=[
reservation.Reservation(),
reservation.Reservation(),
reservation.Reservation(),
],
next_page_token="abc",
),
reservation.ListReservationsResponse(
reservations=[], next_page_token="def",
),
reservation.ListReservationsResponse(
reservations=[reservation.Reservation(),], next_page_token="ghi",
),
reservation.ListReservationsResponse(
reservations=[reservation.Reservation(), reservation.Reservation(),],
),
RuntimeError,
)
pages = list(client.list_reservations(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_reservations_async_pager():
client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.ListReservationsResponse(
reservations=[
reservation.Reservation(),
reservation.Reservation(),
reservation.Reservation(),
],
next_page_token="abc",
),
reservation.ListReservationsResponse(
reservations=[], next_page_token="def",
),
reservation.ListReservationsResponse(
reservations=[reservation.Reservation(),], next_page_token="ghi",
),
reservation.ListReservationsResponse(
reservations=[reservation.Reservation(), reservation.Reservation(),],
),
RuntimeError,
)
async_pager = await client.list_reservations(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, reservation.Reservation) for i in responses)
@pytest.mark.asyncio
async def test_list_reservations_async_pages():
client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_reservations),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.ListReservationsResponse(
reservations=[
reservation.Reservation(),
reservation.Reservation(),
reservation.Reservation(),
],
next_page_token="abc",
),
reservation.ListReservationsResponse(
reservations=[], next_page_token="def",
),
reservation.ListReservationsResponse(
reservations=[reservation.Reservation(),], next_page_token="ghi",
),
reservation.ListReservationsResponse(
reservations=[reservation.Reservation(), reservation.Reservation(),],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_reservations(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
def test_get_reservation(
transport: str = "grpc", request_type=reservation.GetReservationRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.Reservation(
name="name_value", slot_capacity=1391, ignore_idle_slots=True,
)
response = client.get_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.GetReservationRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.Reservation)
assert response.name == "name_value"
assert response.slot_capacity == 1391
assert response.ignore_idle_slots is True
def test_get_reservation_from_dict():
test_get_reservation(request_type=dict)
def test_get_reservation_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
client.get_reservation()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.GetReservationRequest()
@pytest.mark.asyncio
async def test_get_reservation_async(
transport: str = "grpc_asyncio", request_type=reservation.GetReservationRequest
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.Reservation(
name="name_value", slot_capacity=1391, ignore_idle_slots=True,
)
)
response = await client.get_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.GetReservationRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.Reservation)
assert response.name == "name_value"
assert response.slot_capacity == 1391
assert response.ignore_idle_slots is True
@pytest.mark.asyncio
async def test_get_reservation_async_from_dict():
await test_get_reservation_async(request_type=dict)
def test_get_reservation_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.GetReservationRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
call.return_value = reservation.Reservation()
client.get_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_reservation_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.GetReservationRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.Reservation()
)
await client.get_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_reservation_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.Reservation()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_reservation(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_get_reservation_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_reservation(
reservation.GetReservationRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_reservation_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.Reservation()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.Reservation()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_reservation(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_reservation_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_reservation(
reservation.GetReservationRequest(), name="name_value",
)
def test_delete_reservation(
transport: str = "grpc", request_type=reservation.DeleteReservationRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.delete_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.DeleteReservationRequest()
# Establish that the response is the type that we expect.
assert response is None
def test_delete_reservation_from_dict():
test_delete_reservation(request_type=dict)
def test_delete_reservation_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_reservation), "__call__"
) as call:
client.delete_reservation()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.DeleteReservationRequest()
@pytest.mark.asyncio
async def test_delete_reservation_async(
transport: str = "grpc_asyncio", request_type=reservation.DeleteReservationRequest
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.DeleteReservationRequest()
# Establish that the response is the type that we expect.
assert response is None
@pytest.mark.asyncio
async def test_delete_reservation_async_from_dict():
await test_delete_reservation_async(request_type=dict)
def test_delete_reservation_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.DeleteReservationRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_reservation), "__call__"
) as call:
call.return_value = None
client.delete_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_reservation_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.DeleteReservationRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_reservation), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_reservation_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_reservation(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_delete_reservation_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_reservation(
reservation.DeleteReservationRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_delete_reservation_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_reservation(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_reservation_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_reservation(
reservation.DeleteReservationRequest(), name="name_value",
)
def test_update_reservation(
transport: str = "grpc", request_type=gcbr_reservation.UpdateReservationRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbr_reservation.Reservation(
name="name_value", slot_capacity=1391, ignore_idle_slots=True,
)
response = client.update_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == gcbr_reservation.UpdateReservationRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gcbr_reservation.Reservation)
assert response.name == "name_value"
assert response.slot_capacity == 1391
assert response.ignore_idle_slots is True
def test_update_reservation_from_dict():
test_update_reservation(request_type=dict)
def test_update_reservation_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_reservation), "__call__"
) as call:
client.update_reservation()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == gcbr_reservation.UpdateReservationRequest()
@pytest.mark.asyncio
async def test_update_reservation_async(
transport: str = "grpc_asyncio",
request_type=gcbr_reservation.UpdateReservationRequest,
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcbr_reservation.Reservation(
name="name_value", slot_capacity=1391, ignore_idle_slots=True,
)
)
response = await client.update_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == gcbr_reservation.UpdateReservationRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gcbr_reservation.Reservation)
assert response.name == "name_value"
assert response.slot_capacity == 1391
assert response.ignore_idle_slots is True
@pytest.mark.asyncio
async def test_update_reservation_async_from_dict():
await test_update_reservation_async(request_type=dict)
def test_update_reservation_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcbr_reservation.UpdateReservationRequest()
request.reservation.name = "reservation.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_reservation), "__call__"
) as call:
call.return_value = gcbr_reservation.Reservation()
client.update_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "reservation.name=reservation.name/value",) in kw[
"metadata"
]
@pytest.mark.asyncio
async def test_update_reservation_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcbr_reservation.UpdateReservationRequest()
request.reservation.name = "reservation.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_reservation), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcbr_reservation.Reservation()
)
await client.update_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "reservation.name=reservation.name/value",) in kw[
"metadata"
]
def test_update_reservation_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbr_reservation.Reservation()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_reservation(
reservation=gcbr_reservation.Reservation(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].reservation == gcbr_reservation.Reservation(name="name_value")
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
def test_update_reservation_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_reservation(
gcbr_reservation.UpdateReservationRequest(),
reservation=gcbr_reservation.Reservation(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_reservation_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gcbr_reservation.Reservation()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcbr_reservation.Reservation()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_reservation(
reservation=gcbr_reservation.Reservation(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].reservation == gcbr_reservation.Reservation(name="name_value")
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_reservation_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_reservation(
gcbr_reservation.UpdateReservationRequest(),
reservation=gcbr_reservation.Reservation(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
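# A minimal usage sketch (hypothetical reservation name): a FieldMask limits
# the update to the listed fields, matching the flattened-call tests above.
# The function name and arguments are illustrative, not part of the library.
def bump_slot_capacity(name, slots):
    client = ReservationServiceClient()
    res = gcbr_reservation.Reservation(name=name, slot_capacity=slots)
    mask = field_mask_pb2.FieldMask(paths=["slot_capacity"])
    return client.update_reservation(reservation=res, update_mask=mask)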
def test_create_capacity_commitment(
transport: str = "grpc", request_type=reservation.CreateCapacityCommitmentRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.CapacityCommitment(
name="name_value",
slot_count=1098,
plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
state=reservation.CapacityCommitment.State.PENDING,
renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
)
response = client.create_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.CreateCapacityCommitmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.CapacityCommitment)
assert response.name == "name_value"
assert response.slot_count == 1098
assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
assert response.state == reservation.CapacityCommitment.State.PENDING
assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
def test_create_capacity_commitment_from_dict():
test_create_capacity_commitment(request_type=dict)
def test_create_capacity_commitment_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_capacity_commitment), "__call__"
) as call:
client.create_capacity_commitment()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.CreateCapacityCommitmentRequest()
@pytest.mark.asyncio
async def test_create_capacity_commitment_async(
transport: str = "grpc_asyncio",
request_type=reservation.CreateCapacityCommitmentRequest,
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment(
name="name_value",
slot_count=1098,
plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
state=reservation.CapacityCommitment.State.PENDING,
renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
)
)
response = await client.create_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.CreateCapacityCommitmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.CapacityCommitment)
assert response.name == "name_value"
assert response.slot_count == 1098
assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
assert response.state == reservation.CapacityCommitment.State.PENDING
assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
@pytest.mark.asyncio
async def test_create_capacity_commitment_async_from_dict():
await test_create_capacity_commitment_async(request_type=dict)
def test_create_capacity_commitment_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.CreateCapacityCommitmentRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_capacity_commitment), "__call__"
) as call:
call.return_value = reservation.CapacityCommitment()
client.create_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_capacity_commitment_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.CreateCapacityCommitmentRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_capacity_commitment), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment()
)
await client.create_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_capacity_commitment_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.CapacityCommitment()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_capacity_commitment(
parent="parent_value",
capacity_commitment=reservation.CapacityCommitment(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].capacity_commitment == reservation.CapacityCommitment(
name="name_value"
)
def test_create_capacity_commitment_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_capacity_commitment(
reservation.CreateCapacityCommitmentRequest(),
parent="parent_value",
capacity_commitment=reservation.CapacityCommitment(name="name_value"),
)
@pytest.mark.asyncio
async def test_create_capacity_commitment_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_capacity_commitment(
parent="parent_value",
capacity_commitment=reservation.CapacityCommitment(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].capacity_commitment == reservation.CapacityCommitment(
name="name_value"
)
@pytest.mark.asyncio
async def test_create_capacity_commitment_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_capacity_commitment(
reservation.CreateCapacityCommitmentRequest(),
parent="parent_value",
capacity_commitment=reservation.CapacityCommitment(name="name_value"),
)
def test_list_capacity_commitments(
transport: str = "grpc", request_type=reservation.ListCapacityCommitmentsRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.ListCapacityCommitmentsResponse(
next_page_token="next_page_token_value",
)
response = client.list_capacity_commitments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.ListCapacityCommitmentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListCapacityCommitmentsPager)
assert response.next_page_token == "next_page_token_value"
def test_list_capacity_commitments_from_dict():
test_list_capacity_commitments(request_type=dict)
def test_list_capacity_commitments_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments), "__call__"
) as call:
client.list_capacity_commitments()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.ListCapacityCommitmentsRequest()
@pytest.mark.asyncio
async def test_list_capacity_commitments_async(
transport: str = "grpc_asyncio",
request_type=reservation.ListCapacityCommitmentsRequest,
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.ListCapacityCommitmentsResponse(
next_page_token="next_page_token_value",
)
)
response = await client.list_capacity_commitments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.ListCapacityCommitmentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListCapacityCommitmentsAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_capacity_commitments_async_from_dict():
await test_list_capacity_commitments_async(request_type=dict)
def test_list_capacity_commitments_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.ListCapacityCommitmentsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments), "__call__"
) as call:
call.return_value = reservation.ListCapacityCommitmentsResponse()
client.list_capacity_commitments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_capacity_commitments_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.ListCapacityCommitmentsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.ListCapacityCommitmentsResponse()
)
await client.list_capacity_commitments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_capacity_commitments_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.ListCapacityCommitmentsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_capacity_commitments(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
def test_list_capacity_commitments_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_capacity_commitments(
reservation.ListCapacityCommitmentsRequest(), parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_capacity_commitments_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.ListCapacityCommitmentsResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_capacity_commitments(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_capacity_commitments_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_capacity_commitments(
reservation.ListCapacityCommitmentsRequest(), parent="parent_value",
)
def test_list_capacity_commitments_pager():
    client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments), "__call__"
) as call:
# Set the response to a series of pages.
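        # The trailing RuntimeError is a sentinel: iteration fails loudly if
        # the pager requests more pages than the four configured below.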
call.side_effect = (
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
],
next_page_token="abc",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[], next_page_token="def",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[reservation.CapacityCommitment(),],
next_page_token="ghi",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
],
),
RuntimeError,
)
        metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
pager = client.list_capacity_commitments(request={})
assert pager._metadata == metadata
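        # The pager keeps the per-call metadata so it can replay the routing
        # header on every follow-up page request it issues.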
        results = list(pager)
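        # The iterator flattens 3 + 0 + 1 + 2 commitments from the four pages.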
assert len(results) == 6
assert all(isinstance(i, reservation.CapacityCommitment) for i in results)
def test_list_capacity_commitments_pages():
    client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
],
next_page_token="abc",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[], next_page_token="def",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[reservation.CapacityCommitment(),],
next_page_token="ghi",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
],
),
RuntimeError,
)
pages = list(client.list_capacity_commitments(request={}).pages)
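        # An empty next_page_token on the final raw page signals the end of
        # the listing.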
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_capacity_commitments_async_pager():
client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments),
"__call__",
new_callable=mock.AsyncMock,
) as call:
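        # new_callable=mock.AsyncMock makes the patched __call__ awaitable,
        # which the async pager needs when it fetches each page.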
# Set the response to a series of pages.
call.side_effect = (
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
],
next_page_token="abc",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[], next_page_token="def",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[reservation.CapacityCommitment(),],
next_page_token="ghi",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
],
),
RuntimeError,
)
async_pager = await client.list_capacity_commitments(request={},)
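        # Before iteration, the async pager reflects the first page's
        # next_page_token.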
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, reservation.CapacityCommitment) for i in responses)
@pytest.mark.asyncio
async def test_list_capacity_commitments_async_pages():
client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_capacity_commitments),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
],
next_page_token="abc",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[], next_page_token="def",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[reservation.CapacityCommitment(),],
next_page_token="ghi",
),
reservation.ListCapacityCommitmentsResponse(
capacity_commitments=[
reservation.CapacityCommitment(),
reservation.CapacityCommitment(),
],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_capacity_commitments(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
def test_get_capacity_commitment(
transport: str = "grpc", request_type=reservation.GetCapacityCommitmentRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.CapacityCommitment(
name="name_value",
slot_count=1098,
plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
state=reservation.CapacityCommitment.State.PENDING,
renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
)
response = client.get_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.GetCapacityCommitmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.CapacityCommitment)
assert response.name == "name_value"
assert response.slot_count == 1098
assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
assert response.state == reservation.CapacityCommitment.State.PENDING
assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
def test_get_capacity_commitment_from_dict():
test_get_capacity_commitment(request_type=dict)
def test_get_capacity_commitment_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_capacity_commitment), "__call__"
) as call:
client.get_capacity_commitment()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.GetCapacityCommitmentRequest()
@pytest.mark.asyncio
async def test_get_capacity_commitment_async(
transport: str = "grpc_asyncio",
request_type=reservation.GetCapacityCommitmentRequest,
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment(
name="name_value",
slot_count=1098,
plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
state=reservation.CapacityCommitment.State.PENDING,
renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
)
)
response = await client.get_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.GetCapacityCommitmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.CapacityCommitment)
assert response.name == "name_value"
assert response.slot_count == 1098
assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
assert response.state == reservation.CapacityCommitment.State.PENDING
assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
@pytest.mark.asyncio
async def test_get_capacity_commitment_async_from_dict():
await test_get_capacity_commitment_async(request_type=dict)
def test_get_capacity_commitment_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.GetCapacityCommitmentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_capacity_commitment), "__call__"
) as call:
call.return_value = reservation.CapacityCommitment()
client.get_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_capacity_commitment_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.GetCapacityCommitmentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_capacity_commitment), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment()
)
await client.get_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_capacity_commitment_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.CapacityCommitment()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_capacity_commitment(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_get_capacity_commitment_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_capacity_commitment(
reservation.GetCapacityCommitmentRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_capacity_commitment_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_capacity_commitment(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_capacity_commitment_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_capacity_commitment(
reservation.GetCapacityCommitmentRequest(), name="name_value",
)
def test_delete_capacity_commitment(
transport: str = "grpc", request_type=reservation.DeleteCapacityCommitmentRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.delete_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.DeleteCapacityCommitmentRequest()
# Establish that the response is the type that we expect.
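    # (Delete RPCs return google.protobuf.Empty, which the generated client
    # surfaces to the caller as None.)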
assert response is None
def test_delete_capacity_commitment_from_dict():
test_delete_capacity_commitment(request_type=dict)
def test_delete_capacity_commitment_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_capacity_commitment), "__call__"
) as call:
client.delete_capacity_commitment()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.DeleteCapacityCommitmentRequest()
@pytest.mark.asyncio
async def test_delete_capacity_commitment_async(
transport: str = "grpc_asyncio",
request_type=reservation.DeleteCapacityCommitmentRequest,
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.DeleteCapacityCommitmentRequest()
# Establish that the response is the type that we expect.
assert response is None
@pytest.mark.asyncio
async def test_delete_capacity_commitment_async_from_dict():
await test_delete_capacity_commitment_async(request_type=dict)
def test_delete_capacity_commitment_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.DeleteCapacityCommitmentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_capacity_commitment), "__call__"
) as call:
call.return_value = None
client.delete_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_capacity_commitment_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.DeleteCapacityCommitmentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_capacity_commitment), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_capacity_commitment_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_capacity_commitment(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_delete_capacity_commitment_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_capacity_commitment(
reservation.DeleteCapacityCommitmentRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_delete_capacity_commitment_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_capacity_commitment(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_capacity_commitment_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_capacity_commitment(
reservation.DeleteCapacityCommitmentRequest(), name="name_value",
)
def test_update_capacity_commitment(
transport: str = "grpc", request_type=reservation.UpdateCapacityCommitmentRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.CapacityCommitment(
name="name_value",
slot_count=1098,
plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
state=reservation.CapacityCommitment.State.PENDING,
renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
)
response = client.update_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.UpdateCapacityCommitmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.CapacityCommitment)
assert response.name == "name_value"
assert response.slot_count == 1098
assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
assert response.state == reservation.CapacityCommitment.State.PENDING
assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
def test_update_capacity_commitment_from_dict():
test_update_capacity_commitment(request_type=dict)
def test_update_capacity_commitment_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_capacity_commitment), "__call__"
) as call:
client.update_capacity_commitment()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.UpdateCapacityCommitmentRequest()
@pytest.mark.asyncio
async def test_update_capacity_commitment_async(
transport: str = "grpc_asyncio",
request_type=reservation.UpdateCapacityCommitmentRequest,
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment(
name="name_value",
slot_count=1098,
plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
state=reservation.CapacityCommitment.State.PENDING,
renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
)
)
response = await client.update_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.UpdateCapacityCommitmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.CapacityCommitment)
assert response.name == "name_value"
assert response.slot_count == 1098
assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
assert response.state == reservation.CapacityCommitment.State.PENDING
assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
@pytest.mark.asyncio
async def test_update_capacity_commitment_async_from_dict():
await test_update_capacity_commitment_async(request_type=dict)
def test_update_capacity_commitment_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.UpdateCapacityCommitmentRequest()
request.capacity_commitment.name = "capacity_commitment.name/value"
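    # For a nested request field, the routing key uses the dotted field path
    # (capacity_commitment.name) rather than a top-level field name.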
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_capacity_commitment), "__call__"
) as call:
call.return_value = reservation.CapacityCommitment()
client.update_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"capacity_commitment.name=capacity_commitment.name/value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_capacity_commitment_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.UpdateCapacityCommitmentRequest()
request.capacity_commitment.name = "capacity_commitment.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_capacity_commitment), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment()
)
await client.update_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"capacity_commitment.name=capacity_commitment.name/value",
) in kw["metadata"]
def test_update_capacity_commitment_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.CapacityCommitment()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_capacity_commitment(
capacity_commitment=reservation.CapacityCommitment(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].capacity_commitment == reservation.CapacityCommitment(
name="name_value"
)
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
def test_update_capacity_commitment_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_capacity_commitment(
reservation.UpdateCapacityCommitmentRequest(),
capacity_commitment=reservation.CapacityCommitment(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_capacity_commitment_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_capacity_commitment(
capacity_commitment=reservation.CapacityCommitment(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].capacity_commitment == reservation.CapacityCommitment(
name="name_value"
)
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_capacity_commitment_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_capacity_commitment(
reservation.UpdateCapacityCommitmentRequest(),
capacity_commitment=reservation.CapacityCommitment(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
def test_split_capacity_commitment(
transport: str = "grpc", request_type=reservation.SplitCapacityCommitmentRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.split_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.SplitCapacityCommitmentResponse()
response = client.split_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.SplitCapacityCommitmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.SplitCapacityCommitmentResponse)
def test_split_capacity_commitment_from_dict():
test_split_capacity_commitment(request_type=dict)
def test_split_capacity_commitment_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.split_capacity_commitment), "__call__"
) as call:
client.split_capacity_commitment()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.SplitCapacityCommitmentRequest()
@pytest.mark.asyncio
async def test_split_capacity_commitment_async(
transport: str = "grpc_asyncio",
request_type=reservation.SplitCapacityCommitmentRequest,
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.split_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.SplitCapacityCommitmentResponse()
)
response = await client.split_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.SplitCapacityCommitmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.SplitCapacityCommitmentResponse)
@pytest.mark.asyncio
async def test_split_capacity_commitment_async_from_dict():
await test_split_capacity_commitment_async(request_type=dict)
def test_split_capacity_commitment_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.SplitCapacityCommitmentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.split_capacity_commitment), "__call__"
) as call:
call.return_value = reservation.SplitCapacityCommitmentResponse()
client.split_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_split_capacity_commitment_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.SplitCapacityCommitmentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.split_capacity_commitment), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.SplitCapacityCommitmentResponse()
)
await client.split_capacity_commitment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_split_capacity_commitment_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.split_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.SplitCapacityCommitmentResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.split_capacity_commitment(
name="name_value", slot_count=1098,
)
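        # slot_count controls how the commitment's slots are divided between
        # the two commitments produced by the split (see
        # SplitCapacityCommitmentRequest).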
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert args[0].slot_count == 1098
def test_split_capacity_commitment_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.split_capacity_commitment(
reservation.SplitCapacityCommitmentRequest(),
name="name_value",
slot_count=1098,
)
@pytest.mark.asyncio
async def test_split_capacity_commitment_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.split_capacity_commitment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.SplitCapacityCommitmentResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.split_capacity_commitment(
name="name_value", slot_count=1098,
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert args[0].slot_count == 1098
@pytest.mark.asyncio
async def test_split_capacity_commitment_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.split_capacity_commitment(
reservation.SplitCapacityCommitmentRequest(),
name="name_value",
slot_count=1098,
)
def test_merge_capacity_commitments(
transport: str = "grpc", request_type=reservation.MergeCapacityCommitmentsRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.merge_capacity_commitments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.CapacityCommitment(
name="name_value",
slot_count=1098,
plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
state=reservation.CapacityCommitment.State.PENDING,
renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
)
response = client.merge_capacity_commitments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.MergeCapacityCommitmentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.CapacityCommitment)
assert response.name == "name_value"
assert response.slot_count == 1098
assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
assert response.state == reservation.CapacityCommitment.State.PENDING
assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
def test_merge_capacity_commitments_from_dict():
test_merge_capacity_commitments(request_type=dict)
def test_merge_capacity_commitments_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.merge_capacity_commitments), "__call__"
) as call:
client.merge_capacity_commitments()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.MergeCapacityCommitmentsRequest()
@pytest.mark.asyncio
async def test_merge_capacity_commitments_async(
transport: str = "grpc_asyncio",
request_type=reservation.MergeCapacityCommitmentsRequest,
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.merge_capacity_commitments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment(
name="name_value",
slot_count=1098,
plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
state=reservation.CapacityCommitment.State.PENDING,
renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
)
)
response = await client.merge_capacity_commitments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.MergeCapacityCommitmentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.CapacityCommitment)
assert response.name == "name_value"
assert response.slot_count == 1098
assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
assert response.state == reservation.CapacityCommitment.State.PENDING
assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
@pytest.mark.asyncio
async def test_merge_capacity_commitments_async_from_dict():
await test_merge_capacity_commitments_async(request_type=dict)
def test_merge_capacity_commitments_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.MergeCapacityCommitmentsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.merge_capacity_commitments), "__call__"
) as call:
call.return_value = reservation.CapacityCommitment()
client.merge_capacity_commitments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_merge_capacity_commitments_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.MergeCapacityCommitmentsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.merge_capacity_commitments), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.CapacityCommitment()
)
await client.merge_capacity_commitments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_merge_capacity_commitments_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.merge_capacity_commitments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.CapacityCommitment()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.merge_capacity_commitments(
parent="parent_value",
capacity_commitment_ids=["capacity_commitment_ids_value"],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].capacity_commitment_ids == ["capacity_commitment_ids_value"]
def test_merge_capacity_commitments_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.merge_capacity_commitments(
reservation.MergeCapacityCommitmentsRequest(),
parent="parent_value",
capacity_commitment_ids=["capacity_commitment_ids_value"],
)
@pytest.mark.asyncio
async def test_merge_capacity_commitments_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.merge_capacity_commitments), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.merge_capacity_commitments(
parent="parent_value",
capacity_commitment_ids=["capacity_commitment_ids_value"],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].capacity_commitment_ids == ["capacity_commitment_ids_value"]
@pytest.mark.asyncio
async def test_merge_capacity_commitments_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.merge_capacity_commitments(
reservation.MergeCapacityCommitmentsRequest(),
parent="parent_value",
capacity_commitment_ids=["capacity_commitment_ids_value"],
)
def test_create_assignment(
transport: str = "grpc", request_type=reservation.CreateAssignmentRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_assignment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.Assignment(
name="name_value",
assignee="assignee_value",
job_type=reservation.Assignment.JobType.PIPELINE,
state=reservation.Assignment.State.PENDING,
)
response = client.create_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.CreateAssignmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.Assignment)
assert response.name == "name_value"
assert response.assignee == "assignee_value"
assert response.job_type == reservation.Assignment.JobType.PIPELINE
assert response.state == reservation.Assignment.State.PENDING
def test_create_assignment_from_dict():
test_create_assignment(request_type=dict)
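# Illustrative sketch (not part of the generated suite): the *_from_dict
# variants above work because proto-plus request messages accept a mapping
# and keyword fields interchangeably (assuming proto-plus's mapping
# constructor); both forms build equal messages.
def test_request_from_dict_equivalence_sketch():
    from_kwargs = reservation.CreateAssignmentRequest(parent="parent_value")
    from_dict = reservation.CreateAssignmentRequest({"parent": "parent_value"})
    assert from_kwargs == from_dict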
def test_create_assignment_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_assignment), "__call__"
) as call:
client.create_assignment()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.CreateAssignmentRequest()
@pytest.mark.asyncio
async def test_create_assignment_async(
transport: str = "grpc_asyncio", request_type=reservation.CreateAssignmentRequest
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_assignment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.Assignment(
name="name_value",
assignee="assignee_value",
job_type=reservation.Assignment.JobType.PIPELINE,
state=reservation.Assignment.State.PENDING,
)
)
response = await client.create_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.CreateAssignmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.Assignment)
assert response.name == "name_value"
assert response.assignee == "assignee_value"
assert response.job_type == reservation.Assignment.JobType.PIPELINE
assert response.state == reservation.Assignment.State.PENDING
@pytest.mark.asyncio
async def test_create_assignment_async_from_dict():
await test_create_assignment_async(request_type=dict)
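# Illustrative sketch (not part of the generated suite): FakeUnaryUnaryCall
# wraps a plain response so it can be awaited like a real async gRPC call,
# which is how the async tests above fake unary RPC results.
@pytest.mark.asyncio
async def test_fake_unary_unary_call_sketch():
    fake_call = grpc_helpers_async.FakeUnaryUnaryCall(
        reservation.Assignment(name="name_value")
    )
    response = await fake_call
    assert response.name == "name_value"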
def test_create_assignment_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.CreateAssignmentRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_assignment), "__call__"
) as call:
call.return_value = reservation.Assignment()
client.create_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_assignment_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.CreateAssignmentRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_assignment), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.Assignment()
)
await client.create_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_assignment_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_assignment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.Assignment()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_assignment(
parent="parent_value", assignment=reservation.Assignment(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].assignment == reservation.Assignment(name="name_value")
def test_create_assignment_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_assignment(
reservation.CreateAssignmentRequest(),
parent="parent_value",
assignment=reservation.Assignment(name="name_value"),
)
@pytest.mark.asyncio
async def test_create_assignment_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_assignment), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Assignment()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_assignment(
parent="parent_value", assignment=reservation.Assignment(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].assignment == reservation.Assignment(name="name_value")
@pytest.mark.asyncio
async def test_create_assignment_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_assignment(
reservation.CreateAssignmentRequest(),
parent="parent_value",
assignment=reservation.Assignment(name="name_value"),
)
def test_list_assignments(
transport: str = "grpc", request_type=reservation.ListAssignmentsRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.ListAssignmentsResponse(
next_page_token="next_page_token_value",
)
response = client.list_assignments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.ListAssignmentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListAssignmentsPager)
assert response.next_page_token == "next_page_token_value"
def test_list_assignments_from_dict():
test_list_assignments(request_type=dict)
def test_list_assignments_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
client.list_assignments()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.ListAssignmentsRequest()
@pytest.mark.asyncio
async def test_list_assignments_async(
transport: str = "grpc_asyncio", request_type=reservation.ListAssignmentsRequest
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.ListAssignmentsResponse(
next_page_token="next_page_token_value",
)
)
response = await client.list_assignments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.ListAssignmentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListAssignmentsAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_assignments_async_from_dict():
await test_list_assignments_async(request_type=dict)
def test_list_assignments_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.ListAssignmentsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
call.return_value = reservation.ListAssignmentsResponse()
client.list_assignments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_assignments_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.ListAssignmentsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.ListAssignmentsResponse()
)
await client.list_assignments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_assignments_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.ListAssignmentsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_assignments(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
def test_list_assignments_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_assignments(
reservation.ListAssignmentsRequest(), parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_assignments_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.ListAssignmentsResponse()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_assignments(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_assignments_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_assignments(
reservation.ListAssignmentsRequest(), parent="parent_value",
)
def test_list_assignments_pager():
    client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.ListAssignmentsResponse(
assignments=[
reservation.Assignment(),
reservation.Assignment(),
reservation.Assignment(),
],
next_page_token="abc",
),
reservation.ListAssignmentsResponse(assignments=[], next_page_token="def",),
reservation.ListAssignmentsResponse(
assignments=[reservation.Assignment(),], next_page_token="ghi",
),
reservation.ListAssignmentsResponse(
assignments=[reservation.Assignment(), reservation.Assignment(),],
),
RuntimeError,
)
        metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
pager = client.list_assignments(request={})
assert pager._metadata == metadata
        results = list(pager)
assert len(results) == 6
assert all(isinstance(i, reservation.Assignment) for i in results)
def test_list_assignments_pages():
    client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.ListAssignmentsResponse(
assignments=[
reservation.Assignment(),
reservation.Assignment(),
reservation.Assignment(),
],
next_page_token="abc",
),
reservation.ListAssignmentsResponse(assignments=[], next_page_token="def",),
reservation.ListAssignmentsResponse(
assignments=[reservation.Assignment(),], next_page_token="ghi",
),
reservation.ListAssignmentsResponse(
assignments=[reservation.Assignment(), reservation.Assignment(),],
),
RuntimeError,
)
pages = list(client.list_assignments(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
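# Illustrative sketch (not part of the generated suite): pager.pages (used
# above) yields the raw ListAssignmentsResponse objects, re-invoking the
# mocked RPC until an empty next_page_token is returned.
def test_list_assignments_manual_pages_sketch():
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
        call.side_effect = (
            reservation.ListAssignmentsResponse(
                assignments=[reservation.Assignment()], next_page_token="abc",
            ),
            reservation.ListAssignmentsResponse(assignments=[reservation.Assignment()]),
        )
        pager = client.list_assignments(request={})
        total = sum(len(page.assignments) for page in pager.pages)
        assert total == 2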
@pytest.mark.asyncio
async def test_list_assignments_async_pager():
client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assignments), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.ListAssignmentsResponse(
assignments=[
reservation.Assignment(),
reservation.Assignment(),
reservation.Assignment(),
],
next_page_token="abc",
),
reservation.ListAssignmentsResponse(assignments=[], next_page_token="def",),
reservation.ListAssignmentsResponse(
assignments=[reservation.Assignment(),], next_page_token="ghi",
),
reservation.ListAssignmentsResponse(
assignments=[reservation.Assignment(), reservation.Assignment(),],
),
RuntimeError,
)
async_pager = await client.list_assignments(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, reservation.Assignment) for i in responses)
@pytest.mark.asyncio
async def test_list_assignments_async_pages():
client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assignments), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.ListAssignmentsResponse(
assignments=[
reservation.Assignment(),
reservation.Assignment(),
reservation.Assignment(),
],
next_page_token="abc",
),
reservation.ListAssignmentsResponse(assignments=[], next_page_token="def",),
reservation.ListAssignmentsResponse(
assignments=[reservation.Assignment(),], next_page_token="ghi",
),
reservation.ListAssignmentsResponse(
assignments=[reservation.Assignment(), reservation.Assignment(),],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_assignments(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
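# Illustrative sketch (not part of the generated suite): the trailing
# RuntimeError in the side_effect sequences above acts as a sentinel. The
# pager must stop on an empty next_page_token; one extra RPC would raise
# and fail the test.
def test_pager_runtime_error_sentinel_sketch():
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
        call.side_effect = (
            reservation.ListAssignmentsResponse(assignments=[]),  # no next token
            RuntimeError,
        )
        pages = list(client.list_assignments(request={}).pages)
        assert len(pages) == 1  # the sentinel was never reached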
def test_delete_assignment(
transport: str = "grpc", request_type=reservation.DeleteAssignmentRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_assignment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.delete_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.DeleteAssignmentRequest()
# Establish that the response is the type that we expect.
assert response is None
def test_delete_assignment_from_dict():
test_delete_assignment(request_type=dict)
def test_delete_assignment_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_assignment), "__call__"
) as call:
client.delete_assignment()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.DeleteAssignmentRequest()
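# Illustrative sketch (not part of the generated suite): proto3 fields
# default to their zero values, so the empty requests asserted above compare
# equal to requests built with explicit defaults.
def test_proto3_default_request_sketch():
    assert reservation.DeleteAssignmentRequest() == reservation.DeleteAssignmentRequest(
        name="",
    )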
@pytest.mark.asyncio
async def test_delete_assignment_async(
transport: str = "grpc_asyncio", request_type=reservation.DeleteAssignmentRequest
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_assignment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.DeleteAssignmentRequest()
# Establish that the response is the type that we expect.
assert response is None
@pytest.mark.asyncio
async def test_delete_assignment_async_from_dict():
await test_delete_assignment_async(request_type=dict)
def test_delete_assignment_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.DeleteAssignmentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_assignment), "__call__"
) as call:
call.return_value = None
client.delete_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_assignment_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.DeleteAssignmentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_assignment), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_assignment_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_assignment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_assignment(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_delete_assignment_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_assignment(
reservation.DeleteAssignmentRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_delete_assignment_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_assignment), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_assignment(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_assignment_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_assignment(
reservation.DeleteAssignmentRequest(), name="name_value",
)
def test_search_assignments(
transport: str = "grpc", request_type=reservation.SearchAssignmentsRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.SearchAssignmentsResponse(
next_page_token="next_page_token_value",
)
response = client.search_assignments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.SearchAssignmentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.SearchAssignmentsPager)
assert response.next_page_token == "next_page_token_value"
def test_search_assignments_from_dict():
test_search_assignments(request_type=dict)
def test_search_assignments_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments), "__call__"
) as call:
client.search_assignments()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.SearchAssignmentsRequest()
@pytest.mark.asyncio
async def test_search_assignments_async(
transport: str = "grpc_asyncio", request_type=reservation.SearchAssignmentsRequest
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.SearchAssignmentsResponse(
next_page_token="next_page_token_value",
)
)
response = await client.search_assignments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.SearchAssignmentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.SearchAssignmentsAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_search_assignments_async_from_dict():
await test_search_assignments_async(request_type=dict)
def test_search_assignments_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.SearchAssignmentsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments), "__call__"
) as call:
call.return_value = reservation.SearchAssignmentsResponse()
client.search_assignments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_search_assignments_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.SearchAssignmentsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.SearchAssignmentsResponse()
)
await client.search_assignments(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_search_assignments_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.SearchAssignmentsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.search_assignments(
parent="parent_value", query="query_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].query == "query_value"
def test_search_assignments_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.search_assignments(
reservation.SearchAssignmentsRequest(),
parent="parent_value",
query="query_value",
)
@pytest.mark.asyncio
async def test_search_assignments_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.SearchAssignmentsResponse()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.search_assignments(
parent="parent_value", query="query_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].query == "query_value"
@pytest.mark.asyncio
async def test_search_assignments_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.search_assignments(
reservation.SearchAssignmentsRequest(),
parent="parent_value",
query="query_value",
)
def test_search_assignments_pager():
    client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.SearchAssignmentsResponse(
assignments=[
reservation.Assignment(),
reservation.Assignment(),
reservation.Assignment(),
],
next_page_token="abc",
),
reservation.SearchAssignmentsResponse(
assignments=[], next_page_token="def",
),
reservation.SearchAssignmentsResponse(
assignments=[reservation.Assignment(),], next_page_token="ghi",
),
reservation.SearchAssignmentsResponse(
assignments=[reservation.Assignment(), reservation.Assignment(),],
),
RuntimeError,
)
        metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
pager = client.search_assignments(request={})
assert pager._metadata == metadata
        results = list(pager)
assert len(results) == 6
assert all(isinstance(i, reservation.Assignment) for i in results)
def test_search_assignments_pages():
    client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.SearchAssignmentsResponse(
assignments=[
reservation.Assignment(),
reservation.Assignment(),
reservation.Assignment(),
],
next_page_token="abc",
),
reservation.SearchAssignmentsResponse(
assignments=[], next_page_token="def",
),
reservation.SearchAssignmentsResponse(
assignments=[reservation.Assignment(),], next_page_token="ghi",
),
reservation.SearchAssignmentsResponse(
assignments=[reservation.Assignment(), reservation.Assignment(),],
),
RuntimeError,
)
pages = list(client.search_assignments(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_search_assignments_async_pager():
client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.SearchAssignmentsResponse(
assignments=[
reservation.Assignment(),
reservation.Assignment(),
reservation.Assignment(),
],
next_page_token="abc",
),
reservation.SearchAssignmentsResponse(
assignments=[], next_page_token="def",
),
reservation.SearchAssignmentsResponse(
assignments=[reservation.Assignment(),], next_page_token="ghi",
),
reservation.SearchAssignmentsResponse(
assignments=[reservation.Assignment(), reservation.Assignment(),],
),
RuntimeError,
)
async_pager = await client.search_assignments(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, reservation.Assignment) for i in responses)
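# Illustrative sketch (not part of the generated suite):
# new_callable=mock.AsyncMock (used above) makes the patched transport
# method awaitable, so each side_effect page is produced by an awaited
# coroutine rather than a plain call.
@pytest.mark.asyncio
async def test_async_mock_awaitable_sketch():
    async_stub = mock.AsyncMock(
        return_value=reservation.SearchAssignmentsResponse()
    )
    response = await async_stub()
    assert isinstance(response, reservation.SearchAssignmentsResponse)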
@pytest.mark.asyncio
async def test_search_assignments_async_pages():
client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_assignments),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
reservation.SearchAssignmentsResponse(
assignments=[
reservation.Assignment(),
reservation.Assignment(),
reservation.Assignment(),
],
next_page_token="abc",
),
reservation.SearchAssignmentsResponse(
assignments=[], next_page_token="def",
),
reservation.SearchAssignmentsResponse(
assignments=[reservation.Assignment(),], next_page_token="ghi",
),
reservation.SearchAssignmentsResponse(
assignments=[reservation.Assignment(), reservation.Assignment(),],
),
RuntimeError,
)
pages = []
async for page_ in (await client.search_assignments(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
def test_move_assignment(
transport: str = "grpc", request_type=reservation.MoveAssignmentRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.Assignment(
name="name_value",
assignee="assignee_value",
job_type=reservation.Assignment.JobType.PIPELINE,
state=reservation.Assignment.State.PENDING,
)
response = client.move_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.MoveAssignmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.Assignment)
assert response.name == "name_value"
assert response.assignee == "assignee_value"
assert response.job_type == reservation.Assignment.JobType.PIPELINE
assert response.state == reservation.Assignment.State.PENDING
def test_move_assignment_from_dict():
test_move_assignment(request_type=dict)
def test_move_assignment_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
client.move_assignment()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.MoveAssignmentRequest()
@pytest.mark.asyncio
async def test_move_assignment_async(
transport: str = "grpc_asyncio", request_type=reservation.MoveAssignmentRequest
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.Assignment(
name="name_value",
assignee="assignee_value",
job_type=reservation.Assignment.JobType.PIPELINE,
state=reservation.Assignment.State.PENDING,
)
)
response = await client.move_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.MoveAssignmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.Assignment)
assert response.name == "name_value"
assert response.assignee == "assignee_value"
assert response.job_type == reservation.Assignment.JobType.PIPELINE
assert response.state == reservation.Assignment.State.PENDING
@pytest.mark.asyncio
async def test_move_assignment_async_from_dict():
await test_move_assignment_async(request_type=dict)
def test_move_assignment_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.MoveAssignmentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
call.return_value = reservation.Assignment()
client.move_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_move_assignment_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.MoveAssignmentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.Assignment()
)
await client.move_assignment(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_move_assignment_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.Assignment()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.move_assignment(
name="name_value", destination_id="destination_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert args[0].destination_id == "destination_id_value"
def test_move_assignment_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.move_assignment(
reservation.MoveAssignmentRequest(),
name="name_value",
destination_id="destination_id_value",
)
@pytest.mark.asyncio
async def test_move_assignment_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Assignment()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.move_assignment(
name="name_value", destination_id="destination_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert args[0].destination_id == "destination_id_value"
@pytest.mark.asyncio
async def test_move_assignment_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.move_assignment(
reservation.MoveAssignmentRequest(),
name="name_value",
destination_id="destination_id_value",
)
def test_get_bi_reservation(
transport: str = "grpc", request_type=reservation.GetBiReservationRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_bi_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.BiReservation(name="name_value", size=443,)
response = client.get_bi_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.GetBiReservationRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.BiReservation)
assert response.name == "name_value"
assert response.size == 443
def test_get_bi_reservation_from_dict():
test_get_bi_reservation(request_type=dict)
def test_get_bi_reservation_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_bi_reservation), "__call__"
) as call:
client.get_bi_reservation()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.GetBiReservationRequest()
@pytest.mark.asyncio
async def test_get_bi_reservation_async(
transport: str = "grpc_asyncio", request_type=reservation.GetBiReservationRequest
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_bi_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.BiReservation(name="name_value", size=443,)
)
response = await client.get_bi_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.GetBiReservationRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.BiReservation)
assert response.name == "name_value"
assert response.size == 443
@pytest.mark.asyncio
async def test_get_bi_reservation_async_from_dict():
await test_get_bi_reservation_async(request_type=dict)
def test_get_bi_reservation_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.GetBiReservationRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_bi_reservation), "__call__"
) as call:
call.return_value = reservation.BiReservation()
client.get_bi_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_bi_reservation_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.GetBiReservationRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_bi_reservation), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.BiReservation()
)
await client.get_bi_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_bi_reservation_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_bi_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.BiReservation()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_bi_reservation(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_get_bi_reservation_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_bi_reservation(
reservation.GetBiReservationRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_bi_reservation_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_bi_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.BiReservation()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.BiReservation()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_bi_reservation(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_bi_reservation_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_bi_reservation(
reservation.GetBiReservationRequest(), name="name_value",
)
def test_update_bi_reservation(
transport: str = "grpc", request_type=reservation.UpdateBiReservationRequest
):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_bi_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.BiReservation(name="name_value", size=443,)
response = client.update_bi_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.UpdateBiReservationRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.BiReservation)
assert response.name == "name_value"
assert response.size == 443
def test_update_bi_reservation_from_dict():
test_update_bi_reservation(request_type=dict)
def test_update_bi_reservation_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_bi_reservation), "__call__"
) as call:
client.update_bi_reservation()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.UpdateBiReservationRequest()
@pytest.mark.asyncio
async def test_update_bi_reservation_async(
transport: str = "grpc_asyncio", request_type=reservation.UpdateBiReservationRequest
):
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_bi_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.BiReservation(name="name_value", size=443,)
)
response = await client.update_bi_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == reservation.UpdateBiReservationRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, reservation.BiReservation)
assert response.name == "name_value"
assert response.size == 443
@pytest.mark.asyncio
async def test_update_bi_reservation_async_from_dict():
await test_update_bi_reservation_async(request_type=dict)
def test_update_bi_reservation_field_headers():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.UpdateBiReservationRequest()
request.bi_reservation.name = "bi_reservation.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_bi_reservation), "__call__"
) as call:
call.return_value = reservation.BiReservation()
client.update_bi_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"bi_reservation.name=bi_reservation.name/value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_bi_reservation_field_headers_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = reservation.UpdateBiReservationRequest()
request.bi_reservation.name = "bi_reservation.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_bi_reservation), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.BiReservation()
)
await client.update_bi_reservation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"bi_reservation.name=bi_reservation.name/value",
) in kw["metadata"]
def test_update_bi_reservation_flattened():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_bi_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.BiReservation()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_bi_reservation(
bi_reservation=reservation.BiReservation(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].bi_reservation == reservation.BiReservation(name="name_value")
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
def test_update_bi_reservation_flattened_error():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_bi_reservation(
reservation.UpdateBiReservationRequest(),
bi_reservation=reservation.BiReservation(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_bi_reservation_flattened_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_bi_reservation), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = reservation.BiReservation()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
reservation.BiReservation()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_bi_reservation(
bi_reservation=reservation.BiReservation(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].bi_reservation == reservation.BiReservation(name="name_value")
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_bi_reservation_flattened_error_async():
client = ReservationServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_bi_reservation(
reservation.UpdateBiReservationRequest(),
bi_reservation=reservation.BiReservation(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.ReservationServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.ReservationServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReservationServiceClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.ReservationServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReservationServiceClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.ReservationServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = ReservationServiceClient(transport=transport)
assert client.transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.ReservationServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.ReservationServiceGrpcAsyncIOTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@pytest.mark.parametrize(
"transport_class",
[
transports.ReservationServiceGrpcTransport,
transports.ReservationServiceGrpcAsyncIOTransport,
],
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
assert isinstance(client.transport, transports.ReservationServiceGrpcTransport,)
def test_reservation_service_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.ReservationServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_reservation_service_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.ReservationServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"create_reservation",
"list_reservations",
"get_reservation",
"delete_reservation",
"update_reservation",
"create_capacity_commitment",
"list_capacity_commitments",
"get_capacity_commitment",
"delete_capacity_commitment",
"update_capacity_commitment",
"split_capacity_commitment",
"merge_capacity_commitments",
"create_assignment",
"list_assignments",
"delete_assignment",
"search_assignments",
"move_assignment",
"get_bi_reservation",
"update_bi_reservation",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
@requires_google_auth_gte_1_25_0
def test_reservation_service_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReservationServiceTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
@requires_google_auth_lt_1_25_0
def test_reservation_service_base_transport_with_credentials_file_old_google_auth():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReservationServiceTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=(
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
def test_reservation_service_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReservationServiceTransport()
adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_reservation_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
ReservationServiceClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id=None,
)
@requires_google_auth_lt_1_25_0
def test_reservation_service_auth_adc_old_google_auth():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
ReservationServiceClient()
adc.assert_called_once_with(
scopes=(
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id=None,
)
@pytest.mark.parametrize(
"transport_class",
[
transports.ReservationServiceGrpcTransport,
transports.ReservationServiceGrpcAsyncIOTransport,
],
)
@requires_google_auth_gte_1_25_0
def test_reservation_service_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
adc.assert_called_once_with(
scopes=["1", "2"],
default_scopes=(
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class",
[
transports.ReservationServiceGrpcTransport,
transports.ReservationServiceGrpcAsyncIOTransport,
],
)
@requires_google_auth_lt_1_25_0
def test_reservation_service_transport_auth_adc_old_google_auth(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus")
adc.assert_called_once_with(
scopes=(
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.ReservationServiceGrpcTransport, grpc_helpers),
(transports.ReservationServiceGrpcAsyncIOTransport, grpc_helpers_async),
],
)
def test_reservation_service_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(
google.auth, "default", autospec=True
) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
create_channel.assert_called_with(
"bigqueryreservation.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
default_scopes=(
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
),
scopes=["1", "2"],
default_host="bigqueryreservation.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize(
"transport_class",
[
transports.ReservationServiceGrpcTransport,
transports.ReservationServiceGrpcAsyncIOTransport,
],
)
def test_reservation_service_grpc_transport_client_cert_source_for_mtls(
transport_class,
):
cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
mock_ssl_channel_creds = mock.Mock()
transport_class(
host="squid.clam.whelk",
credentials=cred,
ssl_channel_credentials=mock_ssl_channel_creds,
)
mock_create_channel.assert_called_once_with(
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
# is used.
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
transport_class(
credentials=cred,
client_cert_source_for_mtls=client_cert_source_callback,
)
expected_cert, expected_key = client_cert_source_callback()
mock_ssl_cred.assert_called_once_with(
certificate_chain=expected_cert, private_key=expected_key
)
def test_reservation_service_host_no_port():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="bigqueryreservation.googleapis.com"
),
)
assert client.transport._host == "bigqueryreservation.googleapis.com:443"
def test_reservation_service_host_with_port():
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="bigqueryreservation.googleapis.com:8000"
),
)
assert client.transport._host == "bigqueryreservation.googleapis.com:8000"
def test_reservation_service_grpc_transport_channel():
channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.ReservationServiceGrpcTransport(
host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
def test_reservation_service_grpc_asyncio_transport_channel():
channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.ReservationServiceGrpcAsyncIOTransport(
host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
transports.ReservationServiceGrpcTransport,
transports.ReservationServiceGrpcAsyncIOTransport,
],
)
def test_reservation_service_transport_channel_mtls_with_client_cert_source(
transport_class,
):
with mock.patch(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
transports.ReservationServiceGrpcTransport,
transports.ReservationServiceGrpcAsyncIOTransport,
],
)
def test_reservation_service_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
def test_assignment_path():
project = "squid"
location = "clam"
reservation = "whelk"
assignment = "octopus"
expected = "projects/{project}/locations/{location}/reservations/{reservation}/assignments/{assignment}".format(
project=project,
location=location,
reservation=reservation,
assignment=assignment,
)
actual = ReservationServiceClient.assignment_path(
project, location, reservation, assignment
)
assert expected == actual
def test_parse_assignment_path():
expected = {
"project": "oyster",
"location": "nudibranch",
"reservation": "cuttlefish",
"assignment": "mussel",
}
path = ReservationServiceClient.assignment_path(**expected)
# Check that the path construction is reversible.
actual = ReservationServiceClient.parse_assignment_path(path)
assert expected == actual
def test_bi_reservation_path():
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}/bireservation".format(
project=project, location=location,
)
actual = ReservationServiceClient.bi_reservation_path(project, location)
assert expected == actual
def test_parse_bi_reservation_path():
expected = {
"project": "scallop",
"location": "abalone",
}
path = ReservationServiceClient.bi_reservation_path(**expected)
# Check that the path construction is reversible.
actual = ReservationServiceClient.parse_bi_reservation_path(path)
assert expected == actual
def test_capacity_commitment_path():
project = "squid"
location = "clam"
capacity_commitment = "whelk"
expected = "projects/{project}/locations/{location}/capacityCommitments/{capacity_commitment}".format(
project=project, location=location, capacity_commitment=capacity_commitment,
)
actual = ReservationServiceClient.capacity_commitment_path(
project, location, capacity_commitment
)
assert expected == actual
def test_parse_capacity_commitment_path():
expected = {
"project": "octopus",
"location": "oyster",
"capacity_commitment": "nudibranch",
}
path = ReservationServiceClient.capacity_commitment_path(**expected)
# Check that the path construction is reversible.
actual = ReservationServiceClient.parse_capacity_commitment_path(path)
assert expected == actual
def test_reservation_path():
project = "cuttlefish"
location = "mussel"
reservation = "winkle"
expected = "projects/{project}/locations/{location}/reservations/{reservation}".format(
project=project, location=location, reservation=reservation,
)
actual = ReservationServiceClient.reservation_path(project, location, reservation)
assert expected == actual
def test_parse_reservation_path():
expected = {
"project": "nautilus",
"location": "scallop",
"reservation": "abalone",
}
path = ReservationServiceClient.reservation_path(**expected)
# Check that the path construction is reversible.
actual = ReservationServiceClient.parse_reservation_path(path)
assert expected == actual
def test_common_billing_account_path():
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = ReservationServiceClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "clam",
}
path = ReservationServiceClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = ReservationServiceClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "whelk"
expected = "folders/{folder}".format(folder=folder,)
actual = ReservationServiceClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "octopus",
}
path = ReservationServiceClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = ReservationServiceClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "oyster"
expected = "organizations/{organization}".format(organization=organization,)
actual = ReservationServiceClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nudibranch",
}
path = ReservationServiceClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = ReservationServiceClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "cuttlefish"
expected = "projects/{project}".format(project=project,)
actual = ReservationServiceClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "mussel",
}
path = ReservationServiceClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = ReservationServiceClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
actual = ReservationServiceClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "scallop",
"location": "abalone",
}
path = ReservationServiceClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = ReservationServiceClient.parse_common_location_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.ReservationServiceTransport, "_prep_wrapped_messages"
) as prep:
client = ReservationServiceClient(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.ReservationServiceTransport, "_prep_wrapped_messages"
) as prep:
transport_class = ReservationServiceClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
|
[
"noreply@github.com"
] |
noreply@github.com
|
c66fb03d7a1a952c8183b3246618837ac0c1a7f5
|
9e0cec9c4a4d080dc41a3e1b2bdc92b45be56e15
|
/legacy/autoMNIfilesAnalysis.py
|
bf308600d33f26f2cf111634a7db9c2426beb8e0
|
[] |
no_license
|
FabianRei/neuro_detect
|
373e42cbcd0a6b53d06c23edd14ef51cc4d9ee7d
|
2ea3558617d742d6f22f00256daf9220221589d2
|
refs/heads/master
| 2020-04-04T11:41:53.427101
| 2018-12-21T17:51:47
| 2018-12-21T17:51:47
| 155,901,128
| 0
| 1
| null | 2018-12-21T00:55:47
| 2018-11-02T17:38:34
|
Python
|
UTF-8
|
Python
| false
| false
| 4,090
|
py
|
import numpy as np
import pickle
import torch
from src.data.preprocess import getTensorList_general
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from skimage.measure import block_reduce
from scipy.ndimage import zoom
from dipy.reconst.dti import fractional_anisotropy
class simpleNet(nn.Module):
def __init__(self):
super(simpleNet, self).__init__()
self.fc1 = nn.Linear(12*12*12*6, 200)
self.fc2 = nn.Linear(200, 4)
def forward(self, x):
x = F.relu(self.fc1(x))
x = self.fc2(x)
return F.log_softmax(x, dim=1)
def six2mat(voxel6):
return voxel6[[0, 1, 3, 1, 2, 4, 3, 4, 5]].reshape(3,3)
def getFaArr(arr):
matArr = np.apply_along_axis(six2mat, -1, arr)
evalArr, evecArr = np.linalg.eig(matArr)
faArr = fractional_anisotropy(evalArr)
return faArr
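# The index pattern in six2mat implies (assuming this layout) that the
# 6-vector stores the lower triangle of a symmetric 3x3 diffusion tensor as
# [Dxx, Dxy, Dyy, Dxz, Dyz, Dzz]; for example,
#   six2mat(np.array([1., 0., 2., 0., 0., 3.])) -> diag(1., 2., 3.)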
def resizeTensors(tensors, wantedShape):
resizedTensors = []
count = 0
oldShapes = []
newShapes = []
print("Resizing..")
for t in tensors:
if t.shape != wantedShape:
count += 1
oldShapes.append(t.shape)
zoomFactor = []
for i, s in enumerate(t.shape):
zoomFactor.append(wantedShape[i]/s)
t = zoom(t, zoomFactor)
newShapes.append(t.shape)
resizedTensors.append(t)
for i in range(count):
print(f"{oldShapes[i]} -> {newShapes[i]}")
print(f"Resized {count} out of {len(tensors)} tensors. (This takes quite long)")
return resizedTensors
def flipAxes(arr, axis):
    # axis selects the spatial axis to flip: 1 = x, 2 = y, 3 = z. Flipping an
    # axis negates the off-diagonal tensor components that involve that axis.
if axis == 1:
arr[:, :, :, [1, 3]] = -arr[:, :, :, [1, 3]]
if axis == 2:
arr[:, :, :, [1, 4]] = -arr[:, :, :, [1, 4]]
if axis == 3:
arr[:, :, :, [3, 4]] = -arr[:, :, :, [3, 4]]
return arr
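# Sanity check (illustrative): flipping the same axis twice restores the
# original values, e.g. flipAxes(flipAxes(arr, 1), 1) returns arr unchanged;
# note that flipAxes mutates its argument in place.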
def cropBlockResize(tensors, resizeFactor, crop):
result = []
rf = resizeFactor
for t in tensors:
t = t[crop]
t = block_reduce(t, block_size=(rf, rf, rf, 1), func=np.mean)
result.append(t)
return result
def normalizeByMean(tensors):
result = []
for t in tensors:
t = t/np.mean(t)
result.append(t)
print(f"Normalized {len(tensors)} tensors")
return result
tensorDir = '/black/localhome/reith/Desktop/projects/Tensors/test/'
networkWeights = 'trained_simplenet.torch'
wantedShape = (81, 106, 76, 6)
crop = (slice(7, 55), slice(40, 88), slice(14, 62))
resizeFactor = 4
net = simpleNet()
net.load_state_dict(torch.load(networkWeights))
tensors, names = getTensorList_general(tensorDir, giveNames=True)
# tensors = resizeTensors(tensors, wantedShape)
# for t, n in zip(tensors,names):
# if n[-11:] == 'oMNI.nii.gz':
# print(n)
# print(t[35,35,66])
# print(getFaArr(t[35,35,66]))
t = tensors[3] # YflipAutoMNI
# One of the highest Fas in the area (0.48)
# for t, n in zip(tensors,names):
# if n[-11:] == 'oMNI.nii.gz':
# print(n)
# print(t[40,50, 42])
# print(getFaArr(t[40,50, 42]))
# One of the highest Fas in the area (0.44)
rightTensors = []
rightNames = []
for t, n in zip(tensors,names):
if n[-11:] == 'oMNI.nii.gz':
rightTensors.append(t)
rightNames.append(n)
tensors = rightTensors
names = rightNames
xflip = tensors[2]
yflip = tensors[0]
xflipx = np.copy(xflip)
xflipx = flipAxes(xflipx, 1)
xflipy = np.copy(xflip)
xflipy = np.copy(flipAxes(xflipy, 2))
xflipz = np.copy(xflip)
xflipz = np.copy(flipAxes(xflipz, 3))
print('xflip\n', np.linalg.eig(six2mat(xflip[45,55,47])))
print('yflip\n', np.linalg.eig(six2mat(yflip[45,55,47])))
print('xflipx\n', np.linalg.eig(six2mat(xflipx[45,55,47])))
print('xflipy\n', np.linalg.eig(six2mat(xflipy[45,55,47])))
print('xflipz\n', np.linalg.eig(six2mat(xflipz[45,55,47])))
for t, n in zip(tensors,names):
if n[-11:] == 'oMNI.nii.gz':
print(n)
print(t[45,55, 47])
print(np.linalg.eig(six2mat(t[45,55,47]))[1])
print(getFaArr(t[45,55, 47]))
print("done!")
|
[
"f.abi.an@gmx.de"
] |
f.abi.an@gmx.de
|
31fdb70e09d27fcc19feed1b7e8b826a584a5fe3
|
abf33d1b30906c35a700bb24478efd6eb3979692
|
/interviewbit/count_number_of_duplicates_in_a_list.py
|
11ba129d5f086da11bb6f1c030623dcc3d25d934
|
[] |
no_license
|
soniya-mi/python
|
ac57c7e3be6bc05241e8af4e8b074e5311390f7d
|
2a87ceeebb0403468c4fc2522b59573478e10ea4
|
refs/heads/master
| 2023-06-26T12:51:53.882774
| 2023-06-21T04:45:35
| 2023-06-21T04:45:35
| 137,060,314
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 335
|
py
|
numbers = [10, 20, 30, 20, 20, 30, 40, 50, -20, 60, 60, -20, -20]  # renamed from "list" to avoid shadowing the builtin
occur = {}
new_list = []
for item in numbers:
    occur[item] = occur.get(item, 0) + 1
for key in occur:
    if occur[key] > 1:
        new_list.append(key)
print(new_list)  # Python 3 print; the original used Python 2 "print new_list"
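# A more idiomatic alternative (sketch, not part of the original script):
#   from collections import Counter
#   print([k for k, v in Counter(numbers).items() if v > 1])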
|
[
"noreply@github.com"
] |
noreply@github.com
|
0a9382e84f0c116e1aed9e3976dd20cc21d50e62
|
47e447b0b8e422656d95e6f8707d8e24a4c6fda3
|
/mainplot.py
|
108f8dfbc08b9b6edc480e0db5e92ec1f43328bb
|
[] |
no_license
|
HareshKarnan/TD3
|
92b2911bbdccda027da670d2854048907ccc2fee
|
0bf5949f59d4865c685ec6113208a49c63ed455a
|
refs/heads/master
| 2023-04-10T20:51:02.648579
| 2021-04-02T14:45:52
| 2021-04-02T14:45:52
| 212,244,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,073
|
py
|
import glob
import csv
import numpy as np
import matplotlib.pyplot as plt
import random
# files = glob.glob('logs/lambda_expts/*')
# files = glob.glob('logs/model_free/*')
# files = glob.glob('logs/fwd_inv_model/*')
files = glob.glob('logs/fwd_model/*')
def extract_expt_data(files):
data = {}
for fileNamefull in files:
fileName = fileNamefull.split('/')[-1]
# print(fileName)
totalTimesteps = fileName.split('_')[6]
modelIters = fileName.split('_')[9]
modelGrads = fileName.split('_')[12]
seed = fileName.split('_')[13]
# print(modelIters, modelGrads, seed)
# first branch of dict -> model iterations
if modelIters not in data.keys(): data[modelIters] = {}
# second branch of dict -> model gradient steps
if modelGrads not in data[modelIters].keys(): data[modelIters][modelGrads] = {}
# third branch of dict -> experiment seed
if seed not in data[modelIters][modelGrads].keys(): data[modelIters][modelGrads][seed] = {}
# store the data of all seeds here
with open(fileNamefull+'/log.csv', 'r') as csvfile:
csvreader = csv.reader(csvfile)
episode, episode_rewards = [], []
curr_episode = 0
for row in csvreader:
reward, done, episode_num, episode_reward, episode_timesteps, total_timesteps = row
if done == 'True':
episode.append(int(episode_num))
episode_rewards.append(float(episode_reward))
data[modelIters][modelGrads][seed]['episode_rewards'] = episode_rewards
data[modelIters][modelGrads][seed]['episode'] = episode
return data
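# Shape of the dict returned above:
#   data[model_iters][model_grads][seed] == {"episode": [...],
#                                            "episode_rewards": [...]}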
fwd_data = extract_expt_data(glob.glob('logs/fwd_model/*'))
fwd_inv_data = extract_expt_data(glob.glob('logs/dual_final/*'))
model_free_data = extract_expt_data(glob.glob('logs/model_free/*'))
# find the mean and std across expts
ax = plt.subplot()
for i, data in enumerate([fwd_data, fwd_inv_data, model_free_data]):
for modelIters in data.keys():
for modelGrads in data[modelIters].keys():
# select the right experiment here
if i == 0 and (modelIters != '1' or modelGrads != '3'): continue
if i == 1 and (modelIters != '1' or modelGrads != '2'): continue
print(i, modelIters, modelGrads, type(modelIters), type(modelGrads))
color = (random.random(), random.random(), random.random())
# print('found seeds :: ', data[modelIters][modelGrads].keys())
dataX, dataY = [], []
for seed in data[modelIters][modelGrads].keys():
episode = data[modelIters][modelGrads][seed]['episode']
episodeRew = data[modelIters][modelGrads][seed]['episode_rewards']
dataY.append(episodeRew)
dataX.append(episode)
minX = min([val[-1] for val in dataX]) + 1
dataX = [datX[:minX] for datX in dataX]
dataY = [datY[:minX] for datY in dataY]
dataY = np.asarray(dataY)
if i==0:
label = 'Forward Model'
color = (0, 0, 1)
exptnums = 10
# continue
elif i==1:
label = 'Forward + Inverse Model'
color = (0, 1, 0)
exptnums = 5
continue
elif i==2:
label = 'Model Free'
color = (1, 0, 0)
exptnums = 10
ax.plot(dataX[0], np.mean(dataY, axis=0), color=color, label=label)
ax.fill_between( dataX[0],
np.mean(dataY, axis=0) - np.std(dataY, axis=0)/np.sqrt(exptnums),
np.mean(dataY, axis=0) + np.std(dataY, axis=0)/np.sqrt(exptnums),
alpha=0.25,
color=color)
plt.legend()
plt.ylim([0, 900])
plt.xlim([0, 250])
plt.xlabel('Episodes')
plt.ylabel('Episode Returns')
plt.savefig('fwd_model.png')
plt.show()
|
[
"haresh.miriyala@gmail.com"
] |
haresh.miriyala@gmail.com
|
f0c8e4a7e7eedd40041bc507e96e9ebd1d7c55c0
|
3e713a67f370d1cc1ba0882159a03b673bd22f9a
|
/DataStructure and Alogorithms/[HACKERRANK]-cats and a mouse .py
|
d1edad7dc3eaf287f6fbb70ca5520a5f5a091571
|
[] |
no_license
|
s-abhishek2399/competitive-progamming--PYTHON
|
739797ffea0b92cc2781559e7d4eed1d274678a6
|
29f9e63cfc05c01fa605c14fb8a3a55920296d43
|
refs/heads/master
| 2023-03-08T02:40:00.962109
| 2021-02-16T15:07:52
| 2021-02-16T15:07:52
| 328,732,345
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 239
|
py
|
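# HackerRank "Cats and a Mouse": for each query, the cat whose position is
# closer to the mouse catches it; if both are equidistant, the mouse escapes.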
n = int(input())
for i in range(n):
l=[int(x) for x in input().split()]
a = l[0]-l[2]
b = l[1]-l[2]
if abs(a)<abs(b):
print("Cat A")
elif abs(b)<abs(a):
print("Cat B")
else:
print("Mouse C")
|
[
"s.abhishek2399@gmail.com"
] |
s.abhishek2399@gmail.com
|
c785fc4d347223457e4644dfcffd8c08364a742c
|
7700f9014b8f34a2ab9795ce51572dc0c311aae4
|
/source/nc_data_tools/data_tools/reformat_raster.py
|
f9b5e02b9b55b38cb47f49377410edccbf10ef9c
|
[] |
no_license
|
geoneric/nc_data_tools
|
59c057476dfaaccc6a30348d7aecfc22d026c3dc
|
553828836a616e307ca5d60d7f845e5e90ee2429
|
refs/heads/master
| 2021-01-11T22:46:47.139938
| 2017-06-02T08:25:35
| 2017-06-02T08:25:35
| 79,031,535
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 622
|
py
|
import rasterio
from .driver import driver_by_pathname
def reformat_raster(
source_raster_pathname,
target_raster_pathname,
override_crs=None):
target_driver = driver_by_pathname(target_raster_pathname)
with rasterio.open(source_raster_pathname) as source_raster:
profile = source_raster.profile
profile["driver"] = target_driver
if override_crs is not None:
profile["crs"] = override_crs
with rasterio.open(target_raster_pathname, "w", **profile) as \
target_raster:
target_raster.write(source_raster.read())
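# Example usage (hypothetical file names; the output driver is presumably
# inferred from the target extension by driver_by_pathname, not shown here):
#   reformat_raster("input.tif", "output.img")
#   reformat_raster("input.tif", "output_4326.tif",
#                   override_crs=rasterio.crs.CRS.from_epsg(4326))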
|
[
"kor@jemig.eu"
] |
kor@jemig.eu
|
00e199a0bfedc87514bce6f71c530ade6ab7313b
|
299355248bb243427667e8192e93f378907150b9
|
/envs/robots/minitaur.py
|
81915b6897aa89b6de918f27ad285fe02c415e07
|
[
"MIT"
] |
permissive
|
FrankTianTT/pytorch_sac
|
d35f60579a31e0063ffbdba44e13db16de2e38ae
|
770a632b25dc00419faef3ba672eb4982f950d2a
|
refs/heads/master
| 2022-11-24T12:48:23.181504
| 2020-07-31T15:30:11
| 2020-07-31T15:30:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 52,723
|
py
|
# coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file implements the functionalities of a minitaur using pybullet."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections.abc
import copy
import math
import re
import numpy as np
from robots import minitaur_constants
from robots import minitaur_motor
from robots import robot_config
from robots import action_filter
from robots import kinematics
INIT_POSITION = [0, 0, .2]
INIT_RACK_POSITION = [0, 0, 1]
INIT_ORIENTATION = [0, 0, 0, 1]
KNEE_CONSTRAINT_POINT_RIGHT = [0, 0.005, 0.2]
KNEE_CONSTRAINT_POINT_LEFT = [0, 0.01, 0.2]
OVERHEAT_SHUTDOWN_TORQUE = 2.45
OVERHEAT_SHUTDOWN_TIME = 1.0
LEG_POSITION = ["front_left", "back_left", "front_right", "back_right"]
MOTOR_NAMES = [
"motor_front_leftL_joint", "motor_front_leftR_joint",
"motor_back_leftL_joint", "motor_back_leftR_joint",
"motor_front_rightL_joint", "motor_front_rightR_joint",
"motor_back_rightL_joint", "motor_back_rightR_joint"
]
_CHASSIS_NAME_PATTERN = re.compile(r"chassis\D*center")
_MOTOR_NAME_PATTERN = re.compile(r"motor\D*joint")
_KNEE_NAME_PATTERN = re.compile(r"knee\D*")
_BRACKET_NAME_PATTERN = re.compile(r"motor\D*_bracket_joint")
_LEG_NAME_PATTERN1 = re.compile(r"hip\D*joint")
_LEG_NAME_PATTERN2 = re.compile(r"hip\D*link")
_LEG_NAME_PATTERN3 = re.compile(r"motor\D*link")
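# For example (illustrative), "motor_front_leftL_joint" matches
# _MOTOR_NAME_PATTERN and "knee_front_leftL_link" matches _KNEE_NAME_PATTERN;
# _BuildUrdfIds below uses these patterns to classify URDF joints into
# chassis, bracket, motor, knee/foot, and leg links.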
SENSOR_NOISE_STDDEV = (0.0, 0.0, 0.0, 0.0, 0.0)
MINITAUR_DEFAULT_MOTOR_DIRECTIONS = (-1, -1, -1, -1, 1, 1, 1, 1)
MINITAUR_DEFAULT_MOTOR_OFFSETS = (0, 0, 0, 0, 0, 0, 0, 0)
MINITAUR_NUM_MOTORS = 8
TWO_PI = 2 * math.pi
MINITAUR_DOFS_PER_LEG = 2
def MapToMinusPiToPi(angles):
"""Maps a list of angles to [-pi, pi].
Args:
angles: A list of angles in rad.
Returns:
A list of angle mapped to [-pi, pi].
"""
mapped_angles = copy.deepcopy(angles)
for i in range(len(angles)):
mapped_angles[i] = math.fmod(angles[i], TWO_PI)
if mapped_angles[i] >= math.pi:
mapped_angles[i] -= TWO_PI
elif mapped_angles[i] < -math.pi:
mapped_angles[i] += TWO_PI
return mapped_angles
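# Worked example (illustrative): MapToMinusPiToPi([7.0]) -> [~0.7168]
# (7.0 - 2*pi), and MapToMinusPiToPi([-4.0]) -> [~2.2832] (-4.0 + 2*pi).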
class Minitaur(object):
"""The minitaur class that simulates a quadruped robot from Ghost Robotics."""
def __init__(self,
pybullet_client,
num_motors=MINITAUR_NUM_MOTORS,
dofs_per_leg=MINITAUR_DOFS_PER_LEG,
time_step=0.01,
action_repeat=1,
self_collision_enabled=False,
motor_control_mode=robot_config.MotorControlMode.POSITION,
motor_model_class=minitaur_motor.MotorModel,
motor_kp=1.0,
motor_kd=0.02,
motor_torque_limits=None,
pd_latency=0.0,
control_latency=0.0,
observation_noise_stdev=SENSOR_NOISE_STDDEV,
motor_overheat_protection=False,
motor_direction=MINITAUR_DEFAULT_MOTOR_DIRECTIONS,
motor_offset=MINITAUR_DEFAULT_MOTOR_OFFSETS,
on_rack=False,
reset_at_current_position=False,
sensors=None,
enable_action_interpolation=False,
enable_action_filter=False):
"""Constructs a minitaur and reset it to the initial states.
Args:
pybullet_client: The instance of BulletClient to manage different
simulations.
num_motors: The number of the motors on the robot.
dofs_per_leg: The number of degrees of freedom for each leg.
time_step: The time step of the simulation.
action_repeat: The number of ApplyAction() for each control step.
self_collision_enabled: Whether to enable self collision.
motor_control_mode: Enum. Can either be POSITION, TORQUE, or HYBRID.
      motor_model_class: We can choose anything from a simple PD model to more
        accurate DC motor models.
motor_kp: proportional gain for the motors.
motor_kd: derivative gain for the motors.
motor_torque_limits: Torque limits for the motors. Can be a single float
or a list of floats specifying different limits for different robots. If
not provided, the default limit of the robot is used.
pd_latency: The latency of the observations (in seconds) used to calculate
PD control. On the real hardware, it is the latency between the
microcontroller and the motor controller.
      control_latency: The latency of the observations (in seconds) used to
calculate action. On the real hardware, it is the latency from the motor
controller, the microcontroller to the host (Nvidia TX2).
observation_noise_stdev: The standard deviation of a Gaussian noise model
for the sensor. It should be an array for separate sensors in the
following order [motor_angle, motor_velocity, motor_torque,
base_roll_pitch_yaw, base_angular_velocity]
motor_overheat_protection: Whether to shutdown the motor that has exerted
large torque (OVERHEAT_SHUTDOWN_TORQUE) for an extended amount of time
(OVERHEAT_SHUTDOWN_TIME). See ApplyAction() in minitaur.py for more
details.
motor_direction: A list of direction values, either 1 or -1, to compensate
the axis difference of motors between the simulation and the real robot.
motor_offset: A list of offset value for the motor angles. This is used to
compensate the angle difference between the simulation and the real
robot.
on_rack: Whether to place the minitaur on rack. This is only used to debug
        the walking gait. In this mode, the minitaur's base is hung in midair
        so that its walking gait is easier to visualize.
reset_at_current_position: Whether to reset the minitaur at the current
position and orientation. This is for simulating the reset behavior in
the real world.
sensors: a list of sensors that are attached to the robot.
enable_action_interpolation: Whether to interpolate the current action
with the previous action in order to produce smoother motions
enable_action_filter: Boolean specifying if a lowpass filter should be
used to smooth actions.
"""
self.num_motors = num_motors
self.num_legs = self.num_motors // dofs_per_leg
self._pybullet_client = pybullet_client
self._action_repeat = action_repeat
self._self_collision_enabled = self_collision_enabled
self._motor_direction = motor_direction
self._motor_offset = motor_offset
self._observed_motor_torques = np.zeros(self.num_motors)
self._applied_motor_torques = np.zeros(self.num_motors)
self._max_force = 3.5
self._pd_latency = pd_latency
self._control_latency = control_latency
self._observation_noise_stdev = observation_noise_stdev
self._observation_history = collections.deque(maxlen=100)
self._control_observation = []
self._chassis_link_ids = [-1]
self._leg_link_ids = []
self._motor_link_ids = []
self._foot_link_ids = []
self._motor_overheat_protection = motor_overheat_protection
self._on_rack = on_rack
self._reset_at_current_position = reset_at_current_position
self.SetAllSensors(sensors if sensors is not None else list())
self._is_safe = True
self._enable_action_interpolation = enable_action_interpolation
self._enable_action_filter = enable_action_filter
self._last_action = None
if not motor_model_class:
raise ValueError("Must provide a motor model class!")
if self._on_rack and self._reset_at_current_position:
raise ValueError("on_rack and reset_at_current_position "
"cannot be enabled together")
    if isinstance(motor_kp, (collections.abc.Sequence, np.ndarray)):
self._motor_kps = np.asarray(motor_kp)
else:
self._motor_kps = np.full(num_motors, motor_kp)
    if isinstance(motor_kd, (collections.abc.Sequence, np.ndarray)):
self._motor_kds = np.asarray(motor_kd)
else:
self._motor_kds = np.full(num_motors, motor_kd)
    if isinstance(motor_torque_limits, (collections.abc.Sequence, np.ndarray)):
self._motor_torque_limits = np.asarray(motor_torque_limits)
elif motor_torque_limits is None:
self._motor_torque_limits = None
else:
self._motor_torque_limits = motor_torque_limits
self._motor_control_mode = motor_control_mode
self._motor_model = motor_model_class(
kp=motor_kp,
kd=motor_kd,
torque_limits=self._motor_torque_limits,
motor_control_mode=motor_control_mode)
self.time_step = time_step
self._step_counter = 0
# This also includes the time spent during the Reset motion.
self._state_action_counter = 0
_, self._init_orientation_inv = self._pybullet_client.invertTransform(
position=[0, 0, 0], orientation=self._GetDefaultInitOrientation())
if self._enable_action_filter:
self._action_filter = self._BuildActionFilter()
# reset_time=-1.0 means skipping the reset motion.
# See Reset for more details.
self.Reset(reset_time=-1.0)
self.ReceiveObservation()
return
def GetTimeSinceReset(self):
return self._step_counter * self.time_step
def _StepInternal(self, action, motor_control_mode=None):
self.ApplyAction(action, motor_control_mode)
self._pybullet_client.stepSimulation()
self.ReceiveObservation()
self._state_action_counter += 1
return
def Step(self, action):
"""Steps simulation."""
if self._enable_action_filter:
action = self._FilterAction(action)
for i in range(self._action_repeat):
proc_action = self.ProcessAction(action, i)
self._StepInternal(proc_action)
self._step_counter += 1
self._last_action = action
return
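  # Editorial note: one Step() call advances the simulation by
  # action_repeat * time_step seconds of simulated time; ProcessAction
  # (presumably defined later in this class, not shown here) may blend the
  # previous and current action when enable_action_interpolation is set.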
def Terminate(self):
pass
def GetFootLinkIDs(self):
"""Get list of IDs for all foot links."""
return self._foot_link_ids
def _RecordMassInfoFromURDF(self):
"""Records the mass information from the URDF file."""
self._base_mass_urdf = []
for chassis_id in self._chassis_link_ids:
self._base_mass_urdf.append(
self._pybullet_client.getDynamicsInfo(self.quadruped, chassis_id)[0])
self._leg_masses_urdf = []
for leg_id in self._leg_link_ids:
self._leg_masses_urdf.append(
self._pybullet_client.getDynamicsInfo(self.quadruped, leg_id)[0])
for motor_id in self._motor_link_ids:
self._leg_masses_urdf.append(
self._pybullet_client.getDynamicsInfo(self.quadruped, motor_id)[0])
def _RecordInertiaInfoFromURDF(self):
"""Record the inertia of each body from URDF file."""
self._link_urdf = []
num_bodies = self._pybullet_client.getNumJoints(self.quadruped)
for body_id in range(-1, num_bodies): # -1 is for the base link.
inertia = self._pybullet_client.getDynamicsInfo(self.quadruped,
body_id)[2]
self._link_urdf.append(inertia)
# We need to use id+1 to index self._link_urdf because it has the base
# (index = -1) at the first element.
self._base_inertia_urdf = [
self._link_urdf[chassis_id + 1] for chassis_id in self._chassis_link_ids
]
self._leg_inertia_urdf = [
self._link_urdf[leg_id + 1] for leg_id in self._leg_link_ids
]
self._leg_inertia_urdf.extend(
[self._link_urdf[motor_id + 1] for motor_id in self._motor_link_ids])
def _BuildJointNameToIdDict(self):
num_joints = self._pybullet_client.getNumJoints(self.quadruped)
self._joint_name_to_id = {}
for i in range(num_joints):
joint_info = self._pybullet_client.getJointInfo(self.quadruped, i)
self._joint_name_to_id[joint_info[1].decode("UTF-8")] = joint_info[0]
def _BuildUrdfIds(self):
"""Build the link Ids from its name in the URDF file.
Raises:
ValueError: Unknown category of the joint name.
"""
num_joints = self._pybullet_client.getNumJoints(self.quadruped)
self._chassis_link_ids = [-1]
# The self._leg_link_ids include both the upper and lower links of the leg.
self._leg_link_ids = []
self._motor_link_ids = []
self._foot_link_ids = []
self._bracket_link_ids = []
for i in range(num_joints):
joint_info = self._pybullet_client.getJointInfo(self.quadruped, i)
joint_name = joint_info[1].decode("UTF-8")
joint_id = self._joint_name_to_id[joint_name]
if _CHASSIS_NAME_PATTERN.match(joint_name):
self._chassis_link_ids.append(joint_id)
elif _BRACKET_NAME_PATTERN.match(joint_name):
self._bracket_link_ids.append(joint_id)
elif _MOTOR_NAME_PATTERN.match(joint_name):
self._motor_link_ids.append(joint_id)
elif _KNEE_NAME_PATTERN.match(joint_name):
self._foot_link_ids.append(joint_id)
elif (_LEG_NAME_PATTERN1.match(joint_name) or
_LEG_NAME_PATTERN2.match(joint_name) or
_LEG_NAME_PATTERN3.match(joint_name)):
self._leg_link_ids.append(joint_id)
else:
raise ValueError("Unknown category of joint %s" % joint_name)
self._leg_link_ids.extend(self._foot_link_ids)
self._chassis_link_ids.sort()
self._motor_link_ids.sort()
self._foot_link_ids.sort()
self._leg_link_ids.sort()
self._bracket_link_ids.sort()
def _RemoveDefaultJointDamping(self):
num_joints = self._pybullet_client.getNumJoints(self.quadruped)
for i in range(num_joints):
joint_info = self._pybullet_client.getJointInfo(self.quadruped, i)
self._pybullet_client.changeDynamics(
joint_info[0], -1, linearDamping=0, angularDamping=0)
def _BuildMotorIdList(self):
self._motor_id_list = [
self._joint_name_to_id[motor_name]
for motor_name in self._GetMotorNames()
]
def _CreateRackConstraint(self, init_position, init_orientation):
"""Create a constraint that keeps the chassis at a fixed frame.
This frame is defined by init_position and init_orientation.
Args:
init_position: initial position of the fixed frame.
init_orientation: initial orientation of the fixed frame in quaternion
format [x,y,z,w].
Returns:
Return the constraint id.
"""
fixed_constraint = self._pybullet_client.createConstraint(
parentBodyUniqueId=self.quadruped,
parentLinkIndex=-1,
childBodyUniqueId=-1,
childLinkIndex=-1,
jointType=self._pybullet_client.JOINT_FIXED,
jointAxis=[0, 0, 0],
parentFramePosition=[0, 0, 0],
childFramePosition=init_position,
childFrameOrientation=init_orientation)
return fixed_constraint
def IsObservationValid(self):
"""Whether the observation is valid for the current time step.
In simulation, observations are always valid. In real hardware, it may not
be valid from time to time when communication error happens between the
Nvidia TX2 and the microcontroller.
Returns:
Whether the observation is valid for the current time step.
"""
return True
def Reset(self, reload_urdf=True, default_motor_angles=None, reset_time=3.0):
"""Reset the minitaur to its initial states.
Args:
      reload_urdf: Whether to reload the urdf file. If not, Reset() just places
the minitaur back to its starting position.
default_motor_angles: The default motor angles. If it is None, minitaur
will hold a default pose (motor angle math.pi / 2) for 100 steps. In
torque control mode, the phase of holding the default pose is skipped.
reset_time: The duration (in seconds) to hold the default motor angles. If
reset_time <= 0 or in torque control mode, the phase of holding the
default pose is skipped.
"""
if reload_urdf:
self._LoadRobotURDF()
if self._on_rack:
self.rack_constraint = (
self._CreateRackConstraint(self._GetDefaultInitPosition(),
self._GetDefaultInitOrientation()))
self._BuildJointNameToIdDict()
self._BuildUrdfIds()
self._RemoveDefaultJointDamping()
self._BuildMotorIdList()
self._RecordMassInfoFromURDF()
self._RecordInertiaInfoFromURDF()
self.ResetPose(add_constraint=True)
else:
self._pybullet_client.resetBasePositionAndOrientation(
self.quadruped, self._GetDefaultInitPosition(),
self._GetDefaultInitOrientation())
self._pybullet_client.resetBaseVelocity(self.quadruped, [0, 0, 0],
[0, 0, 0])
self.ResetPose(add_constraint=False)
self._overheat_counter = np.zeros(self.num_motors)
self._motor_enabled_list = [True] * self.num_motors
self._observation_history.clear()
self._step_counter = 0
self._state_action_counter = 0
self._is_safe = True
self._last_action = None
self._SettleDownForReset(default_motor_angles, reset_time)
if self._enable_action_filter:
self._ResetActionFilter()
return
def _LoadRobotURDF(self):
"""Loads the URDF file for the robot."""
urdf_file = self.GetURDFFile()
if self._self_collision_enabled:
self.quadruped = self._pybullet_client.loadURDF(
urdf_file,
self._GetDefaultInitPosition(),
self._GetDefaultInitOrientation(),
flags=self._pybullet_client.URDF_USE_SELF_COLLISION)
else:
self.quadruped = self._pybullet_client.loadURDF(
urdf_file, self._GetDefaultInitPosition(),
self._GetDefaultInitOrientation())
def _SettleDownForReset(self, default_motor_angles, reset_time):
"""Sets the default motor angles and waits for the robot to settle down.
    The reset is skipped if reset_time is less than zero.
Args:
default_motor_angles: A list of motor angles that the robot will achieve
at the end of the reset phase.
reset_time: The time duration for the reset phase.
"""
if reset_time <= 0:
return
# Important to fill the observation buffer.
self.ReceiveObservation()
for _ in range(100):
self._StepInternal(
[math.pi / 2] * self.num_motors,
motor_control_mode=robot_config.MotorControlMode.POSITION)
# Don't continue to reset if a safety error has occurred.
if not self._is_safe:
return
if default_motor_angles is None:
return
num_steps_to_reset = int(reset_time / self.time_step)
for _ in range(num_steps_to_reset):
self._StepInternal(
default_motor_angles,
motor_control_mode=robot_config.MotorControlMode.POSITION)
# Don't continue to reset if a safety error has occurred.
if not self._is_safe:
return
def _SetMotorTorqueById(self, motor_id, torque):
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=motor_id,
controlMode=self._pybullet_client.TORQUE_CONTROL,
force=torque)
def _SetMotorTorqueByIds(self, motor_ids, torques):
self._pybullet_client.setJointMotorControlArray(
bodyIndex=self.quadruped,
jointIndices=motor_ids,
controlMode=self._pybullet_client.TORQUE_CONTROL,
forces=torques)
def _SetDesiredMotorAngleByName(self, motor_name, desired_angle):
self._SetDesiredMotorAngleById(self._joint_name_to_id[motor_name],
desired_angle)
def GetURDFFile(self):
return None
def ResetPose(self, add_constraint):
"""Reset the pose of the minitaur.
Args:
add_constraint: Whether to add a constraint at the joints of two feet.
"""
for i in range(self.num_legs):
self._ResetPoseForLeg(i, add_constraint)
def _ResetPoseForLeg(self, leg_id, add_constraint):
"""Reset the initial pose for the leg.
Args:
leg_id: It should be 0, 1, 2, or 3, which represents the leg at
front_left, back_left, front_right and back_right.
add_constraint: Whether to add a constraint at the joints of two feet.
"""
knee_friction_force = 0
half_pi = math.pi / 2.0
knee_angle = -2.1834
leg_position = LEG_POSITION[leg_id]
self._pybullet_client.resetJointState(
self.quadruped,
self._joint_name_to_id["motor_" + leg_position + "L_joint"],
self._motor_direction[2 * leg_id] * half_pi,
targetVelocity=0)
self._pybullet_client.resetJointState(
self.quadruped,
self._joint_name_to_id["knee_" + leg_position + "L_link"],
self._motor_direction[2 * leg_id] * knee_angle,
targetVelocity=0)
self._pybullet_client.resetJointState(
self.quadruped,
self._joint_name_to_id["motor_" + leg_position + "R_joint"],
self._motor_direction[2 * leg_id + 1] * half_pi,
targetVelocity=0)
self._pybullet_client.resetJointState(
self.quadruped,
self._joint_name_to_id["knee_" + leg_position + "R_link"],
self._motor_direction[2 * leg_id + 1] * knee_angle,
targetVelocity=0)
if add_constraint:
self._pybullet_client.createConstraint(
self.quadruped,
self._joint_name_to_id["knee_" + leg_position + "R_link"],
self.quadruped,
self._joint_name_to_id["knee_" + leg_position + "L_link"],
self._pybullet_client.JOINT_POINT2POINT, [0, 0, 0],
KNEE_CONSTRAINT_POINT_RIGHT, KNEE_CONSTRAINT_POINT_LEFT)
# Disable the default motor in pybullet.
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=(self._joint_name_to_id["motor_" + leg_position +
"L_joint"]),
controlMode=self._pybullet_client.VELOCITY_CONTROL,
targetVelocity=0,
force=knee_friction_force)
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=(self._joint_name_to_id["motor_" + leg_position +
"R_joint"]),
controlMode=self._pybullet_client.VELOCITY_CONTROL,
targetVelocity=0,
force=knee_friction_force)
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=(self._joint_name_to_id["knee_" + leg_position + "L_link"]),
controlMode=self._pybullet_client.VELOCITY_CONTROL,
targetVelocity=0,
force=knee_friction_force)
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=(self._joint_name_to_id["knee_" + leg_position + "R_link"]),
controlMode=self._pybullet_client.VELOCITY_CONTROL,
targetVelocity=0,
force=knee_friction_force)
def GetBasePosition(self):
"""Get the position of minitaur's base.
Returns:
The position of minitaur's base.
"""
return self._base_position
def GetBaseVelocity(self):
"""Get the linear velocity of minitaur's base.
Returns:
The velocity of minitaur's base.
"""
velocity, _ = self._pybullet_client.getBaseVelocity(self.quadruped)
return velocity
def GetTrueBaseRollPitchYaw(self):
"""Get minitaur's base orientation in euler angle in the world frame.
Returns:
A tuple (roll, pitch, yaw) of the base in world frame.
"""
orientation = self.GetTrueBaseOrientation()
roll_pitch_yaw = self._pybullet_client.getEulerFromQuaternion(orientation)
return np.asarray(roll_pitch_yaw)
def GetBaseRollPitchYaw(self):
"""Get minitaur's base orientation in euler angle in the world frame.
    This function mimics the noisy sensor reading and adds latency.
Returns:
A tuple (roll, pitch, yaw) of the base in world frame polluted by noise
and latency.
"""
delayed_orientation = np.array(
self._control_observation[3 * self.num_motors:3 * self.num_motors + 4])
delayed_roll_pitch_yaw = self._pybullet_client.getEulerFromQuaternion(
delayed_orientation)
roll_pitch_yaw = self._AddSensorNoise(
np.array(delayed_roll_pitch_yaw), self._observation_noise_stdev[3])
return roll_pitch_yaw
def GetHipPositionsInBaseFrame(self):
"""Get the hip joint positions of the robot within its base frame."""
raise NotImplementedError("Not implemented for Minitaur.")
def ComputeMotorAnglesFromFootLocalPosition(self, leg_id,
foot_local_position):
"""Use IK to compute the motor angles, given the foot link's local position.
Args:
leg_id: The leg index.
foot_local_position: The foot link's position in the base frame.
Returns:
A tuple. The position indices and the angles for all joints along the
      leg. The position indices are consistent with the joint order as returned
      by the GetMotorAngles API.
"""
assert len(self._foot_link_ids) == self.num_legs
toe_id = self._foot_link_ids[leg_id]
motors_per_leg = self.num_motors // self.num_legs
joint_position_idxs = [
i for i in range(leg_id * motors_per_leg, leg_id * motors_per_leg +
motors_per_leg)
]
joint_angles = kinematics.joint_angles_from_link_position(
robot=self,
link_position=foot_local_position,
link_id=toe_id,
joint_ids=joint_position_idxs,
)
# Joint offset is necessary for Laikago.
joint_angles = np.multiply(
np.asarray(joint_angles) -
np.asarray(self._motor_offset)[joint_position_idxs],
self._motor_direction[joint_position_idxs])
    # Return the joint index (the same as when calling GetMotorAngles) as well
    # as the angles.
return joint_position_idxs, joint_angles.tolist()
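    # Usage sketch ('robot' is a hypothetical instance of this class and the
    # foot position is an assumed value in the base frame):
    #   idxs, angles = robot.ComputeMotorAnglesFromFootLocalPosition(
    #       leg_id=0, foot_local_position=(0.17, -0.13, -0.32))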
def ComputeJacobian(self, leg_id):
"""Compute the Jacobian for a given leg."""
# Does not work for Minitaur which has the four bar mechanism for now.
assert len(self._foot_link_ids) == self.num_legs
return kinematics.compute_jacobian(
robot=self,
link_id=self._foot_link_ids[leg_id],
)
def MapContactForceToJointTorques(self, leg_id, contact_force):
"""Maps the foot contact force to the leg joint torques."""
jv = self.ComputeJacobian(leg_id)
all_motor_torques = np.matmul(contact_force, jv)
motor_torques = {}
motors_per_leg = self.num_motors // self.num_legs
com_dof = 6
for joint_id in range(leg_id * motors_per_leg,
(leg_id + 1) * motors_per_leg):
motor_torques[joint_id] = all_motor_torques[
com_dof + joint_id] * self._motor_direction[joint_id]
return motor_torques
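    # The mapping above is the Jacobian-transpose rule tau = J^T * f; a minimal
    # numeric sketch (shapes are assumptions for illustration):
    #   f = np.array([0.0, 0.0, -10.0])  # a downward contact force
    #   jv = np.zeros((3, 6 + 8))        # 6 base DOFs + 8 motor DOFs (assumed)
    #   tau = np.matmul(f, jv)           # same as jv.T @ f, one entry per DOF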
def GetFootContacts(self):
"""Get minitaur's foot contact situation with the ground.
Returns:
A list of 4 booleans. The ith boolean is True if leg i is in contact with
ground.
"""
contacts = []
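    # Note: bodyA=0 below assumes the ground plane was the first body loaded
    # into the simulation, which is the usual setup in these environments.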
for leg_idx in range(MINITAUR_NUM_MOTORS // 2):
link_id_1 = self._foot_link_ids[leg_idx * 2]
link_id_2 = self._foot_link_ids[leg_idx * 2 + 1]
contact_1 = bool(
self._pybullet_client.getContactPoints(
bodyA=0,
bodyB=self.quadruped,
linkIndexA=-1,
linkIndexB=link_id_1))
contact_2 = bool(
self._pybullet_client.getContactPoints(
bodyA=0,
bodyB=self.quadruped,
linkIndexA=-1,
linkIndexB=link_id_2))
contacts.append(contact_1 or contact_2)
return contacts
def GetFootPositionsInBaseFrame(self):
"""Get the robot's foot position in the base frame."""
assert len(self._foot_link_ids) == self.num_legs
foot_positions = []
for foot_id in self.GetFootLinkIDs():
foot_positions.append(
kinematics.link_position_in_base_frame(
robot=self,
link_id=foot_id,
))
return np.array(foot_positions)
def GetTrueMotorAngles(self):
"""Gets the eight motor angles at the current moment, mapped to [-pi, pi].
Returns:
Motor angles, mapped to [-pi, pi].
"""
motor_angles = [state[0] for state in self._joint_states]
motor_angles = np.multiply(
np.asarray(motor_angles) - np.asarray(self._motor_offset),
self._motor_direction)
return motor_angles
def GetMotorAngles(self):
"""Gets the eight motor angles.
    This function mimics the noisy sensor reading and adds latency. The motor
angles that are delayed, noise polluted, and mapped to [-pi, pi].
Returns:
Motor angles polluted by noise and latency, mapped to [-pi, pi].
"""
motor_angles = self._AddSensorNoise(
np.array(self._control_observation[0:self.num_motors]),
self._observation_noise_stdev[0])
return MapToMinusPiToPi(motor_angles)
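    # Sanity example, assuming the module-level MapToMinusPiToPi wraps angles
    # into [-pi, pi]: an angle of 3 * pi / 2 is reported as -pi / 2.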
def GetTrueMotorVelocities(self):
"""Get the velocity of all eight motors.
Returns:
Velocities of all eight motors.
"""
motor_velocities = [state[1] for state in self._joint_states]
motor_velocities = np.multiply(motor_velocities, self._motor_direction)
return motor_velocities
def GetMotorVelocities(self):
"""Get the velocity of all eight motors.
    This function mimics the noisy sensor reading and adds latency.
Returns:
Velocities of all eight motors polluted by noise and latency.
"""
return self._AddSensorNoise(
np.array(self._control_observation[self.num_motors:2 *
self.num_motors]),
self._observation_noise_stdev[1])
def GetTrueMotorTorques(self):
"""Get the amount of torque the motors are exerting.
Returns:
Motor torques of all eight motors.
"""
return self._observed_motor_torques
def GetMotorTorques(self):
"""Get the amount of torque the motors are exerting.
    This function mimics the noisy sensor reading and adds latency.
Returns:
Motor torques of all eight motors polluted by noise and latency.
"""
return self._AddSensorNoise(
np.array(self._control_observation[2 * self.num_motors:3 *
self.num_motors]),
self._observation_noise_stdev[2])
def GetEnergyConsumptionPerControlStep(self):
"""Get the amount of energy used in last one time step.
Returns:
      Energy consumption based on motor velocities and torques, in Joules.
"""
return np.abs(np.dot(
self.GetMotorTorques(),
self.GetMotorVelocities())) * self.time_step * self._action_repeat
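    # Worked example (assumed values): with all eight motors at 1 Nm and
    # 2 rad/s, time_step = 0.001 s and action_repeat = 10, the energy is
    # |8 * 1 * 2| * 0.001 * 10 = 0.16 J per control step.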
def GetTrueBaseOrientation(self):
"""Get the orientation of minitaur's base, represented as quaternion.
Returns:
The orientation of minitaur's base.
"""
return self._base_orientation
def GetBaseOrientation(self):
"""Get the orientation of minitaur's base, represented as quaternion.
    This function mimics the noisy sensor reading and adds latency.
Returns:
The orientation of minitaur's base polluted by noise and latency.
"""
return self._pybullet_client.getQuaternionFromEuler(
self.GetBaseRollPitchYaw())
def GetTrueBaseRollPitchYawRate(self):
"""Get the rate of orientation change of the minitaur's base in euler angle.
Returns:
rate of (roll, pitch, yaw) change of the minitaur's base.
"""
angular_velocity = self._pybullet_client.getBaseVelocity(self.quadruped)[1]
orientation = self.GetTrueBaseOrientation()
return self.TransformAngularVelocityToLocalFrame(angular_velocity,
orientation)
def TransformAngularVelocityToLocalFrame(self, angular_velocity, orientation):
"""Transform the angular velocity from world frame to robot's frame.
Args:
angular_velocity: Angular velocity of the robot in world frame.
orientation: Orientation of the robot represented as a quaternion.
Returns:
      Angular velocity expressed in the local frame of the given orientation.
"""
    # Treat the angular velocity as a position vector, then transform it by the
    # inverse of the given orientation.
# Get inverse quaternion assuming the vector is at 0,0,0 origin.
_, orientation_inversed = self._pybullet_client.invertTransform([0, 0, 0],
orientation)
# Transform the angular_velocity at neutral orientation using a neutral
# translation and reverse of the given orientation.
relative_velocity, _ = self._pybullet_client.multiplyTransforms(
[0, 0, 0], orientation_inversed, angular_velocity,
self._pybullet_client.getQuaternionFromEuler([0, 0, 0]))
return np.asarray(relative_velocity)
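    # An equivalent formulation, as a sketch ('pb' is an assumed handle to the
    # pybullet client): rotate by the transpose of the base rotation matrix.
    #   R = np.array(pb.getMatrixFromQuaternion(orientation)).reshape(3, 3)
    #   local_angular_velocity = R.T.dot(angular_velocity)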
def GetBaseRollPitchYawRate(self):
"""Get the rate of orientation change of the minitaur's base in euler angle.
    This function mimics the noisy sensor reading and adds latency.
Returns:
rate of (roll, pitch, yaw) change of the minitaur's base polluted by noise
and latency.
"""
return self._AddSensorNoise(
np.array(self._control_observation[3 * self.num_motors +
4:3 * self.num_motors + 7]),
self._observation_noise_stdev[4])
def GetActionDimension(self):
"""Get the length of the action list.
Returns:
The length of the action list.
"""
return self.num_motors
def _ApplyOverheatProtection(self, actual_torque):
if self._motor_overheat_protection:
for i in range(self.num_motors):
if abs(actual_torque[i]) > OVERHEAT_SHUTDOWN_TORQUE:
self._overheat_counter[i] += 1
else:
self._overheat_counter[i] = 0
if (self._overheat_counter[i] >
OVERHEAT_SHUTDOWN_TIME / self.time_step):
self._motor_enabled_list[i] = False
def ApplyAction(self, motor_commands, motor_control_mode=None):
"""Apply the motor commands using the motor model.
Args:
motor_commands: np.array. Can be motor angles, torques, hybrid commands,
or motor pwms (for Minitaur only).
motor_control_mode: A MotorControlMode enum.
"""
self.last_action_time = self._state_action_counter * self.time_step
control_mode = motor_control_mode
if control_mode is None:
control_mode = self._motor_control_mode
motor_commands = np.asarray(motor_commands)
q, qdot = self._GetPDObservation()
qdot_true = self.GetTrueMotorVelocities()
actual_torque, observed_torque = self._motor_model.convert_to_torque(
motor_commands, q, qdot, qdot_true, control_mode)
# May turn off the motor
self._ApplyOverheatProtection(actual_torque)
# The torque is already in the observation space because we use
# GetMotorAngles and GetMotorVelocities.
self._observed_motor_torques = observed_torque
# Transform into the motor space when applying the torque.
self._applied_motor_torque = np.multiply(actual_torque,
self._motor_direction)
motor_ids = []
motor_torques = []
for motor_id, motor_torque, motor_enabled in zip(self._motor_id_list,
self._applied_motor_torque,
self._motor_enabled_list):
if motor_enabled:
motor_ids.append(motor_id)
motor_torques.append(motor_torque)
else:
motor_ids.append(motor_id)
motor_torques.append(0)
self._SetMotorTorqueByIds(motor_ids, motor_torques)
def ConvertFromLegModel(self, actions):
"""Convert the actions that use leg model to the real motor actions.
Args:
actions: The theta, phi of the leg model.
Returns:
      The eight desired motor angles that can be used in ApplyAction().
"""
motor_angle = copy.deepcopy(actions)
scale_for_singularity = 1
offset_for_singularity = 1.5
half_num_motors = self.num_motors // 2
quater_pi = math.pi / 4
for i in range(self.num_motors):
action_idx = i // 2
forward_backward_component = (
-scale_for_singularity * quater_pi *
(actions[action_idx + half_num_motors] + offset_for_singularity))
extension_component = (-1)**i * quater_pi * actions[action_idx]
if i >= half_num_motors:
extension_component = -extension_component
motor_angle[i] = (
math.pi + forward_backward_component + extension_component)
return motor_angle
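    # Worked example (assuming an all-zero action vector): for motor i = 0,
    # forward_backward_component = -(pi / 4) * (0 + 1.5) ~= -1.178 and
    # extension_component = 0, so motor_angle[0] = pi - 1.178 ~= 1.963 rad.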
def GetBaseMassesFromURDF(self):
"""Get the mass of the base from the URDF file."""
return self._base_mass_urdf
def GetBaseInertiasFromURDF(self):
"""Get the inertia of the base from the URDF file."""
return self._base_inertia_urdf
def GetLegMassesFromURDF(self):
"""Get the mass of the legs from the URDF file."""
return self._leg_masses_urdf
def GetLegInertiasFromURDF(self):
"""Get the inertia of the legs from the URDF file."""
return self._leg_inertia_urdf
def SetBaseMasses(self, base_mass):
"""Set the mass of minitaur's base.
Args:
base_mass: A list of masses of each body link in CHASIS_LINK_IDS. The
length of this list should be the same as the length of CHASIS_LINK_IDS.
Raises:
ValueError: It is raised when the length of base_mass is not the same as
the length of self._chassis_link_ids.
"""
if len(base_mass) != len(self._chassis_link_ids):
raise ValueError(
"The length of base_mass {} and self._chassis_link_ids {} are not "
"the same.".format(len(base_mass), len(self._chassis_link_ids)))
for chassis_id, chassis_mass in zip(self._chassis_link_ids, base_mass):
self._pybullet_client.changeDynamics(
self.quadruped, chassis_id, mass=chassis_mass)
def SetLegMasses(self, leg_masses):
"""Set the mass of the legs.
A leg includes leg_link and motor. 4 legs contain 16 links (4 links each)
and 8 motors. First 16 numbers correspond to link masses, last 8 correspond
to motor masses (24 total).
Args:
leg_masses: The leg and motor masses for all the leg links and motors.
Raises:
ValueError: It is raised when the length of masses is not equal to number
of links + motors.
"""
if len(leg_masses) != len(self._leg_link_ids) + len(self._motor_link_ids):
raise ValueError("The number of values passed to SetLegMasses are "
"different than number of leg links and motors.")
for leg_id, leg_mass in zip(self._leg_link_ids, leg_masses):
self._pybullet_client.changeDynamics(
self.quadruped, leg_id, mass=leg_mass)
motor_masses = leg_masses[len(self._leg_link_ids):]
for link_id, motor_mass in zip(self._motor_link_ids, motor_masses):
self._pybullet_client.changeDynamics(
self.quadruped, link_id, mass=motor_mass)
def SetBaseInertias(self, base_inertias):
"""Set the inertias of minitaur's base.
Args:
base_inertias: A list of inertias of each body link in CHASIS_LINK_IDS.
The length of this list should be the same as the length of
CHASIS_LINK_IDS.
Raises:
ValueError: It is raised when the length of base_inertias is not the same
as the length of self._chassis_link_ids and base_inertias contains
negative values.
"""
if len(base_inertias) != len(self._chassis_link_ids):
raise ValueError(
"The length of base_inertias {} and self._chassis_link_ids {} are "
"not the same.".format(
len(base_inertias), len(self._chassis_link_ids)))
for chassis_id, chassis_inertia in zip(self._chassis_link_ids,
base_inertias):
for inertia_value in chassis_inertia:
if (np.asarray(inertia_value) < 0).any():
raise ValueError("Values in inertia matrix should be non-negative.")
self._pybullet_client.changeDynamics(
self.quadruped, chassis_id, localInertiaDiagonal=chassis_inertia)
def SetLegInertias(self, leg_inertias):
"""Set the inertias of the legs.
A leg includes leg_link and motor. 4 legs contain 16 links (4 links each)
and 8 motors. First 16 numbers correspond to link inertia, last 8 correspond
to motor inertia (24 total).
Args:
leg_inertias: The leg and motor inertias for all the leg links and motors.
Raises:
ValueError: It is raised when the length of inertias is not equal to
the number of links + motors or leg_inertias contains negative values.
"""
if len(leg_inertias) != len(self._leg_link_ids) + len(self._motor_link_ids):
raise ValueError("The number of values passed to SetLegMasses are "
"different than number of leg links and motors.")
for leg_id, leg_inertia in zip(self._leg_link_ids, leg_inertias):
for inertia_value in leg_inertias:
if (np.asarray(inertia_value) < 0).any():
raise ValueError("Values in inertia matrix should be non-negative.")
self._pybullet_client.changeDynamics(
self.quadruped, leg_id, localInertiaDiagonal=leg_inertia)
motor_inertias = leg_inertias[len(self._leg_link_ids):]
for link_id, motor_inertia in zip(self._motor_link_ids, motor_inertias):
for inertia_value in motor_inertias:
if (np.asarray(inertia_value) < 0).any():
raise ValueError("Values in inertia matrix should be non-negative.")
self._pybullet_client.changeDynamics(
self.quadruped, link_id, localInertiaDiagonal=motor_inertia)
def SetFootFriction(self, foot_friction):
"""Set the lateral friction of the feet.
Args:
foot_friction: The lateral friction coefficient of the foot. This value is
shared by all four feet.
"""
for link_id in self._foot_link_ids:
self._pybullet_client.changeDynamics(
self.quadruped, link_id, lateralFriction=foot_friction)
def SetFootRestitution(self, foot_restitution):
"""Set the coefficient of restitution at the feet.
Args:
foot_restitution: The coefficient of restitution (bounciness) of the feet.
This value is shared by all four feet.
"""
for link_id in self._foot_link_ids:
self._pybullet_client.changeDynamics(
self.quadruped, link_id, restitution=foot_restitution)
def SetJointFriction(self, joint_frictions):
for knee_joint_id, friction in zip(self._foot_link_ids, joint_frictions):
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=knee_joint_id,
controlMode=self._pybullet_client.VELOCITY_CONTROL,
targetVelocity=0,
force=friction)
def GetNumKneeJoints(self):
return len(self._foot_link_ids)
def SetBatteryVoltage(self, voltage):
self._motor_model.set_voltage(voltage)
def SetMotorViscousDamping(self, viscous_damping):
self._motor_model.set_viscous_damping(viscous_damping)
def GetTrueObservation(self):
observation = []
observation.extend(self.GetTrueMotorAngles())
observation.extend(self.GetTrueMotorVelocities())
observation.extend(self.GetTrueMotorTorques())
observation.extend(self.GetTrueBaseOrientation())
observation.extend(self.GetTrueBaseRollPitchYawRate())
return observation
def ReceiveObservation(self):
"""Receive the observation from sensors.
This function is called once per step. The observations are only updated
when this function is called.
"""
self._joint_states = self._pybullet_client.getJointStates(
self.quadruped, self._motor_id_list)
self._base_position, orientation = (
self._pybullet_client.getBasePositionAndOrientation(self.quadruped))
# Computes the relative orientation relative to the robot's
# initial_orientation.
_, self._base_orientation = self._pybullet_client.multiplyTransforms(
positionA=[0, 0, 0],
orientationA=orientation,
positionB=[0, 0, 0],
orientationB=self._init_orientation_inv)
self._observation_history.appendleft(self.GetTrueObservation())
self._control_observation = self._GetControlObservation()
self.last_state_time = self._state_action_counter * self.time_step
def _GetDelayedObservation(self, latency):
"""Get observation that is delayed by the amount specified in latency.
Args:
latency: The latency (in seconds) of the delayed observation.
Returns:
observation: The observation which was actually latency seconds ago.
"""
if latency <= 0 or len(self._observation_history) == 1:
observation = self._observation_history[0]
else:
n_steps_ago = int(latency / self.time_step)
if n_steps_ago + 1 >= len(self._observation_history):
return self._observation_history[-1]
remaining_latency = latency - n_steps_ago * self.time_step
blend_alpha = remaining_latency / self.time_step
observation = (
(1.0 - blend_alpha) * np.array(self._observation_history[n_steps_ago])
+ blend_alpha * np.array(self._observation_history[n_steps_ago + 1]))
return observation
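    # Worked example (assumed values): with latency = 0.025 s and
    # time_step = 0.01 s, n_steps_ago = 2, remaining_latency = 0.005 s and
    # blend_alpha = 0.5, so the result is the average of the observations
    # recorded 2 and 3 control steps ago.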
def _GetPDObservation(self):
pd_delayed_observation = self._GetDelayedObservation(self._pd_latency)
q = pd_delayed_observation[0:self.num_motors]
qdot = pd_delayed_observation[self.num_motors:2 * self.num_motors]
return (np.array(q), np.array(qdot))
def _GetControlObservation(self):
control_delayed_observation = self._GetDelayedObservation(
self._control_latency)
return control_delayed_observation
def _AddSensorNoise(self, sensor_values, noise_stdev):
if noise_stdev <= 0:
return sensor_values
observation = sensor_values + np.random.normal(
scale=noise_stdev, size=sensor_values.shape)
return observation
def SetControlLatency(self, latency):
"""Set the latency of the control loop.
It measures the duration between sending an action from Nvidia TX2 and
receiving the observation from microcontroller.
Args:
latency: The latency (in seconds) of the control loop.
"""
self._control_latency = latency
def GetControlLatency(self):
"""Get the control latency.
Returns:
The latency (in seconds) between when the motor command is sent and when
the sensor measurements are reported back to the controller.
"""
return self._control_latency
def SetMotorGains(self, kp, kd):
"""Set the gains of all motors.
These gains are PD gains for motor positional control. kp is the
proportional gain and kd is the derivative gain.
Args:
kp: proportional gain(s) of the motors.
kd: derivative gain(s) of the motors.
"""
if isinstance(kp, (collections.Sequence, np.ndarray)):
self._motor_kps = np.asarray(kp)
else:
self._motor_kps = np.full(self.num_motors, kp)
if isinstance(kd, (collections.Sequence, np.ndarray)):
self._motor_kds = np.asarray(kd)
else:
self._motor_kds = np.full(self.num_motors, kd)
self._motor_model.set_motor_gains(kp, kd)
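    # Usage sketch ('robot' is a hypothetical instance of this class); both
    # scalar and per-motor gains are accepted:
    #   robot.SetMotorGains(kp=1.0, kd=0.015)
    #   robot.SetMotorGains(kp=np.full(8, 1.0), kd=np.full(8, 0.015))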
def GetMotorGains(self):
"""Get the gains of the motor.
Returns:
The proportional gain.
The derivative gain.
"""
return self._motor_kps, self._motor_kds
def GetMotorPositionGains(self):
"""Get the position gains of the motor.
Returns:
The proportional gain.
"""
return self._motor_kps
def GetMotorVelocityGains(self):
"""Get the velocity gains of the motor.
Returns:
The derivative gain.
"""
return self._motor_kds
def SetMotorStrengthRatio(self, ratio):
"""Set the strength of all motors relative to the default value.
Args:
ratio: The relative strength. A scalar range from 0.0 to 1.0.
"""
self._motor_model.set_strength_ratios([ratio] * self.num_motors)
def SetMotorStrengthRatios(self, ratios):
"""Set the strength of each motor relative to the default value.
Args:
ratios: The relative strength. A numpy array ranging from 0.0 to 1.0.
"""
self._motor_model.set_strength_ratios(ratios)
def SetTimeSteps(self, action_repeat, simulation_step):
"""Set the time steps of the control and simulation.
Args:
action_repeat: The number of simulation steps that the same action is
repeated.
simulation_step: The simulation time step.
"""
self.time_step = simulation_step
self._action_repeat = action_repeat
def _GetMotorNames(self):
return MOTOR_NAMES
def _GetDefaultInitPosition(self):
"""Returns the init position of the robot.
It can be either 1) origin (INIT_POSITION), 2) origin with a rack
(INIT_RACK_POSITION), or 3) the previous position.
"""
    # If continuous resetting is enabled and this is not the first episode.
if self._reset_at_current_position and self._observation_history:
x, y, _ = self.GetBasePosition()
_, _, z = INIT_POSITION
return [x, y, z]
if self._on_rack:
return INIT_RACK_POSITION
else:
return INIT_POSITION
def _GetDefaultInitOrientation(self):
"""Returns the init position of the robot.
It can be either 1) INIT_ORIENTATION or 2) the previous rotation in yaw.
"""
    # If continuous resetting is enabled and this is not the first episode.
if self._reset_at_current_position and self._observation_history:
_, _, yaw = self.GetBaseRollPitchYaw()
return self._pybullet_client.getQuaternionFromEuler([0.0, 0.0, yaw])
return INIT_ORIENTATION
@property
def chassis_link_ids(self):
return self._chassis_link_ids
def SetAllSensors(self, sensors):
"""set all sensors to this robot and move the ownership to this robot.
Args:
sensors: a list of sensors to this robot.
"""
for s in sensors:
s.set_robot(self)
self._sensors = sensors
def GetAllSensors(self):
"""get all sensors associated with this robot.
Returns:
sensors: a list of all sensors.
"""
return self._sensors
def GetSensor(self, name):
"""get the first sensor with the given name.
This function return None if a sensor with the given name does not exist.
Args:
name: the name of the sensor we are looking
Returns:
sensor: a sensor with the given name. None if not exists.
"""
for s in self._sensors:
if s.get_name() == name:
return s
return None
@property
def is_safe(self):
return self._is_safe
@property
def last_action(self):
return self._last_action
def ProcessAction(self, action, substep_count):
"""If enabled, interpolates between the current and previous actions.
Args:
action: current action.
      substep_count: the step count, which should be in [0, self._action_repeat).
Returns:
If interpolation is enabled, returns interpolated action depending on
the current action repeat substep.
"""
if self._enable_action_interpolation:
if self._last_action is not None:
prev_action = self._last_action
else:
prev_action = self.GetMotorAngles()
lerp = float(substep_count + 1) / self._action_repeat
proc_action = prev_action + lerp * (action - prev_action)
else:
proc_action = action
return proc_action
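    # Worked example (assumed values): with action_repeat = 3, a previous
    # action of 0.0 and a new action of 0.9, the three substeps apply
    # 0.3, 0.6 and 0.9 (lerp = 1/3, 2/3, 3/3).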
def _BuildActionFilter(self):
sampling_rate = 1 / (self.time_step * self._action_repeat)
num_joints = self.GetActionDimension()
a_filter = action_filter.ActionFilterButter(
sampling_rate=sampling_rate, num_joints=num_joints)
return a_filter
def _ResetActionFilter(self):
self._action_filter.reset()
return
def _FilterAction(self, action):
# initialize the filter history, since resetting the filter will fill
# the history with zeros and this can cause sudden movements at the start
# of each episode
if self._step_counter == 0:
default_action = self.GetMotorAngles()
self._action_filter.init_history(default_action)
filtered_action = self._action_filter.filter(action)
return filtered_action
@property
def pybullet_client(self):
return self._pybullet_client
@property
def joint_states(self):
return self._joint_states
@classmethod
def GetConstants(cls):
del cls
return minitaur_constants
|
[
"franktian424@qq.com"
] |
franktian424@qq.com
|
3aee32793c6ba419d0594d5a9e2bcaa214a87ed3
|
9edfd50576323ab50517d2694efa7d970a5a487e
|
/bin/shotgun_pickTask.py
|
4cd5957d581ff10f1cf674cd3f75846f41f45a3f
|
[] |
no_license
|
agudmund/util
|
136de9961c3f1cc86faf183bb5c576c55f73417c
|
457c8471296de944252fa6f474fea3ac07376d51
|
refs/heads/master
| 2023-08-11T00:21:53.795855
| 2023-07-25T12:00:46
| 2023-07-25T12:00:46
| 79,660,992
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,435
|
py
|
#!/usr/bin/env C:/Python27/python.exe
import os
import sys
import random
import json
import shotgun_api3
SERVER_PATH = os.getenv("SHOTGUN_SERVER_PATH")
SCRIPT_USER = os.getenv("SHOTGUN_SCRIPT_USER")
SCRIPT_KEY = os.getenv("SHOTGUN_SCRIPT_KEY")
sg = shotgun_api3.Shotgun(SERVER_PATH, SCRIPT_USER, SCRIPT_KEY)
proj = sys.argv[-1]
def listProjects():
x=sg.find('Project',[],['name'])
for n in x:
print(n)
def getProject(sg,project):
filters = [['name', 'is', project]]
fields = ['id', 'name']
print (sg.find_one('Project', filters, fields))
def find_asset_tasks(sg, project, asset):
print ("searching:", project, "for Asset called:", asset)
filters = [
['project.Project.name', 'is', project],
['entity.Asset.code', 'is', asset],
]
fields = ['content', 'id','name']
sg_tasks = sg.find("Task", filters, fields)
return sg_tasks
def pickOne():
filters = [
["sg_status_list", "is_not", "fin"],
["sg_status_list", "is_not", "hld"],
["sg_status_list", "is_not", "omt"],
{"filter_operator": "any",
"filters": [
['project.Project.name', 'is', proj]
]}]
result = sg.find("Asset", filters,['content', 'id','project','code','type'])
rez = random.choice(result)
print (" ".join(['Still stuff to do on', rez['code'], 'in', rez['type']]))
def uploadAssetThumbnails():
filters = [
["sg_status_list", "is_not", "fin"],
["sg_status_list", "is_not", "hld"],
["sg_status_list", "is_not", "omt"],
{"filter_operator": "any",
"filters": [
['project.Project.name', 'is', proj]
]}]
result = sg.find("Asset", filters,['content', 'id','project','code','type'])
rez = random.choice(result)
thumbpath = r'C:\Users\normal\Projects\Match Dot Com\Documents'
thumbs = [ os.path.join(thumbpath,n ) for n in os.listdir(thumbpath) if n.endswith(".JPG")]
for asset in result:
for thumb in thumbs:
if asset['code'] == thumb.split('\\')[-1].split('.')[0]:
print ('x',asset)
sg.upload_thumbnail('Asset',asset['id'] , thumb ) # Needs python 3 apparently
continue
def createShots():
root = r'C:\Users\normal\Projects\Darth Kindergarten\Maya\images\shots'
for shot in os.listdir(root):
        data = {
            'project': {"type": "Project", "id": "insert project id"},
            'code': shot.split(".")[0],
            'sg_status_list': 'ip'
        }
        result = sg.create('Shot', data)
filters = [
{"filter_operator": "any",
"filters": [
['project.Project.name', 'is', "insert project name"]
]}]
result = sg.find('Shot', filters, ['id','code'])
for r in result:
if r['code'] == shot.split('.')[0]:
sg.upload_thumbnail('Shot',r['id'] , os.path.join(root,shot) )
if __name__ == '__main__':
pickOne()
# for asset in result:
# print find_asset_tasks(sg, "Match Dot Com", asset['code'])
# print find_asset_tasks(sg, "Match Dot Com", 1344)
# getProject(sg,"Match Dot Com")
# sg.summarize(entity_type='Task',
# filters = [
# ['entity.Asset.sg_sequence', 'is', {'type': 'Sequence', 'id': 2}],
# ['sg_status_list', 'is_not', 'na']],
# summary_fields=[{'field': 'id', 'type': 'count'}, {'field': 'due_date', 'type': 'latest'}],
# grouping=[{'field': 'entity', 'type': 'exact', 'direction': 'asc'}])
|
[
"aevar.gudmundsson@gmail.com"
] |
aevar.gudmundsson@gmail.com
|
80b3358be81f5d974d6f594abfd81b0f94056eea
|
6701eae4550c7cd3d8703565c7be3a5e26248676
|
/test/functional/txn_clone.py
|
960fbddca9ce3e61f1041d65c503eba4502734fa
|
[
"MIT"
] |
permissive
|
stochastic-thread/bootstrapping-ellocash
|
91de56a330090c004af31d861e6d4cfb8d8b9e36
|
9495f1e3741c7f893457e4f6602d6ef0d84b7b3d
|
refs/heads/master
| 2021-09-05T05:00:19.403780
| 2018-01-24T08:09:44
| 2018-01-24T08:09:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,600
|
py
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there are cloned transactions with malleated scriptsigs."""
from test_framework.test_framework import EllocashTestFramework
from test_framework.util import *
class TxnMallTest(EllocashTestFramework):
def set_test_params(self):
self.num_nodes = 4
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
def setup_network(self):
# Start with split network:
super(TxnMallTest, self).setup_network()
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
def run_test(self):
# All nodes should start with 1,250 LOC:
starting_balance = 1250
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar accounts:
self.nodes[0].settxfee(.001)
node0_address_foo = self.nodes[0].getnewaddress("foo")
fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 1219)
fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
node0_address_bar = self.nodes[0].getnewaddress("bar")
fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 29)
fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
assert_equal(self.nodes[0].getbalance(""),
starting_balance - 1219 - 29 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress("from0")
# Send tx1, and another transaction tx2 that won't be cloned
txid1 = self.nodes[0].sendfrom("foo", node1_address, 40, 0)
txid2 = self.nodes[0].sendfrom("bar", node1_address, 20, 0)
# Construct a clone of tx1, to be malleated
rawtx1 = self.nodes[0].getrawtransaction(txid1,1)
clone_inputs = [{"txid":rawtx1["vin"][0]["txid"],"vout":rawtx1["vin"][0]["vout"]}]
clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][0]["value"],
rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][1]["value"]}
clone_locktime = rawtx1["locktime"]
clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs, clone_locktime)
# createrawtransaction randomizes the order of its outputs, so swap them if necessary.
# output 0 is at version+#inputs+input+sigstub+sequence+#outputs
# 40 LOC serialized is 00286bee00000000
pos0 = 2*(4+1+36+1+4+1)
hex40 = "00286bee00000000"
output_len = 16 + 2 + 2 * int("0x" + clone_raw[pos0 + 16 : pos0 + 16 + 2], 0)
if (rawtx1["vout"][0]["value"] == 40 and clone_raw[pos0 : pos0 + 16] != hex40 or
rawtx1["vout"][0]["value"] != 40 and clone_raw[pos0 : pos0 + 16] == hex40):
output0 = clone_raw[pos0 : pos0 + output_len]
output1 = clone_raw[pos0 + output_len : pos0 + 2 * output_len]
clone_raw = clone_raw[:pos0] + output1 + output0 + clone_raw[pos0 + 2 * output_len:]
# Use a different signature hash type to sign. This creates an equivalent but malleated clone.
# Don't send the clone anywhere yet
tx1_clone = self.nodes[0].signrawtransaction(clone_raw, None, None, "ALL|ANYONECANPAY")
assert_equal(tx1_clone["complete"], True)
# Have node0 mine a block, if requested:
if (self.options.mine_block):
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Node0's balance should be starting balance, plus 50LOC for another
# matured block, minus tx1 and tx2 amounts, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
if self.options.mine_block: expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
# foo and bar accounts should be debited:
assert_equal(self.nodes[0].getbalance("foo", 0), 1219 + tx1["amount"] + tx1["fee"])
assert_equal(self.nodes[0].getbalance("bar", 0), 29 + tx2["amount"] + tx2["fee"])
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's "from0" balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"] + tx2["amount"]))
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Send clone and its parent to miner
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
self.nodes[2].sendrawtransaction(tx2["hex"])
self.nodes[2].generate(1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx1_clone = self.nodes[0].gettransaction(txid1_clone)
tx2 = self.nodes[0].gettransaction(txid2)
# Verify expected confirmations
assert_equal(tx1["confirmations"], -2)
assert_equal(tx1_clone["confirmations"], 2)
assert_equal(tx2["confirmations"], 1)
# Check node0's total balance; should be same as before the clone, + 100 LOC for 2 matured,
# less possible orphaned matured subsidy
expected += 100
if (self.options.mine_block):
expected -= 50
assert_equal(self.nodes[0].getbalance(), expected)
assert_equal(self.nodes[0].getbalance("*", 0), expected)
# Check node0's individual account balances.
# "foo" should have been debited by the equivalent clone of tx1
assert_equal(self.nodes[0].getbalance("foo"), 1219 + tx1["amount"] + tx1["fee"])
# "bar" should have been debited by (possibly unconfirmed) tx2
assert_equal(self.nodes[0].getbalance("bar", 0), 29 + tx2["amount"] + tx2["fee"])
# "" should have starting balance, less funding txes, plus subsidies
assert_equal(self.nodes[0].getbalance("", 0), starting_balance
- 1219
+ fund_foo_tx["fee"]
- 29
+ fund_bar_tx["fee"]
+ 100)
# Node1's "from0" account balance
assert_equal(self.nodes[1].getbalance("from0", 0), -(tx1["amount"] + tx2["amount"]))
if __name__ == '__main__':
TxnMallTest().main()
|
[
"arthurcolle@Arthurs-MacBook-Pro.local"
] |
arthurcolle@Arthurs-MacBook-Pro.local
|
5f93b1e2b6d1ad818e179659e266a4b51598a5bb
|
34092feef434547abc852dfa1802c5f2178fb9ca
|
/python/searchRange.py
|
ce594aefa0cd98115b600f4ad02ab2d054772517
|
[] |
no_license
|
achyudh/leetcode-solutions
|
c59535650cc42e3b629fa12627aec0768325d2b8
|
3997b8bfa90a27cf8ccda10cdd34e9db3afebd7a
|
refs/heads/master
| 2021-06-26T01:25:38.691813
| 2020-10-26T00:03:48
| 2020-10-26T00:03:48
| 147,906,102
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 860
|
py
|
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
if len(nums) == 0:
return -1, -1
ptr_u = len(nums) - 1
ptr_l = 0
while ptr_u >= ptr_l:
ptr_m = (ptr_u + ptr_l) // 2
if nums[ptr_m] <= target:
ptr_l = ptr_m + 1
else:
ptr_u = ptr_m - 1
ptr_a = ptr_l - 1
if nums[ptr_a] != target:
return -1, -1
ptr_u = len(nums) - 1
ptr_l = 0
while ptr_u >= ptr_l:
ptr_m = (ptr_u + ptr_l) // 2
if nums[ptr_m] >= target:
ptr_u = ptr_m - 1
else:
ptr_l = ptr_m + 1
return ptr_l, ptr_a
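# A quick usage sketch (results verified by hand):
#   Solution().searchRange([5, 7, 7, 8, 8, 10], 8)  # -> (3, 4)
#   Solution().searchRange([5, 7, 7, 8, 8, 10], 6)  # -> (-1, -1)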
|
[
"noreply@github.com"
] |
noreply@github.com
|
c7f7d61a99c4af08ce8617ff03de87e252df99b8
|
fd143272c9e958fa814692454ddb4a2eced2ab92
|
/S4.2.py
|
8bbe73c5cbcca0a385bda40a62e76e2558d95687
|
[] |
no_license
|
taowenyin/HelloSLM
|
febf3c4b4ce515da27ec307f8184d3b11e43a0dd
|
adced132a187c0e2d708ca9858535e8cebe664b4
|
refs/heads/master
| 2022-05-27T09:13:59.122326
| 2020-04-26T14:01:47
| 2020-04-26T14:01:47
| 259,044,868
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,413
|
py
|
import numpy as np
from sklearn.naive_bayes import GaussianNB, BernoulliNB, MultinomialNB
if __name__ == '__main__':
data = np.array([
[1, 'S', -1], [1, 'M', -1], [1, 'M', 1], [1, 'S', 1], [1, 'S', -1],
[2, 'S', -1], [2, 'M', -1], [2, 'M', 1], [2, 'L', 1], [2, 'L', 1],
[3, 'L', 1], [3, 'M', 1], [3, 'M', 1], [3, 'L', 1], [3, 'L', -1],
])
    # Features
train_x = data[:, [0, 1]]
    # Labels
    train_y = data[:, 2].astype(int)
    # Batch-replace the categorical values with integers
train_x[train_x == 'S'] = 4
train_x[train_x == 'M'] = 5
train_x[train_x == 'L'] = 6
    # Type conversion
    train_x = train_x.astype(int)
    # Test data
    x = np.array([[2, 'S']])
    x[x == 'S'] = 4
    x = x.astype(int)
    # Gaussian naive Bayes
gaussianNB = GaussianNB()
gaussianNB.fit(train_x, train_y)
print('Gaussian Test X = ', gaussianNB.predict(x))
print('Gaussian Test X = ', gaussianNB.predict_proba(x))
    # Bernoulli naive Bayes
bernoulliNB = BernoulliNB()
bernoulliNB.fit(train_x, train_y)
print('Bernoulli Test X = ', bernoulliNB.predict(x))
print('Bernoulli Test X = ', bernoulliNB.predict_proba(x))
    # Multinomial naive Bayes
multinomialNB = MultinomialNB()
multinomialNB.fit(train_x, train_y)
print('Multinomial Test X = ', multinomialNB.predict(x))
print('Multinomial Test X = ', multinomialNB.predict_proba(x))
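    # Note: the sizes 'S'/'M'/'L' are recoded as the integers 4/5/6 above
    # because MultinomialNB requires non-negative numeric features; the
    # Gaussian and Bernoulli variants are less restrictive about encoding.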
|
[
"wenyin.tao@163.com"
] |
wenyin.tao@163.com
|
090f82f82fdc3d02c3bb17c3ee32ed6c85c8c08e
|
0a25ea42bd8aff27c939b7de9d9a8ea036b0c66f
|
/thrift/thrift-utils/test/ezpz/__init__.py
|
6b1fb9fe954c52e463b0d180312d8dccde9dae94
|
[
"Apache-2.0"
] |
permissive
|
ezbake/ezbake-common-python
|
118a20e2f88aaa29f95459b6bb163d0a828407d0
|
fc82fb71852750cc2cfcbd7af0cb6843fad13b89
|
refs/heads/master
| 2021-01-01T05:38:30.502302
| 2015-03-02T20:08:32
| 2015-03-02T20:08:32
| 31,560,413
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 623
|
py
|
# Copyright (C) 2013-2015 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
[
"jhastings@42six.com"
] |
jhastings@42six.com
|
a6ca45275323f2440e95e9be09e07f653e6250ef
|
f9e4c2e9cd4a95dc228b384e2e8abadc9f1b0bda
|
/fratevents/settings.py
|
22d7c2df522fd15d68bce7043a05c6b6fa4c9fe0
|
[] |
no_license
|
sanchitbareja/fratevents
|
227adddd77c9a0055ccd74d5e0bf6f771790f8d3
|
f50c8ccb40b8c9124b40e70d90c9190ef27a2fb7
|
refs/heads/master
| 2016-09-06T15:36:45.443412
| 2013-02-16T21:13:36
| 2013-02-16T21:13:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,645
|
py
|
# Django settings for fratevents project.
import os, os.path, social_auth
if 'DATABASE_URL' in os.environ:
DEBUG = True
else:
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Sanchit Bareja', 'sanchitbareja@gmail.com'),
)
MANAGERS = ADMINS
if 'DATABASE_URL' in os.environ:
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'fratevents', # Or path to database file if using sqlite3.
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': 'root', # Not used with sqlite3.
'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/static/'
SEND_BROKEN_LINK_EMAILS = True
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = '/static/'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = os.path.join(os.path.dirname(__file__), 'static/').replace('\\','/')
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__), 'static/').replace('\\','/'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'rsx9)l1^_bsmeyipfk9u#t#gdt%@po-i-hr+#8ensmg012!kpn'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'social_auth.middleware.SocialAuthExceptionMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'fratevents.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'fratevents.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__), 'views').replace('\\','/'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'south',
'gunicorn',
'events',
'clubs',
'rage',
'userprofile',
'social_auth',
'storages',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# EMAIL SETTINGS
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = 'caleventsinfo@gmail.com'
EMAIL_HOST_PASSWORD = 'qwaszx12,'
EMAIL_PORT = 587
EVENT_MASTERS = ['sanchitbareja@gmail.com','hahardikagrawal@gmail.com','caleventsinfo@gmail.com']
# Facebook Integration Settings
AUTHENTICATION_BACKENDS = (
'social_auth.backends.facebook.FacebookBackend',
'django.contrib.auth.backends.ModelBackend',
)
# userprofile creation
AUTH_PROFILE_MODULE = 'userprofile.UserProfile'
FACEBOOK_APP_ID = '343708889077375'
FACEBOOK_API_SECRET = '0bd34d3dbb482579fb990805860267bd'
FACEBOOK_EXTENDED_PERMISSIONS = ['email', 'user_birthday', 'user_interests', 'user_events', 'manage_pages']
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.contrib.messages.context_processors.messages',
'social_auth.context_processors.social_auth_by_type_backends',
)
LOGIN_REDIRECT_URL = '/'
SOCIAL_AUTH_PIPELINE = (
'social_auth.backends.pipeline.social.social_auth_user',
#'social_auth.backends.pipeline.associate.associate_by_email',
'social_auth.backends.pipeline.user.get_username',
'social_auth.backends.pipeline.user.create_user',
'social_auth.backends.pipeline.social.associate_user',
'social_auth.backends.pipeline.social.load_extra_data',
'social_auth.backends.pipeline.user.update_user_details',
'fratevents.pipeline.create_user_profile',
'fratevents.pipeline.get_user_profile_pic',
'fratevents.pipeline.get_user_events',
'fratevents.pipeline.get_user_network',
'fratevents.pipeline.get_user_pages',
)
SOCIAL_AUTH_CREATE_USERS = True
SOCIAL_AUTH_FORCE_RANDOM_USERNAME = False
SOCIAL_AUTH_DEFAULT_USERNAME = 'socialauth_user'
SOCIAL_AUTH_COMPLETE_URL_NAME = 'socialauth_complete'
LOGIN_ERROR_URL = '/login/error/'
SOCIAL_AUTH_ERROR_KEY = 'socialauth_error'
SOCIAL_AUTH_FORCE_POST_DISCONNECT = True
#AWS S3 Credentials - django-storages
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = 'AKIAISDEISAIY3LRYY3Q'
AWS_SECRET_ACCESS_KEY = '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'
AWS_STORAGE_BUCKET_NAME = 'calevents'
BUCKET_NAME = 'calevents'
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
AWS_UPLOAD_DESTINATION = "http://s3.amazonaws.com/"+str(BUCKET_NAME)+"/"
|
[
"sanchitbareja@gmail.com"
] |
sanchitbareja@gmail.com
|
bfe99fb243985b17460e1aa571a5d05a72641616
|
91049acdda426be9c8f22de19500a40e366fef81
|
/Client/modules/code_execution/code_execution.py
|
0f9142463a8563cdc1280343d2605234888597f3
|
[] |
no_license
|
VictorAlonsoCM/Project_RAT
|
f3e55e9525e05175fa8502fa23f5bfd31ec4957f
|
01d3079586ab08a92c8e7c9d2d1053710cac7505
|
refs/heads/master
| 2020-09-30T02:00:23.395275
| 2019-12-10T17:01:45
| 2019-12-10T17:01:45
| 227,172,602
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,603
|
py
|
import os
import re
import subprocess
from modules.sockets.socket import Network
class CodeExecution:
def __init__(self, nt):
        self.nt = nt
def dir(self):
cmd = "dir"
stream = os.popen(cmd)
output = stream.readlines()
        data = ''
for item in output:
data += str(item)
print(data)
return data
def systeminfo(self):
cmd = "systeminfo"
stream = os.popen(cmd)
output = stream.readlines()
data = ''
for item in output:
data += str(item)
print(data)
return data
def whoami(self):
cmd = "whoami"
stream = os.popen(cmd)
output = stream.readlines()
data = ''
for item in output:
data += str(item)
print(data)
return data
def net_users(self):
cmd = "net users"
stream = os.popen(cmd)
output = stream.readlines()
data = ''
for item in output:
data += str(item)
print(data)
return data
def net_localgroups(self):
cmd = "net localgroups"
stream = os.popen(cmd)
output = stream.readlines()
data = ''
for item in output:
data += str(item)
print(data)
return data
def custom(self):
self.nt.connectionSend("Insert your command: ".encode("utf-8"))
cmd = str(self.nt.connectionRecv())
stream = os.popen(cmd)
output = stream.readlines()
data = ''
for item in output:
data += str(item)
if not data:
return "\r\nSyntax error\r\n"
return data
def get_shell(self):
return "Shell goes here....."
def code_execution(self):
options_list = ["2. dir", "3. systeminfo", "4. whoami", "5. net users", "6. net localgroup", "7. custom", "8. get a shell"]
data = ""
for item in options_list:
print(item)
data += item+"\r\n"
self.nt.connectionSend("Getting RCE...".encode("utf-8"))
self.nt.connectionSend(data.encode("utf-8"))
option = int(self.nt.connectionRecv())
if(option == 1):
self.nt.connectionClose()
        switcher = {
            2: self.dir,
            3: self.systeminfo,
            4: self.whoami,
            5: self.net_users,
            6: self.net_localgroups,
            7: self.custom,
            8: self.get_shell
        }
callback = switcher.get(option, "Invalid option")
return callback()
|
[
"vacontrerasmeza@gmail.com"
] |
vacontrerasmeza@gmail.com
|
ed3c4924a869f8d4d77bcbdfcb1d637c1d9150bf
|
40c94870c9bbc5d3ca5fccccbbe648f17167be3e
|
/additional script/Dodelson3.4.py
|
cf7e2638b65c9f8b9d65438aabcfea16f5d0a53b
|
[] |
no_license
|
zsw6666/ASTRO_script
|
62b9cfef756723a7a5ba3d16fb0205eb7c5eb725
|
89de215edb86b516781192b349eddf15a55114b1
|
refs/heads/version1.0
| 2020-04-16T12:31:09.935061
| 2019-10-16T11:41:39
| 2019-10-16T11:41:39
| 165,577,794
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,150
|
py
|
import numpy as np
import astropy.constants as const
import astropy.units as u
from matplotlib import pyplot as plt
def lamda(x):
s=(255/(886.7*(x**5)))*(12.+6.*x+x**2)
return s
def H(x):
Q=((1.293*u.MeV).to(u.J)).value
g_star=10.75
s=(2*np.pi/3)*np.sqrt((g_star*np.pi*const.G.value)/(5.))*((Q**2)/(x**2))
return s
def Mu(x):
tao=886.7
Q=((1.293*u.MeV).to(u.J)).value
s1=(-255./(tao*Q))*(4*(np.pi**3)*const.G.value*(Q**2)*10.75/45.)**(-0.5)
s2=(4/x**3)+(3/x**2)+(1/x)+np.exp(-x)*((4/x**3)+(3/x**2))
return s1*s2
def Intfunc(x,x0):
s1=(lamda(x)*np.exp(-x))/(x*H(x))
s2=np.exp(Mu(x)-Mu(x0))
return s1*s2
def Integrator(func,interval,x0):
x=np.linspace(interval[0],interval[1],1000)
    delta=x[1]-x[0]
    y=func(x,x0)
    s=np.sum(y*delta)
return s
def Kai_n(x):
s=Integrator(Intfunc,[0.1,x],x)
return s
def Run():
xlist=np.linspace(1,1000,1000)
ylist=[]
for x in xlist:
ylist.append(Kai_n(x))
ylist=np.array(ylist)
plt.plot(xlist,ylist)
plt.yscale('log')
plt.xlabel(r'$x$')
plt.ylabel(r'$X_{n}$')
plt.show()
return None
Run()
|
[
"zsw18@mails.tsinghua.edu.cn"
] |
zsw18@mails.tsinghua.edu.cn
|
a28b2a3cf60f98bd998c8924c10a1f170376436f
|
9bc228372e586a1f90bb0685c43e744be9638ecd
|
/18_정은서/session08/catproject/catproject/asgi.py
|
ff0fcedfade84d46b53bbf8fe87c19d61c08bf30
|
[
"MIT"
] |
permissive
|
LikeLionSCH/9th_ASSIGNMENT
|
3e58862a76e3232aed7e19e8939da23330ff2e22
|
c211995ad12f404833ffec7fd80e1229b82a3bfa
|
refs/heads/master
| 2023-07-03T10:27:11.843177
| 2021-08-02T14:52:02
| 2021-08-02T14:52:02
| 379,633,279
| 7
| 18
|
MIT
| 2021-08-02T14:52:03
| 2021-06-23T14:36:59
|
Python
|
UTF-8
|
Python
| false
| false
| 397
|
py
|
"""
ASGI config for catproject project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'catproject.settings')
application = get_asgi_application()
|
[
"em4784@gmail.com"
] |
em4784@gmail.com
|
925f9662216aa18534707e210800d7309b084bd3
|
dd1b9eaf9e996444f2995220df93f7213133490d
|
/account/migrations/0001_initial.py
|
de37dde6d2e51cd2b1f37b2e175d8169816a843f
|
[] |
no_license
|
4k45hv3rm4/login_system
|
69961f0b81c1e315b2db69588af31eff9d7f6a43
|
a07bd08a3cccd6231a253c69e920cbea528081af
|
refs/heads/master
| 2023-01-10T02:26:50.021694
| 2020-11-16T13:34:27
| 2020-11-16T13:34:27
| 313,309,582
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,226
|
py
|
# Generated by Django 3.0.8 on 2020-07-15 11:36
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Account',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('email', models.EmailField(max_length=60, unique=True, verbose_name='email')),
('username', models.CharField(max_length=30, unique=True)),
('date_joined', models.DateTimeField(auto_now_add=True, verbose_name='date joined')),
('last_login', models.DateTimeField(auto_now=True, verbose_name='last login')),
('is_admin', models.BooleanField(default=False)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('is_superuser', models.BooleanField(default=False)),
],
options={
'abstract': False,
},
),
]
|
[
"4k45hr0ck5007@gmail.com"
] |
4k45hr0ck5007@gmail.com
|
bd4c1df790a65e3af952ca5f291241677dd3a7c6
|
e02cdc2908ea54bfe1232ccb4c515bb355372320
|
/eventex/urls.py
|
451b393b79f77411e24635476b72ed952943c818
|
[] |
no_license
|
iveinbox/wttd
|
3924a2f73ccee7f43edc44bcd3920d27c27404d2
|
0ad64a5814fa59ed9877123592a6b5fd65c64c69
|
refs/heads/master
| 2019-07-16T13:05:56.778667
| 2017-11-05T04:06:55
| 2017-11-05T04:06:55
| 93,279,371
| 0
| 1
| null | 2017-06-05T15:12:35
| 2017-06-03T23:35:15
|
CSS
|
UTF-8
|
Python
| false
| false
| 847
|
py
|
"""eventex URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from eventex.core.views import home
from eventex.subscriptions.views import subscribe
urlpatterns = [
url(r'^$', home),
url(r'^inscricao/$', subscribe),
url(r'^admin/', include(admin.site.urls)),
]
|
[
"iveelaureane@gmail.com"
] |
iveelaureane@gmail.com
|
47c78acbdccd77b171655ae99a43265f89f41011
|
30898ff2de7b05412caf60a23c1f5b53ff35ffd9
|
/Script.py
|
b5f7417f84d0251ff6028eb21361ea507a4a02fb
|
[] |
no_license
|
niravshah2705/DynamoDB_to_Athena
|
f106b773dd92008e5e5288cdb49fe16cec97ed1a
|
902d3d92b3a3c0c30b2c1630d024f98424977d4b
|
refs/heads/master
| 2020-04-08T19:26:12.273525
| 2018-12-02T03:59:12
| 2018-12-02T03:59:12
| 159,655,315
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,925
|
py
|
import sys
import ast
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
## @params: [JOB_NAME]
args = getResolvedOptions(sys.argv, ['JOB_NAME','sourcedb','destinationdb','sourcetable','destinationtable','mapping','fields'])
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
## @type: DataSource
## @args: [database = "aws-blogs-glue-database098234ytb2", table_name = "auto_billplatformcredentials", transformation_ctx = "datasource0"]
## @return: datasource0
## @inputs: []
datasource0 = glueContext.create_dynamic_frame.from_catalog(database = args['sourcedb'], table_name = args['sourcetable'], transformation_ctx = "datasource0")
## @type: ApplyMapping
## @args: [mapping = [("cgimageurl", "string", "cgimageurl", "string"), ("id", "string", "id", "string"), ("billname", "string", "billname", "string"), ("billserverurl", "string", "billserverurl", "string"), ("billapiusername", "string", "billapiusername", "string"), ("billcurrency", "string", "billcurrency", "string"), ("billappname", "string", "billappname", "string"), ("billapipassword", "string", "billapipassword", "string"), ("billdescription", "string", "billdescription", "string"), ("billverifykey", "string", "billverifykey", "string"), ("billchannel", "string", "billchannel", "string"), ("billmerchantid", "string", "billmerchantid", "string"), ("billcountry", "string", "billcountry", "string"), ("billsubsamounttype", "string", "billsubsamounttype", "string"), ("billsubsenableretry", "string", "billsubsenableretry", "string"), ("billsubsppionly", "string", "billsubsppionly", "string"), ("billwebsiteweb", "string", "billwebsiteweb", "string"), ("billchannelweb", "string", "billchannelweb", "string"), ("billgeneratechecksumurl", "string", "billgeneratechecksumurl", "string"), ("billverifychecksumurl", "string", "billverifychecksumurl", "string"), ("billmerchantkey", "string", "billmerchantkey", "string"), ("buildtype", "string", "buildtype", "string"), ("billrequesttype", "string", "billrequesttype", "string"), ("billsubsfrequencyunit", "string", "billsubsfrequencyunit", "string"), ("billtheme", "string", "billtheme", "string"), ("billsubsfrequency", "string", "billsubsfrequency", "string"), ("billchannelwap", "string", "billchannelwap", "string"), ("billwebsitewap", "string", "billwebsitewap", "string"), ("billindustrytypeid", "string", "billindustrytypeid", "string"), ("billserverurlsandbox", "string", "billserverurlsandbox", "string")], transformation_ctx = "applymapping1"]
## @return: applymapping1
## @inputs: [frame = datasource0]
applymapping1 = ApplyMapping.apply(frame = datasource0, mappings = ast.literal_eval(args['mapping']), transformation_ctx = "applymapping1")
## @type: SelectFields
## @args: [paths = ["cgimageurl", "id", "billname", "billserverurl", "billapiusername", "billcurrency", "billappname", "billapipassword", "billdescription", "billverifykey", "billchannel", "billmerchantid", "billcountry", "billsubsamounttype", "billsubsenableretry", "billsubsppionly", "billwebsiteweb", "billchannelweb", "billgeneratechecksumurl", "billverifychecksumurl", "billmerchantkey", "buildtype", "billrequesttype", "billsubsfrequencyunit", "billtheme", "billsubsfrequency", "billchannelwap", "billwebsitewap", "billindustrytypeid", "billserverurlsandbox"], transformation_ctx = "selectfields2"]
## @return: selectfields2
## @inputs: [frame = applymapping1]
selectfields2 = SelectFields.apply(frame = applymapping1, paths = ast.literal_eval(args['fields']), transformation_ctx = "selectfields2")
## @type: ResolveChoice
## @args: [choice = "MATCH_CATALOG", database = "aws-blogs-glue-database098234ytb2", table_name = "ddb-target-s3-table-auto-billplatformcredentials", transformation_ctx = "resolvechoice3"]
## @return: resolvechoice3
## @inputs: [frame = selectfields2]
resolvechoice3 = ResolveChoice.apply(frame = selectfields2, choice = "MATCH_CATALOG", database = args['destinationdb'], table_name = args['destinationtable'], transformation_ctx = "resolvechoice3")
## @type: ResolveChoice
## @args: [choice = "make_struct", transformation_ctx = "resolvechoice4"]
## @return: resolvechoice4
## @inputs: [frame = resolvechoice3]
resolvechoice4 = ResolveChoice.apply(frame = resolvechoice3, choice = "make_struct", transformation_ctx = "resolvechoice4")
## @type: DataSink
## @args: [database = "aws-blogs-glue-database098234ytb2", table_name = "ddb-target-s3-table-auto-billplatformcredentials", transformation_ctx = "datasink5"]
## @return: datasink5
## @inputs: [frame = resolvechoice4]
datasink5 = glueContext.write_dynamic_frame.from_catalog(frame = resolvechoice4, database = args['destinationdb'], table_name = args['destinationtable'] , transformation_ctx = "datasink5")
job.commit()
|
[
"noreply@github.com"
] |
noreply@github.com
|
351c87c5d812181e22737f15be126ece632a8f45
|
682b3381751b6178ea7217c723a46bf74b8ce07e
|
/experiments/fmeasure.py
|
32298f834b052829b4760cf36c14c435f43a7686
|
[] |
no_license
|
jminyu/BackgroundSubtraction_by_GBRBM
|
7170bb2c04328274a6e671592a7856e363438f7c
|
6fada0925d674b66933a4440f157832b97ba3bab
|
refs/heads/master
| 2021-01-10T20:59:06.938073
| 2014-07-26T03:00:23
| 2014-07-26T03:00:23
| 22,276,707
| 2
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,297
|
py
|
from numpy import *
import os.path
import Image
import matplotlib.pylab as plt
def f_measure(f, fg,gt):
fg_pixgt = where(gt == 1)
bg_pixgt = where(gt == 0)
fg_pixfg = where(fg == 1)
bg_pixfg = where(fg == 0)
tp = (where(gt[fg_pixgt] == fg[fg_pixgt]))[0]
fp = (where(gt[fg_pixgt] != fg[fg_pixgt]))[0]
tn = (where(gt[bg_pixgt] == fg[bg_pixgt]))[0]
fn = (where(gt[bg_pixgt] != fg[bg_pixgt]))[0]
DR = len(tp) * 1.0 / ( len(tp) + len(fn) )
    precision = len(tp) * 1.0 / (len(tp) + len(fp))
    F = 2 * DR * precision / (DR + precision)
    print f, " fmeasure:", F, " tp:fp:tn:fn =", len(tp), ":", len(fp), ":", len(tn), ":", len(fn), "Recall:", DR, " Precision:", precision
return F
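# Worked example with illustrative counts only: if len(tp)=8, len(fp)=2, len(fn)=2,
# then DR (recall) = 8/(8+2) = 0.8, precision = 8/(8+2) = 0.8, and
# F = 2*0.8*0.8/(0.8+0.8) = 0.8.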
def print_it(x, dir_name, files):
global dataset
print "here"
#dataset = []
#dataset = np.zeros(256*320)
for f in files:
imf = Image.open(dir_name + '/' + f)
#d = np.array(imf)
d = ((array(imf.getdata())/255) > 0.5 ) * 1
#print f
#print d.shape
dataset.append(d)
return dataset
def print_fmeasure(x, dir_name, files):
global dataset
print "here"
i = 0
s = 0
for f in files:
imf = Image.open(dir_name + '/' + f)
#d = np.array(imf)
groundtruth1 = (((asarray(imf)[:,:,0])/255.0 > 0.5) * 1).flatten()
#print "shape:", groundtruth1.shape, "shape2:", dataset[i].shape
#print i
# print f
s = s + f_measure(f, groundtruth1, dataset[i])
i = i + 1
print "avg: ", s/i
# img = Image.open("data/changedetection\\baseline\highway/gt001367.png")
# img1 = Image.open("data/changedetection\\baseline\highway/bin001367.png")
# print (asarray(img1)).shape
# plt.imshow(((asarray(img)[:,:])/255.0 > 0.5)* 1)
# plt.figure(2)
# plt.imshow(((asarray(img1)[:,:])/255.0 > 0.5) * 1)
# plt.show()
# print "Doie : " , (asarray(img)[:,:]).shape
# groundtruth = (((asarray(img)[:,:])/255.0 > 0.5) * 1).flatten()
# groundtruth1 = (((asarray(img1)[:,:,0])/255.0 > 0.5) * 1).flatten()
# f_measure(groundtruth1,groundtruth)
dataset = []
os.path.walk('C:\work\\backgdSubt\GRBM\deepbelief\code\data\cdresults\GT', print_it, 0)
os.path.walk('C:\work\\backgdSubt\GRBM\deepbelief\code\data\cdresults\DPGMM', print_fmeasure, 0)
|
[
"jmyu@gist.ac.kr"
] |
jmyu@gist.ac.kr
|
2953bbb2ca4922a469834aebbec2753d820c24e7
|
5aeaa94117ad5f3ac86f83795e05e9444dfce586
|
/scratch.py
|
c2b0809c86ea77328bd0bb6ea65ee63610c849d8
|
[] |
no_license
|
rikkhill/sfcrime
|
debef1b232db35ad735c8badec360bc5e9f1ad95
|
f21fe834da07f701f666198089425e59ed57e70d
|
refs/heads/master
| 2021-01-21T03:31:05.410424
| 2016-08-23T13:38:49
| 2016-08-23T13:38:49
| 49,992,165
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 839
|
py
|
# Rikk's exploratory gubbins
import pandas as pd
import helper.data as hd
import helper.plot as plot
import helper.constants as C
df = hd.get_training_data()
total = len(df)
# Filter for the top ~80% of crimes
df = df[df.Category.isin([
#'LARCENY/THEFT', # 19.92%
#'OTHER OFFENSES', # 14.37%
#'NON-CRIMINAL', # 10.51%
#'ASSAULT', # 8.76%
'DRUG/NARCOTIC', # 6.15%
#'VEHICLE THEFT', # 6.13%
#'VANDALISM', # 5.09
#'WARRANTS', # 4.81%
#'BURGLARY' # 4.19%
])]
print(len(df)/total)
plot.eventmap(df, 'Category')
|
[
"rikk@unashamedly.co.uk"
] |
rikk@unashamedly.co.uk
|
d165d12587bb3716d7a0ae23d765ebc98ee1ba39
|
7bcba33e06f1fff4b2639aed5c556a79b7a51269
|
/plot_graph2.py
|
c98f32a462f19b57135d4b42d60cac094112522b
|
[] |
no_license
|
guroosh/CS7IS2-AI-project
|
3798d32c60754b27ab044d1a1096f6527660ec56
|
aae851695a022d374c263bd666fa86a20c478887
|
refs/heads/master
| 2020-12-31T10:47:06.545624
| 2020-04-13T08:16:30
| 2020-04-13T08:16:30
| 239,007,884
| 1
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,764
|
py
|
import matplotlib.pyplot as plt
from scipy.ndimage.filters import gaussian_filter1d
# x = [3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35]
# y = [0.0, 0.0, 0.0, 0.0, 0.0, 0.10294117647058823, 0.6617647058823529, 0.59375, 2.6029411764705883, 4.349206349206349, 5.4714285714285715, 7.806451612903226, 10.897058823529411, 16.857142857142858, 11.338461538461539, 17.434782608695652, 16.5, 28.06896551724138, 25.236363636363638, 27.557692307692307, 43.90196078431372, 47.80701754385965, 47.0, 33.0, 34.75, 39.0, 51.0, 42.5, 31.0, 51.5, 128.25, 106.5, 76.66666666666667]
x = []
y1 = []
y2 = []
y3 = []
y4 = []
y5 = []
y6 = []
# y_astar = []
with open('mutation_change_daata.txt', 'r') as inp:
for i in inp:
i = i[:-1]
x.append(float(i.split(', ')[0]))
y1.append(float(i.split(', ')[1]))
y2.append(float(i.split(', ')[2]))
y3.append(float(i.split(', ')[3]))
y4.append(float(i.split(', ')[4]))
y5.append(float(i.split(', ')[5]))
y6.append(float(i.split(', ')[6]))
# ysmoothed = gaussian_filter1d(y30, sigma=2)
def smooth(l1):
return gaussian_filter1d(l1, sigma=2)
y1 = smooth(y1)
y2 = smooth(y2)
y3 = smooth(y3)
y4 = smooth(y4)
y5 = smooth(y5)
y6 = smooth(y6)
plt.plot(x, y6, 'k', label='Global minima (based on A*)')
plt.plot(x, y1, 'r', label='No mutation')
plt.plot(x, y2, 'g', label='30% mutation')
plt.plot(x, y3, 'b', label='50% mutation')
plt.plot(x, y4, 'c', label='70% mutation')
plt.plot(x, y5, 'm', label='100% mutation')
# plt.plot(x40, y40, 'b')
# plt.plot(x30, ysmoothed)
plt.legend(loc="upper left")
plt.xlabel('Grid Size (M) : MxM')
plt.ylabel('Length of best path (after 400 iterations)')
plt.show()
|
[
"csingh@tcd.ie"
] |
csingh@tcd.ie
|
039e3c3ed7b402181e322dec3cbb5fe416969706
|
486e486ffea2feb8601cad443d86854a77e1f390
|
/todoapp/todos/feeds.py
|
aefdded24c2f6b902f1669d3f127f7e7a54d7e8d
|
[] |
no_license
|
saikirananumalla/todoapp
|
ddfbea6b983d1d22e17f6ab2772e18ce26aa387a
|
71aab46c277bdd1f72e0ec5c115f6656ff3a6551
|
refs/heads/master
| 2020-04-09T17:21:05.972821
| 2018-12-05T07:55:25
| 2018-12-05T07:55:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 594
|
py
|
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from .models import Users
from . import views
class LatestEntryField(Feed):
title = " Updates "
link = "/news-sites/"
description = "Updates on Users"
def items(self):
        return Users.objects.order_by('num')
def item_title(self, item):
return item.user
def item_description(self, item):
return item.nickname
#item link is only needed if NewsItem has no get_absolute_url method
def item_link(self, item):
return reverse(views.index)
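# Wiring sketch (standard Django syndication, not shown in this snippet):
# url(r'^latest/feed/$', LatestEntryField()) in urls.py serves this feed as RSS.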
|
[
"noreply@github.com"
] |
noreply@github.com
|
03668cd8657241fcab646595058f80c9f4125756
|
c3aad901e32f735625f938b4c26cdfa307254a6b
|
/biothings_explorer/api_preprocess/reasoner.py
|
b89e427492a7a016d9355ed1ccfbe18fd59cd9d8
|
[
"Apache-2.0"
] |
permissive
|
andrewgcodes/biothings_explorer
|
73c598fae2171e8b61687325fa1c1ee1a625fbe1
|
b54aa195bbed19ff5be09ed24dee869b24bb3c16
|
refs/heads/master
| 2022-12-23T18:06:34.061346
| 2022-08-18T20:23:17
| 2022-08-18T20:23:17
| 279,000,723
| 0
| 0
|
Apache-2.0
| 2020-07-12T05:49:16
| 2020-07-12T05:49:15
| null |
UTF-8
|
Python
| false
| false
| 939
|
py
|
from itertools import groupby
def restructure_reasoner_response(json_doc):
"""Restructure the API output from reasoner API.
:param: json_doc: json output from reasoner API
"""
edges = json_doc['knowledge_graph']['edges']
if not edges:
return {}
res = {}
edges = sorted(edges, key=lambda x: x['type'])
for k, g in groupby(edges, lambda x: x['type']):
res[k] = []
for _item in g:
if _item['target_id'].startswith("PANTHER.FAMILY"):
_item['panther'] = _item['target_id'][15:]
if _item['target_id'].startswith("CHEBI"):
_item['chebi'] = _item['target_id']
if _item['target_id'].startswith("CHEMBL:"):
_item['chembl'] = _item['target_id'][7:]
if _item['target_id'].startswith("MONDO:"):
_item['mondo'] = _item['target_id'][6:]
res[k].append(_item)
return res
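# Illustrative call with a minimal, hypothetical payload:
# restructure_reasoner_response({'knowledge_graph': {'edges': [
#     {'type': 'treated_by', 'target_id': 'MONDO:0005148'}]}})
# returns {'treated_by': [{'type': 'treated_by', 'target_id': 'MONDO:0005148',
#                          'mondo': '0005148'}]}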
|
[
"kevinxin@scripps.edu"
] |
kevinxin@scripps.edu
|
09b2477e4532e1781c1726117b833d2dd74a2098
|
c91a9f70e19c25bb8839172ba0733ae90a64c504
|
/apps/core/urls.py
|
e8b4cf1ad85f022d8cdb6681eb2fbc03154d7733
|
[] |
no_license
|
barbarakap19/gestao_rh
|
464ffb0fb031e91ffef9a40df448ff44998a5fb2
|
e2ed295cba055759bd0a9325adf6da9106c58c06
|
refs/heads/master
| 2020-06-14T20:47:46.471310
| 2019-07-23T03:32:42
| 2019-07-23T03:32:42
| 195,121,768
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 107
|
py
|
from django.urls import path
from .views import home
urlpatterns = [
path('', home, name='home'),
]
|
[
"barbara.andrade1901@gmail.com"
] |
barbara.andrade1901@gmail.com
|
021379ed8268f90ad89ad129bff6fa13ecc6d24f
|
7c131f19c3a3ae1ee8832ff68a8dd88e68e4d3d8
|
/filter_mobilisations.py
|
3827c8a12f9092a369e6a67bf2e36dffa856e3d6
|
[] |
no_license
|
isildirik/London_Fire_Station_Closures
|
42abbaa94462144ac1ccbeba398d038bf51e1322
|
bcce10265923fdfeca7fd293712c822e59a95f83
|
refs/heads/master
| 2021-07-12T19:25:38.210866
| 2020-06-24T07:17:35
| 2020-06-24T07:17:35
| 168,384,656
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 709
|
py
|
#!/usr/bin/env python3
# This script reads London Fire Brigade mobilisations data from the original data spreadsheet.
# It filters out the unnecessary columns, subsets the dataset to the years 2013 and 2014, and outputs a CSV file for further use.
import pandas as pd
import time
xls_path = './original-dataset/LFB Mobilisation data from Jan 2013.xlsx'
xls_cols = 'A,B,D,E,I,P,Q,S,V'
output_csv_name = './mobilisation_data_2013_2014.csv'
start_time = time.time()
mobdf = pd.read_excel(xls_path, sheet_name='Sheet1', usecols=xls_cols)
# Subset data up to 2015 January
mobdf = mobdf[0:314249]
mobdf.to_csv(output_csv_name, index=False)
print("--- Script finished in %.4f seconds ---" % (time.time() - start_time))
|
[
"isildirik@gmail.com"
] |
isildirik@gmail.com
|
6f267f19b412e85da602cd830f9cae1e0f449d29
|
c0990bc7a4fbc2875e2de8998d274b2e70b91ab3
|
/lingvo/jax/layers/stochastics_test.py
|
70ffe9917042bf183f1fc0e5792422d75cca1692
|
[
"Apache-2.0"
] |
permissive
|
Assimilationstheorie/lingvo
|
fa5159dd4201b4469398ff2ef004334773ed3642
|
dd175517894f7ef541262603e1225341ec3fbb51
|
refs/heads/master
| 2023-08-27T18:41:31.691814
| 2021-10-24T21:47:29
| 2021-10-24T21:48:09
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,847
|
py
|
# Lint as: python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for lingvo Jax stochastic layers."""
from absl import logging
from absl.testing import absltest
import jax
from jax import numpy as jnp
from jax import test_util
from lingvo.jax import base_layer
from lingvo.jax import test_utils
from lingvo.jax.layers import stochastics
ToNp = test_utils.ToNp
class StochasticsTest(test_util.JaxTestCase):
def test_dropout_layer01(self):
test_layer_p = stochastics.DropoutLayer.Params().Set(
name='dropout', keep_prob=0.8)
layer = test_layer_p.Instantiate()
prng_key = jax.random.PRNGKey(seed=12346)
prng_key, init_key = jax.random.split(prng_key)
initial_vars = layer.InstantiateVariables(init_key)
logging.info('initial_vars: %s', initial_vars)
inputs = jnp.ones([10, 1000], dtype=jnp.bfloat16)
prng_key, compute_key = jax.random.split(prng_key)
global_step = jnp.array(0, dtype=jnp.uint64)
def Comp(theta, prng_key, global_step, inputs):
with base_layer.JaxContext.NewContext():
per_step_prng_key = jax.random.fold_in(prng_key, global_step)
base_layer.ResetPrngKey(per_step_prng_key, global_step)
output1 = layer.FProp(theta, inputs)
output2 = layer.FProp(theta, inputs)
return output1, output2
output1, output2 = Comp(initial_vars, compute_key, global_step, inputs)
out1_sum = jnp.sum(output1)
out2_sum = jnp.sum(output2)
out1_nonzero = jnp.sum(output1 > 0.0)
out2_nonzero = jnp.sum(output2 > 0.0)
logging.info('out1_sum: %s', out1_sum)
logging.info('out2_sum: %s', out2_sum)
logging.info('out1_nonzero: %s', out1_nonzero)
logging.info('out2_nonzero: %s', out2_nonzero)
self.assertEqual(9920.0, out1_sum)
self.assertEqual(10048.0, out2_sum)
self.assertEqual(7944.0, out1_nonzero)
self.assertEqual(8029.0, out2_nonzero)
def test_dropout_layer_02(self):
test_layer_p = stochastics.DropoutLayer.Params().Set(
name='dropout',
keep_prob=0.8,
noise_shape=[10, 6, 8],
noise_shape_broadcast_dims=[2])
layer = test_layer_p.Instantiate()
prng_key = jax.random.PRNGKey(seed=12346)
prng_key, init_key = jax.random.split(prng_key)
initial_vars = layer.InstantiateVariables(init_key)
logging.info('initial_vars: %s', initial_vars)
inputs = jnp.ones([2, 10, 6, 8], dtype=jnp.bfloat16)
prng_key, compute_key = jax.random.split(prng_key)
global_step = jnp.array(0, dtype=jnp.uint64)
def Comp(theta, prng_key, global_step, inputs):
with base_layer.JaxContext.NewContext():
per_step_prng_key = jax.random.fold_in(prng_key, global_step)
base_layer.ResetPrngKey(per_step_prng_key, global_step)
layer.PrepareFProp()
output1 = layer.FProp(theta, inputs)
return output1
output1 = Comp(initial_vars, compute_key, global_step, inputs)
out1_sum = jnp.sum(output1)
out1_nonzero = jnp.sum(output1 > 0.0)
logging.info('out1_sum: %s', out1_sum)
logging.info('out1_nonzero: %s', out1_nonzero)
self.assertEqual(980, out1_sum)
self.assertEqual(784, out1_nonzero)
if __name__ == '__main__':
absltest.main()
|
[
"shafey@google.com"
] |
shafey@google.com
|
bab376dab409e28cbb1e3490bf25c95372117cd9
|
7a724badef6a881d63d7692de6a7b94daaf820be
|
/user.py
|
133f826c760765de1c14e75acb3678cb4361f23d
|
[] |
no_license
|
mirshahzad/python-basic
|
aa19641140e10e6be88944c81c927410ffc23759
|
1552be46b5890c9976fad43dba007410396ad92a
|
refs/heads/master
| 2022-10-24T18:13:58.938338
| 2020-06-13T09:14:12
| 2020-06-13T09:14:12
| 255,885,892
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 166
|
py
|
user_0 = {
'username': 'efermi',
'first': 'enrico',
'last': 'fermi',
}
for key, value in user_0.items():
print(f"\nKey: {key}")
print(f"value: {value}")
|
[
"noreply@github.com"
] |
noreply@github.com
|
6c350abf42358535fd248b6ad5fdb1c201305ee1
|
1dcde4f75d6b5abb115924a8f3b5989b6fbe2dee
|
/app/game.py
|
5b47ab94c64ce1b84557b462d71012b6190ae918
|
[
"MIT"
] |
permissive
|
skrolikowski/PyBox
|
0561a0f9bdae70d7f110a92f5b928dbd14a8f779
|
d79c5229df69f21767a4db15ebe05b91bba3dc8d
|
refs/heads/master
| 2020-03-19T09:17:15.810828
| 2018-09-08T18:07:47
| 2018-09-08T18:07:47
| 136,274,877
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,979
|
py
|
from .registry import Registry
from .window import GameWindow
class Game:
registry = Registry()
@classmethod
def load(cls, func):
cls.registry.add_command("load", func)
return func
@classmethod
def update(cls, func):
cls.registry.add_command("update", func)
return func
@classmethod
def draw(cls, func):
cls.registry.add_command("draw", func)
return func
@classmethod
def key_press(cls, func):
cls.registry.add_command("key_press", func)
return func
@classmethod
def key_release(cls, func):
cls.registry.add_command("key_release", func)
return func
@classmethod
def key_down(cls, func):
cls.registry.add_command("key_down", func)
return func
@classmethod
def text(cls, func):
cls.registry.add_command("key_text", func)
return func
@classmethod
def mouse_drag(cls, func):
cls.registry.add_command("mouse_drag", func)
return func
@classmethod
def mouse_motion(cls, func):
cls.registry.add_command("mouse_motion", func)
return func
@classmethod
def mouse_press(cls, func):
cls.registry.add_command("mouse_press", func)
return func
@classmethod
def mouse_release(cls, func):
cls.registry.add_command("mouse_release", func)
return func
@classmethod
def mouse_scroll(cls, func):
cls.registry.add_command("mouse_scroll", func)
return func
@classmethod
def focus(cls, func):
cls.registry.add_command("window_focus", func)
return func
@classmethod
def blur(cls, func):
cls.registry.add_command("window_blur", func)
return func
@classmethod
def hide(cls, func):
cls.registry.add_command("window_hide", func)
return func
@classmethod
def show(cls, func):
cls.registry.add_command("window_show", func)
return func
@classmethod
def move(cls, func):
cls.registry.add_command("window_move", func)
return func
@classmethod
def enter(cls, func):
cls.registry.add_command("state_enter", func)
return func
@classmethod
def leave(cls, func):
cls.registry.add_command("state_leave", func)
return func
@classmethod
def resume(cls, func):
cls.registry.add_command("state_resume", func)
return func
@classmethod
def switch(cls, state, *args, **kwargs):
cls.registry.switch(state, *args, **kwargs)
@classmethod
def push(cls, state, *args, **kwargs):
cls.registry.push(state, *args, **kwargs)
@classmethod
def pop(cls, *args, **kwargs):
cls.registry.pop(*args, **kwargs)
@classmethod
def run(cls, width=640, height=480, caption="Game"):
cls.registry.window = GameWindow(cls.registry, width=width, height=height, caption=caption)
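# Usage sketch (handler and window values are illustrative):
#
# @Game.load
# def on_load():
#     pass  # registered under the "load" command in Game.registry
#
# Game.run(width=800, height=600, caption="Demo")  # creates the GameWindow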
|
[
"skrolikowski@gmail.com"
] |
skrolikowski@gmail.com
|
35a457296554b87038a7ebfa03198c4b1c60e697
|
ebd5c4632bb5f85c9e3311fd70f6f1bf92fae53f
|
/Sourcem8/pirates/effects/VoodooAura2.py
|
852b91918310ef820ba576e0b80105d5ea24b395
|
[] |
no_license
|
BrandonAlex/Pirates-Online-Retribution
|
7f881a64ec74e595aaf62e78a39375d2d51f4d2e
|
980b7448f798e255eecfb6bd2ebb67b299b27dd7
|
refs/heads/master
| 2020-04-02T14:22:28.626453
| 2018-10-24T15:33:17
| 2018-10-24T15:33:17
| 154,521,816
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,226
|
py
|
# File: V (Python 2.4)
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from direct.particles import ParticleEffect
from direct.particles import Particles
from direct.particles import ForceGroup
from otp.otpbase import OTPRender
from PooledEffect import PooledEffect
from EffectController import EffectController
import random
class VoodooAura2(PooledEffect, EffectController):
cardScale = 128.0
def __init__(self):
PooledEffect.__init__(self)
EffectController.__init__(self)
model = loader.loadModel('models/effects/battleEffects')
self.card = model.find('**/effectVoodooShockwave')
if not self.particleDummy:
self.particleDummy = self.attachNewNode(ModelNode('VoodooAura2ParticleDummy'))
self.particleDummy.setDepthWrite(0)
self.particleDummy.setLightOff()
self.particleDummy.hide(OTPRender.ShadowCameraBitmask)
self.effectColor = Vec4(1, 1, 1, 1)
self.f = ParticleEffect.ParticleEffect('VoodooAura2')
self.f.reparentTo(self)
self.p0 = Particles.Particles('particles-1')
self.p0.setFactory('PointParticleFactory')
self.p0.setRenderer('SpriteParticleRenderer')
self.p0.setEmitter('PointEmitter')
self.f.addParticles(self.p0)
self.p0.setPoolSize(64)
self.p0.setBirthRate(0.02)
self.p0.setLitterSize(1)
self.p0.setLitterSpread(0)
self.p0.setSystemLifespan(0.0)
self.p0.setLocalVelocityFlag(0)
self.p0.setSystemGrowsOlderFlag(0)
self.p0.factory.setLifespanBase(1.0)
self.p0.factory.setLifespanSpread(0.0)
self.p0.factory.setMassBase(1.0)
self.p0.factory.setMassSpread(0.0)
self.p0.factory.setTerminalVelocityBase(400.0)
self.p0.factory.setTerminalVelocitySpread(0.0)
self.p0.renderer.setAlphaMode(BaseParticleRenderer.PRALPHAOUT)
self.p0.renderer.setUserAlpha(0.5)
self.p0.renderer.setFromNode(self.card)
self.p0.renderer.setColor(Vec4(1.0, 1.0, 1.0, 1.0))
self.p0.renderer.setXScaleFlag(1)
self.p0.renderer.setYScaleFlag(1)
self.p0.renderer.setAnimAngleFlag(0)
self.p0.renderer.setInitialXScale(0.0050000000000000001 * self.cardScale)
self.p0.renderer.setFinalXScale(0.012 * self.cardScale)
self.p0.renderer.setInitialYScale(0.0050000000000000001 * self.cardScale)
self.p0.renderer.setFinalYScale(0.012 * self.cardScale)
self.p0.renderer.setNonanimatedTheta(0.0)
self.p0.renderer.setAlphaBlendMethod(BaseParticleRenderer.PPBLENDLINEAR)
self.p0.renderer.setAlphaDisable(0)
self.p0.renderer.getColorInterpolationManager().addLinear(0.0, 1.0, Vec4(0.80000000000000004, 0.80000000000000004, 0.80000000000000004, 1), self.effectColor, 1)
self.p0.renderer.setColorBlendMode(ColorBlendAttrib.MAdd, ColorBlendAttrib.OIncomingAlpha, ColorBlendAttrib.OOne)
self.p0.emitter.setEmissionType(BaseParticleEmitter.ETRADIATE)
self.p0.emitter.setAmplitude(0.20000000000000001)
self.p0.emitter.setAmplitudeSpread(0.0)
self.p0.emitter.setOffsetForce(Vec3(0.0, 0.0, 0.0))
self.p0.emitter.setExplicitLaunchVector(Vec3(1.0, 0.0, 0.0))
self.p0.emitter.setRadiateOrigin(Point3(0.0, 0.0, 0.0))
def createTrack(self, rate = 1):
self.startEffect = Sequence(Func(self.p0.setBirthRate, 0.029999999999999999), Func(self.p0.clearToInitial), Func(self.f.start, self, self.particleDummy))
self.endEffect = Sequence(Func(self.p0.setBirthRate, 100), Wait(1.0), Func(self.cleanUpEffect))
self.track = Sequence(self.startEffect, Wait(0.75), self.endEffect)
def setEffectColor(self, color):
self.effectColor = color
self.p0.renderer.getColorInterpolationManager().clearToInitial()
self.p0.renderer.getColorInterpolationManager().addLinear(0.0, 1.0, Vec4(0.80000000000000004, 0.80000000000000004, 0.80000000000000004, 1), self.effectColor, 1)
def cleanUpEffect(self):
self.detachNode()
self.checkInEffect(self)
def destroy(self):
EffectController.destroy(self)
PooledEffect.destroy(self)
|
[
"brandoncarden12345@gmail.com"
] |
brandoncarden12345@gmail.com
|
4c0ca93db88c706bef26591396d38085e7953adf
|
5e95f3cb251b0a4ed750ef37955f05854e1498eb
|
/test/python/index.py
|
c6de635a94fe7d39d9a1b1ac751279a719de0584
|
[] |
no_license
|
davidenq/prime
|
48214b2fc0509c81a3ca026311652c17c95dac16
|
d65e7048216fceb4221bcf07b062929ce3e91a72
|
refs/heads/master
| 2022-12-10T01:43:44.769602
| 2020-07-19T01:37:30
| 2020-07-19T01:37:30
| 280,770,805
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 77
|
py
|
from cffi import FFI
ffi = FFI()
lib = ffi.dlopen('../../build/libprime.so')
|
[
"david.nunez.dev@gmail.com"
] |
david.nunez.dev@gmail.com
|
f42908edf55755571fd62a4c3989c96f7da5a3b2
|
0def7d63d694908d4fa5422631d86b61a31b3e8b
|
/module/HaiGuan/Python_PJ/YDMPython3.py
|
30a42ed3662dc6a1fa9512916af7ed9a4046d096
|
[] |
no_license
|
921016124/Spiders
|
45fffb5a48ecce2c2754187d2f9b6c9caf1f3eaf
|
e2a3f3f59657974940801de8bc2bbb1a416af55b
|
refs/heads/master
| 2022-12-13T10:59:42.375979
| 2020-09-14T09:02:13
| 2020-09-14T09:02:13
| 191,732,013
| 0
| 1
| null | null | null | null |
GB18030
|
Python
| false
| false
| 819
|
py
|
# -*- coding: cp936 -*-
import os
import sys
from ctypes import *
class PJ:
def Po_Jie(self):
print('>>>正在初始化...')
YDMApi = windll.LoadLibrary('yundamaAPI-x64')
appId = 7931
appKey = b'07e97c69ff9cd82a854d636ae1c1cb5e'
print('软件ID:%d\r\n软件密钥:%s' % (appId, appKey))
username = b'machengguang'
password = b'zxcv_1234'
if username == b'test':
exit('\r\n>>>请先设置用户名密码')
print('\r\n>>>正在一键识别...')
codetype = 1004
result = c_char_p(b" ")
timeout = 60
filename = b'Captcha.jpg'
captchaId = YDMApi.YDM_EasyDecodeByPath(username, password, appId, appKey, filename, codetype, timeout, result)
print("一键识别:验证码ID:%d,识别结果:%s" % (captchaId, result.value))
return result.value
|
[
"921016124@qq.com"
] |
921016124@qq.com
|
b3df07ddba1420150bae25cb281f8f6d17c8c060
|
49b66aad7e8888b8da532d66dac65c5a4e75ceb8
|
/pickel/newssort_loader.py
|
369b3997ccd4d712af714b4e2fe05bf84a5d977a
|
[] |
no_license
|
praneetmehta/news_segregator
|
7e606057b687dc97a69031e804c5704816b1c4bc
|
3be70c007b85559af8c371b815d3122bc2f26ba9
|
refs/heads/master
| 2021-01-19T23:49:30.571651
| 2017-07-25T23:44:06
| 2017-07-25T23:44:06
| 83,790,066
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 497
|
py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 3 20:07:49 2017
@author: praneet
"""
import pickle
import re
def correct(s):
s = re.sub('\s\W',' ',s)
s = re.sub('\W\s',' ',s)
s = re.sub("[^a-zA-Z']",' ', s)
s = re.sub('\s+',' ',s)
return s
vectorizer,assignment,kmeans = pickle.load(open('../pickel/newssort', 'rb'))
def predict(text):
return assignment[kmeans.predict(vectorizer.transform([correct(text)]))[0]]
|
[
"praneet.mehta@gmail.com"
] |
praneet.mehta@gmail.com
|
57839fbdaf39ce151f280eecf2ac06516ded4c83
|
0123229ac84c057b188f6b17c1131ec630ecaf25
|
/stochastic_gradient_descent/test_sire_offset/offset_fix_phiandpsi/extract_frcmod.py
|
4598f60b3d2efdd919bfb1c52e5dd461d50b8d9e
|
[] |
no_license
|
michellab/paramfit-tests
|
689851ab95406aad7160403c4a70d3ec6be91981
|
39598e93936beff48aefff1604483fd265a5f46a
|
refs/heads/master
| 2021-01-13T05:47:23.287857
| 2017-04-24T10:58:21
| 2017-04-24T10:58:21
| 76,249,680
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 461
|
py
|
from parmed.amber import *
import parmed
import os
base = AmberParm("orig.prmtop", "fit.rst7")
parmed.tools.writeFrcmod(base,"test.frcmod").execute()
frcmod_file = open("test.frcmod","r").readlines()
for fr in frcmod_file:
if "C -N -CT-C " in fr: # this is phi
print("value of Phi")
print(fr)
elif "N -CT-C -N" in fr:
print("value of Psi")
print(fr)
else:
continue
cmd = "rm test.frcmod"
os.system(cmd)
|
[
"stefanobosisio1@gmail.com"
] |
stefanobosisio1@gmail.com
|
a56f746654c8cc821dca0132fe96988a4192a627
|
83cf20e7d181eac59dd0c3b1d5b7e095807fb6e0
|
/maintain/move/some_test.py
|
8d8776898e0137851dba8f01d1e3de0bbd871b52
|
[] |
no_license
|
ys3721/release_update_tools
|
b96fb272103e245fdf4273763c5f9b29e75e1529
|
4fd76ce05e793c3163a208830eee18b5f986ac73
|
refs/heads/master
| 2022-06-27T05:12:07.158151
| 2021-11-30T08:36:53
| 2021-11-30T08:36:53
| 178,325,294
| 2
| 2
| null | 2021-04-26T20:35:39
| 2019-03-29T03:24:35
|
Python
|
UTF-8
|
Python
| false
| false
| 328
|
py
|
#! /usr/bin/python
# -*-coding=utf8-*-
# @Author: Yao Shuai
import os
import subprocess
sql_name = os.system("sshpass -p 321 ssh root@10.10.6.14 ls /data0/src/s1145_*")
print "os.system.result=" + str(sql_name)
result = os.popen("sshpass -p 321 ssh root@10.10.6.14 ls /data0/src/s1145_*").readline()
print result
|
[
"ys3721@hotmail.com"
] |
ys3721@hotmail.com
|
48096466ced3cec7b5b5429e2b83fb56cd6edfd4
|
a5b9ca98802358af0ad2698696cd0a145379c4dc
|
/python_俄罗斯方块/game/src/gameState.py
|
b4ba0d9cdda27a4e1c960b3834c2c69d3f73e197
|
[] |
no_license
|
young-yang/InitialTemptation
|
dea09f89007f50317b611568a784e0d524e3cfe2
|
79a6fad172076e412dde6a36527f1adf2132cffb
|
refs/heads/master
| 2021-01-22T21:53:46.879485
| 2018-07-24T16:36:37
| 2018-07-24T16:36:37
| 85,488,008
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,131
|
py
|
# -*- coding: UTF-8 -*-
import random
from settings import *
from piece import Piece
from gameWall import GameWall
import pygame
class GameState():
def __init__(self,screen):
self.screen = screen
self.wall = GameWall(screen)
self.piece = None
self.timer_interval = TIMER_INTERVAL
self.game_score = 0
        # whether the game has stopped
        self.stopped = True
        # whether the game is paused
        self.paused = False
def set_timer(self,time_interval):
        # pygame.time.set_timer returns None, so keep it as a plain call
        pygame.time.set_timer(pygame.USEREVENT, time_interval)
def add_score(self,score):
self.game_score += score
def startGame(self):
self.stopped = False
self.set_timer(TIMER_INTERVAL)
self.timer_interval = TIMER_INTERVAL
self.piece = Piece(random.choice(PIECE_TYPES),self.screen,self.wall)
def pauseGame(self):
        pygame.time.set_timer(pygame.USEREVENT, 0)  # passing 0 clears the timer
self.paused = True
def resumeGame(self):
self.set_timer(self.timer_interval)
self.paused = False
|
[
"314235034@qq.com"
] |
314235034@qq.com
|
aafc7c42fc0bc0c36abc57d07eba7f7d396a0646
|
0db5a9cf6be1e08accb24151982fe297287af158
|
/test2/main.py
|
58a2620a0238e11ca745e00ea22d128bb975a8b8
|
[] |
no_license
|
Robinsondssantos/fastapi-test
|
a30a14bfaa93bbd6aef4c3758f1be5c426927d39
|
7bab2cdbef6344d4c9c817a33fe6e63fd0861d1e
|
refs/heads/master
| 2022-12-16T12:20:11.359535
| 2020-09-29T19:20:39
| 2020-09-29T19:20:39
| 296,133,546
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,274
|
py
|
# from sqlalchemy import create_engine
# from sqlalchemy.ext.declarative import declarative_base
# from sqlalchemy.orm import sessionmaker
import json
import psycopg2
from typing import Optional
from fastapi import FastAPI
# SQLALCHEMY_DATABASE_URL = 'postgresql://postgres:password@localhost:5432/books'
# engine = create_engine(
# SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
# )
# SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Base = declarative_base()
connection = psycopg2.connect(
host='localhost',
database='fastdb',
user='postgres',
password='postgres'
)
print('connection:', connection)
cursor = connection.cursor()
print('cursor:', cursor)
# cursor.execute(
# """
# CREATE TABLE readings (
# id INTEGER PRIMARY KEY,
# humidity INTEGER
# )
# """
# )
# cursor.close()
# connection.commit()
app = FastAPI()
@app.get('/')
async def read_root():
cursor.execute(
"""
SELECT * FROM readings
"""
)
    # Build JSON-safe dicts directly; round-tripping through str() and
    # replace("'", '"') would break on values that contain quotes.
    columns = [col[0] for col in cursor.description]
    return [dict(zip(columns, row)) for row in cursor.fetchall()]
|
[
"robinsonsantos@localhost.localdomain"
] |
robinsonsantos@localhost.localdomain
|
0ca3fb72d679efdb72ddfcd757f6f86a4bc25998
|
3a4975bc5256d0c92e04b943d1eeb81b9ce89640
|
/allPossibleCombination.py
|
6bc8ffc2db7441bf1e57d244ff71f0f06abe4732
|
[] |
no_license
|
Priyankajoshipj/DataStructuresAndAlgorithms
|
edf6c09f4a994341dda2973aef3761c042df36cc
|
513a78b95546b89c74acbcc6691e84c4f0bc4f20
|
refs/heads/master
| 2020-06-27T14:04:23.144829
| 2019-09-18T04:39:11
| 2019-09-18T04:39:11
| 199,972,227
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 803
|
py
|
def allPossibleCombination(s):
n = len(s)
    if n < 2:
        return n
out = set()
def rec_comb_helper(s, i, out):
if i == len(s) - 1:
return ["", s[i]]
one = rec_comb_helper(s, i+1, out)
for com in one:
if s[i] not in com:
com1 = s[i] + com
out.add(com1)
out.add(com)
return list(out)
allp = rec_comb_helper(s, 0, out)
print(sorted(allp))
def permute(choices, s, out1):
if not s:
return [""]
if len(choices) == len(s):
string = "".join(choices)
out1.append(string)
for i in range(len(s)):
choice = s[i]
if choice in choices:
continue
choices.append(choice)
permute(choices, s, out1)
choices.pop()
return out1
res = []
for comb in allp:
a = permute([], comb, [])
res += a
print(sorted(res))
return res
allPossibleCombination("ABC")
|
[
"33767244+Priyankajoshipj@users.noreply.github.com"
] |
33767244+Priyankajoshipj@users.noreply.github.com
|
bd98415fac9fcd5c2df10970fb49b10c06aabbdf
|
125cd1609acc13b0f5d953b7ff0faed7c177eb81
|
/core/middlewares.py
|
e44439059857b530dc2179ef7e71ce36ebb1e121
|
[] |
no_license
|
anibalvf/DjangoWeb
|
6e0a7d356b691fec4a9200f2e645e66cbb57371a
|
0aa3c6528e5d2bbc305b76faf0c2fca4b88801aa
|
refs/heads/main
| 2023-02-06T19:25:39.520929
| 2020-12-30T05:16:50
| 2020-12-30T05:16:50
| 325,130,522
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 537
|
py
|
from django.core.exceptions import PermissionDenied
def simple_middleware(get_response):
# One-time configuration and initialization.
def middleware(request):
# Code to be executed for each request before
# the view (and later middleware) are called
if not request.user.is_superuser:
raise PermissionDenied
response = get_response(request)
# Code to be executed for each request/response after
# the view is called.
return response
return middleware
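# Registration sketch (standard Django): add the dotted path to MIDDLEWARE in
# settings.py, e.g. 'core.middlewares.simple_middleware'.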
|
[
"33007157+anibalvf@users.noreply.github.com"
] |
33007157+anibalvf@users.noreply.github.com
|
8941c5b291ec2762adfe11384e1d783e5e747927
|
b5ff404bda572f32b8f3a28025c6e76ed381d504
|
/Ali/forms.py
|
79fbab94a165611df8f85ee32aac656c08c35a39
|
[] |
no_license
|
Daechulbae/56chul
|
d5366e4ea7a43e329435da082f98503dd1a7c0f2
|
aef1028568057264c998a915240256065e776830
|
refs/heads/master
| 2018-10-29T18:30:47.901769
| 2018-08-23T07:12:25
| 2018-08-23T07:12:25
| 124,995,760
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 99
|
py
|
from django import forms
class actionForm(forms.Form):
texbox = forms.CharField(max_length=10)
|
[
"eocjf17@gmail.com"
] |
eocjf17@gmail.com
|
947031afacd28cc7dd21f3fba36625e6915693d7
|
15fa13fad9a05a51843c3ed6cf1f8afbb33aae66
|
/examples/reference/arc/arc.pde
|
b443a2ddb54118463bb6c09c57edbe0b4a5eb328
|
[] |
no_license
|
kazimuth/python-mode-processing
|
4ad39f18c9637206fa7c691ac328baae0fc21b1a
|
e6274f89e0464b771870327a56ce01bff629e0fb
|
refs/heads/master
| 2021-01-22T05:27:44.912530
| 2014-04-22T17:20:15
| 2014-04-22T17:20:15
| 10,946,779
| 4
| 0
| null | 2013-12-31T01:23:52
| 2013-06-25T18:40:55
|
Java
|
UTF-8
|
Python
| false
| false
| 149
|
pde
|
arc(50, 55, 50, 50, 0, PI/2)
noFill()
arc(50, 55, 60, 60, PI/2, PI)
arc(50, 55, 70, 70, PI, TWO_PI-PI/2)
arc(50, 55, 80, 80, TWO_PI-PI/2, TWO_PI)
|
[
"martin_p@lineone.net"
] |
martin_p@lineone.net
|
a7a23ce0dca0223e330d4f53312a3a2dcdd5b5e2
|
00540adf5d4e1be80b3edca5f60f298497eddef0
|
/expression_analysis.py
|
f767375bce05d8fe1368a3bff3ea33932a27efc0
|
[] |
no_license
|
smetroid/expression_analysis
|
4fe274302f29a12689d97872dbc659d734c236f1
|
445a73b7fb2be0e153f6e25666f4be14cf1ae3a4
|
refs/heads/master
| 2020-03-27T11:02:57.525089
| 2018-11-09T04:19:53
| 2018-11-09T04:21:04
| 146,461,705
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,747
|
py
|
#!/usr/bin/python
import sqlite3, csv, re
import sys
def createDBs(conn):
interproscan7_table = "create table interproscan7(trinity, random1, random2, sites, code1, description1, start, stop, evalue, random3, date, code2, description2, goterms, reactome)"
expression_counts_table = "create table expression_counts (trinity, ho8_quants, ho7_quants)"
interproscan8_table = "create table interproscan8(trinity, random1, random2, sites, code1, description1, start, stop, evalue, random3, date, code2, description2, goterms, reactome)"
fastaho7 = "create table fastaho7 (trinity, data BLOB)"
fastaho8 = "create table fastaho8 (trinity, data BLOB)"
fastaho7transdecoder = "create table fastaho7transdecoder (trinity, data BLOB)"
fastaho8transdecoder = "create table fastaho8transdecoder (trinity, data BLOB)"
ho8ids_with_quantids = "create table ho8idswithquantsids (ho8ids, jointids)"
cur = conn.cursor()
cur.execute(interproscan7_table)
cur.execute(interproscan8_table)
cur.execute(expression_counts_table)
cur.execute(fastaho7)
cur.execute(fastaho8)
cur.execute(fastaho7transdecoder)
cur.execute(fastaho8transdecoder)
cur.execute(ho8ids_with_quantids)
def testDB(cur):
cur.execute("SELECT * from hostinfo")
rows = cur.fetchall()
for row in rows:
print(row)
return None
def loadInterproScan7(cur):
tsv_data_file = open("./data/Ho7_K31_Trinity_InterProScan_1.tsv")
tsv_reader = csv.reader(tsv_data_file, delimiter="\t")
for i in tsv_reader:
# Remove the .p1 from the trinity value
#i[0] = re.sub(r'_i.*$', '', i[0])
print(i)
        if len(i) < 15:
            # skip malformed rows with fewer than 15 fields
            continue
        cur.execute("INSERT INTO interproscan7 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", tuple(i))
def loadInterproScan8(cur):
tsv_data_file = open("./data/Ho8_k31_Trinity_InterProScan.tsv")
tsv_reader = csv.reader(tsv_data_file, delimiter="\t")
for i in tsv_reader:
# Remove the .p1 from the trinity value
#i[0] = re.sub(r'_i.*$', '', i[0])
print(i)
        if len(i) < 15:
            # skip malformed rows with fewer than 15 fields
            continue
        cur.execute("INSERT INTO interproscan8 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", tuple(i))
def loadNorm(cur):
tsv_data_file = open("./norm")
tsv_reader = csv.reader(tsv_data_file, delimiter="\t")
for i in tsv_reader:
# Remove the "_i1-4" from the trinity value
i[0] = re.sub(r'_i.*$', '', i[0])
print(i)
if len(i) == 0:
continue
else:
cur.execute("INSERT INTO expression_counts VALUES (?, ?, ?)", tuple(i))
def getDataSet(cur, filter):
cur.execute("SELECT trinity,start,stop FROM interproscan7 WHERE code1 LIKE ?", (filter,))
file_name = "%s_%s.csv" % ("interproscan7", filter)
rows = cur.fetchall()
data = []
fo = open(file_name, 'w')
for row in rows:
info = "%s,%s,%s\n" % (row[0],row[1],row[2])
data.append(info)
fo.writelines(data)
fo.close()
def buildTempView(cur):
#Generate a temporary view for the ho8 and ho7 quants aggregate counts
sql_view = ('CREATE VIEW expression_count_aggregates '
'AS '
'SELECT trinity, SUM(ho8_quants) as ho8_quants, '
'SUM(ho7_quants) as ho7_quants '
'FROM expression_counts '
'GROUP BY trinity ')
cur.execute(sql_view)
def getNormAndHoData(cur, filter):
sql = ('SELECT DISTINCT inter.trinity, inter.start, inter.stop, '
'ec.ho8_quants, ec.ho7_quants '
'FROM interproscan7 inter '
'INNER JOIN expression_count_aggregates ec '
'ON inter.trinity = ec.trinity '
'WHERE code1 LIKE "%s" '
'ORDER BY inter.trinity ')
file_name = "%s_%s.csv" % ("normAndHoData",filter)
    print(sql % filter)
    cur.execute(sql % filter)
rows = cur.fetchall()
data = []
header = "%s,%s,%s,%s,%s\n" % ("trinity", "start", "stop", "ho8_quants", "ho7_quants")
data.append(header)
fo = open(file_name, 'w')
for row in rows:
info = "%s,%s,%s,%s,%s\n" % (row[0],row[1],row[2],row[3],row[4])
data.append(info)
fo.writelines(data)
fo.close()
def loadFastaDataHo7(cur):
trinity = ""
data = ""
with open("./data/Ho7_K31_Trinity.fasta") as f:
for i in f:
match = re.search(r'^>TRINITY', i)
#print i
if (match):
if (data != ""):
sql_insert = (trinity, data)
cur.execute("INSERT INTO fastaho7 VALUES (?, ?)", tuple(sql_insert))
data = i
else:
data = i
trinity_field = i.split(" ")
#trinity = re.sub(r'_i.*$', '', trinity_field[0]).replace('>', '')
trinity = trinity_field[0].replace('>', '')
else:
data += i
# When EOF is reached commit the last values into SQL
sql_insert = (trinity, data)
cur.execute("INSERT INTO fastaho7 VALUES (?, ?)", tuple(sql_insert))
def loadFastaDataHo8(cur):
trinity = ""
data = ""
with open("./data/Ho8_K31_Trinity.fasta") as f:
for i in f:
match = re.search(r'^>TRINITY', i)
#print i
if (match):
if (data != ""):
sql_insert = (trinity, data)
cur.execute("INSERT INTO fastaho8 VALUES (?, ?)", tuple(sql_insert))
data = i
else:
data = i
trinity_field = i.split(" ")
#trinity = re.sub(r'_i.*$', '', trinity_field[0]).replace('>', '')
trinity = trinity_field[0].replace('>', '')
else:
data += i
# When EOF is reached commit the last values into SQL
sql_insert = (trinity, data)
cur.execute("INSERT INTO fastaho8 VALUES (?, ?)", tuple(sql_insert))
def loadFastaDataHo7Transdecoder(cur):
trinity = ""
data = ""
with open("./data/Ho7_K31_Trinity.fasta.transdecoder.pep") as f:
for i in f:
match = re.search(r'^>TRINITY', i)
#print i
if (match):
if (data != ""):
sql_insert = (trinity, data)
cur.execute("INSERT INTO fastaho7transdecoder VALUES (?, ?)", tuple(sql_insert))
data = i
else:
data = i
trinity_field = i.split(" ")
#trinity = re.sub(r'_i.*$', '', trinity_field[0]).replace('>', '')
trinity = trinity_field[0].replace('>', '')
else:
data += i
# When EOF is reached commit the last values into SQL
sql_insert = (trinity, data)
cur.execute("INSERT INTO fastaho7transdecoder VALUES (?, ?)", tuple(sql_insert))
def loadFastaDataHo8Transdecoder(cur):
trinity = ""
data = ""
with open("./data/Ho8_K31_Trinity.fasta.transdecoder.pep") as f:
for i in f:
match = re.search(r'^>TRINITY', i)
#print i
if (match):
if (data != ""):
sql_insert = (trinity, data)
cur.execute("INSERT INTO fastaho8transdecoder VALUES (?, ?)", tuple(sql_insert))
data = i
else:
data = i
trinity_field = i.split(" ")
#trinity = re.sub(r'_i.*$', '', trinity_field[0]).replace('>', '')
trinity = trinity_field[0].replace('>', '')
else:
data += i
# When EOF is reached commit the last values into SQL
sql_insert = (trinity, data)
cur.execute("INSERT INTO fastaho8transdecoder VALUES (?, ?)", tuple(sql_insert))
def loadHo8idsWithQuantIds(cur):
tsv_data_file = open("./data/ho8_ids_with_corresponding_combined_ho7_ho8_ids.csv")
tsv_reader = csv.reader(tsv_data_file, delimiter="\t")
for i in tsv_reader:
# Remove the "_i1-4" from the trinity value
#i[0] = re.sub(r'_i.*$', '', i[0])
print(i)
if len(i) == 0:
continue
else:
cur.execute("INSERT INTO ho8idswithquantsids VALUES (?, ?)", tuple(i))
def fastaDataQuery(cur):
tsv_data_file = open(sys.argv[2])
tsv_reader = csv.reader(tsv_data_file, delimiter="\t")
for i in tsv_reader:
# Remove the "_i1-4" from the trinity value
i[0] = re.sub(r'_i.*$', '', i[0])
print(i)
if len(i) == 0:
continue
else:
cur.execute("INSERT INTO expression_counts VALUES (?, ?, ?)", tuple(i))
if __name__ == "__main__":
conn = sqlite3.Connection("expression_data.sqlite3")
#conn = sqlite3.Connection(":memory:")
createDBs(conn)
cur = conn.cursor()
loadInterproScan7(cur)
loadInterproScan8(cur)
loadNorm(cur)
# PF00201
getDataSet(cur, "PF00201")
# PS00375
getDataSet(cur, "PS00375")
buildTempView(cur)
getNormAndHoData(cur, "PF00201")
getNormAndHoData(cur, "PS00375")
loadFastaDataHo7(cur)
loadFastaDataHo8(cur)
loadFastaDataHo7Transdecoder(cur)
loadFastaDataHo8Transdecoder(cur)
loadHo8idsWithQuantIds(cur)
conn.commit()
conn.close()
|
[
"enriquegc1982@gmail.com"
] |
enriquegc1982@gmail.com
|
5a4e2fd6d664cb94196906be84cc66e9eb31eac7
|
95807bb74dd42332d094d4de5564f3b4f460adbb
|
/Python 101/indexing_and_slicing.py
|
0b01b489434ada6e2e55ee779ab27e723d07c172
|
[
"MIT"
] |
permissive
|
projetosparalelos/The-Complete-Python-Course-including-Django-Web-Framework
|
92442cfa16609016f714bbc4af91782859c7a646
|
402b35d4739ed91e50d6c3380cab6f085a46c52b
|
refs/heads/main
| 2023-05-12T01:20:41.299572
| 2021-06-03T07:56:56
| 2021-06-03T07:56:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 171
|
py
|
lst = ['one', 'two', 'three', 'four', 'five']
# 0 1 2 3 4
print(lst[-2::])
# b = True
# print(b[0])
course = "Python 101"
print(course[5])
|
[
"noreply@github.com"
] |
noreply@github.com
|
298e2aa7b8a6a78750a2be2b758d445d10b343ae
|
b4826ec1ca1f71401cad7dcfe4c7f7573223ffc5
|
/assignment6/submission/solution.py
|
dda3cb4842026a8976b5fcc2a86a6f95531cd331
|
[] |
no_license
|
deepmodh1996/Artificial-Intelligence-Lab
|
01d5240127198e85ddcd853379343c44ad0d481e
|
cf945095cf599e291116b61784b109f868418ae3
|
refs/heads/master
| 2021-01-11T18:25:53.871827
| 2017-06-17T05:22:10
| 2017-06-17T05:22:10
| 79,544,323
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,302
|
py
|
import sys
filename = sys.argv[1]
# print filename
f = open(filename,'r')
MDP = f.read().replace('\n','\t').split('\t')
MDP = [x for x in MDP if x]
# print MDP
S = int(MDP[0])
A = int(MDP[1])
# print S
# print A
r = 2
R = [[[0 for x in range(S)] for y in range(A)] for z in range(S)]
for i in range(0, S):
for j in range(0, A):
for k in range(0, S):
R[i][j][k] = float(MDP[r])
r = r + 1
# print R
T = [[[0 for x in range(S)] for y in range(A)] for z in range(S)]
for i in range(0, S):
for j in range(0, A):
for k in range(0, S):
T[i][j][k] = float(MDP[r])
r = r + 1
# print T
gamma = float(MDP[r])
# print gamma
V = [0]*S
PI = [0]*S
t = 0
epsilon = 10**(-16)
while (True):
t = t + 1
Vprev = V[:]
# print Vprev
for i in range(0, S):
maxvalue = 0.0
PI[i] = 0
for k in range(0, S):
maxvalue += T[i][0][k]*(R[i][0][k] + gamma*Vprev[k])
for j in range(0, A):
value = 0.0
for k in range(0, S):
value += T[i][j][k]*(R[i][j][k] + gamma*Vprev[k])
if (value > maxvalue):
maxvalue = max(value, maxvalue)
PI[i] = j
V[i] = maxvalue
numValid = 0
for i in range(0, len(V)):
if(abs(V[i] - Vprev[i]) > epsilon):
numValid = 1
if (numValid == 0):
break
for i in range(0, S):
    print(str(V[i]) + '\t' + str(PI[i]))
print("Iterations" + '\t' + str(t))
|
[
"deepmodh1996@gmail.com"
] |
deepmodh1996@gmail.com
|
14c5cd9b73325b3972cb62a0961b995040c5e757
|
c36d980ba59de3a562a878f185bad628078ea279
|
/Mundo_2_Python/estrutura_for/desafio_53.py
|
f00fb7eac2b231297a400abccb2d112bd91e8d87
|
[
"MIT"
] |
permissive
|
tamyrds/Exercicios-Python
|
d79f295acec4204293a076842598832f7d42e6c6
|
73bd8ea49e74db88b39c4f20dfe058a4805c0567
|
refs/heads/main
| 2023-04-11T21:40:15.446784
| 2021-05-11T02:12:34
| 2021-05-11T02:12:34
| 363,760,074
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 306
|
py
|
frase = str(input('Digite uma frase: '))
palavra = frase.split()
junto = ''.join(palavra)
inverso = ''
for letra in range(len(junto) - 1,-1,-1):
inverso += junto[letra]
print(junto,inverso)
if inverso == junto:
print('Temos um palindromo')
else:
print('A frase digitada não é um palindromo')
|
[
"78001437+tamyrds@users.noreply.github.com"
] |
78001437+tamyrds@users.noreply.github.com
|
f67e1e6de3d56e55471bc879166edec1c32ba813
|
8da79aedfb20c9798de0f4db4c5d85929a32f82b
|
/boo/columns.py
|
200ff1a19478b1dd373b0d3bbfd9b11bfc79fc79
|
[
"MIT"
] |
permissive
|
nasingfaund/boo
|
a94e941ca8d3251fbb320c2e2f63e439f7ef4d59
|
96d08857abd790bc44f48256e7be7da130543a84
|
refs/heads/master
| 2023-07-01T00:33:33.085311
| 2021-08-03T21:23:03
| 2021-08-03T21:23:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,429
|
py
|
"""Преобразование сырых названий столбцов в названия переменных.
Описания полей отчетности можно посмотреть например в:
http://info.avtovaz.ru/files/avtovaz_ras_fs_2012_rus_secured.pdf
Более подробно о публикуемой форме отчетности:
http://www.consultant.ru/document/cons_doc_LAW_103394/b990bf4a13bd23fda86e0bba50c462a174c0d123/#dst100515
"""
from collections import OrderedDict
from dataclasses import dataclass
import numpy
import pandas as pd
# Column names as provided at Rosstat web site
TTL_COLUMNS = [
"Наименование",
"ОКПО",
"ОКОПФ",
"ОКФС",
"ОКВЭД",
"ИНН",
"Код единицы измерения",
"Тип отчета",
"11103",
"11104",
"11203",
"11204",
"11303",
"11304",
"11403",
"11404",
"11503",
"11504",
"11603",
"11604",
"11703",
"11704",
"11803",
"11804",
"11903",
"11904",
"11003",
"11004",
"12103",
"12104",
"12203",
"12204",
"12303",
"12304",
"12403",
"12404",
"12503",
"12504",
"12603",
"12604",
"12003",
"12004",
"16003",
"16004",
"13103",
"13104",
"13203",
"13204",
"13403",
"13404",
"13503",
"13504",
"13603",
"13604",
"13703",
"13704",
"13003",
"13004",
"14103",
"14104",
"14203",
"14204",
"14303",
"14304",
"14503",
"14504",
"14003",
"14004",
"15103",
"15104",
"15203",
"15204",
"15303",
"15304",
"15403",
"15404",
"15503",
"15504",
"15003",
"15004",
"17003",
"17004",
"21103",
"21104",
"21203",
"21204",
"21003",
"21004",
"22103",
"22104",
"22203",
"22204",
"22003",
"22004",
"23103",
"23104",
"23203",
"23204",
"23303",
"23304",
"23403",
"23404",
"23503",
"23504",
"23003",
"23004",
"24103",
"24104",
"24213",
"24214",
"24303",
"24304",
"24503",
"24504",
"24603",
"24604",
"24003",
"24004",
"25103",
"25104",
"25203",
"25204",
"25003",
"25004",
"32003",
"32004",
"32005",
"32006",
"32007",
"32008",
"33103",
"33104",
"33105",
"33106",
"33107",
"33108",
"33117",
"33118",
"33125",
"33127",
"33128",
"33135",
"33137",
"33138",
"33143",
"33144",
"33145",
"33148",
"33153",
"33154",
"33155",
"33157",
"33163",
"33164",
"33165",
"33166",
"33167",
"33168",
"33203",
"33204",
"33205",
"33206",
"33207",
"33208",
"33217",
"33218",
"33225",
"33227",
"33228",
"33235",
"33237",
"33238",
"33243",
"33244",
"33245",
"33247",
"33248",
"33253",
"33254",
"33255",
"33257",
"33258",
"33263",
"33264",
"33265",
"33266",
"33267",
"33268",
"33277",
"33278",
"33305",
"33306",
"33307",
"33406",
"33407",
"33003",
"33004",
"33005",
"33006",
"33007",
"33008",
"36003",
"36004",
"41103",
"41113",
"41123",
"41133",
"41193",
"41203",
"41213",
"41223",
"41233",
"41243",
"41293",
"41003",
"42103",
"42113",
"42123",
"42133",
"42143",
"42193",
"42203",
"42213",
"42223",
"42233",
"42243",
"42293",
"42003",
"43103",
"43113",
"43123",
"43133",
"43143",
"43193",
"43203",
"43213",
"43223",
"43233",
"43293",
"43003",
"44003",
"44903",
"61003",
"62103",
"62153",
"62203",
"62303",
"62403",
"62503",
"62003",
"63103",
"63113",
"63123",
"63133",
"63203",
"63213",
"63223",
"63233",
"63243",
"63253",
"63263",
"63303",
"63503",
"63003",
"64003",
"Дата актуализации",
]
# -- Text fields
MAPPER = OrderedDict(
[
("Наименование", "name"),
("ОКПО", "okpo"),
("ОКОПФ", "okopf"),
("ОКФС", "okfs"),
("ОКВЭД", "okved"),
("ИНН", "inn"),
("Код единицы измерения", "unit"),
("Тип отчета", "report_type"),
("Дата актуализации", "date_published"),
# -- Balance sheet
# -- Non-current assets
("1100", "ta_fix"),
("1150", "of"),
("1170", "ta_fix_fin"),
# -- Current assets
("1200", "ta_nonfix"),
("1210", "inventory"),
("1230", "receivables"),
("1240", "ta_nonfix_fin"),
("1250", "cash"),
("1600", "ta"),
# -- Equity and liabilities
("1300", "tp_capital"),
("1360", "retained_earnings"),
("1400", "tp_long"),
("1410", "debt_long"),
("1500", "tp_short"),
("1510", "debt_short"),
("1520", "payables"),
("1700", "tp"),
# -- Income statement
("2110", "sales"),
("2120", "costs"),
("2200", "profit_oper"),
("2330", "exp_interest"),
("2300", "profit_before_tax"),
("2400", "profit_after_tax"),
# -- Cash flow statement
("4400", "cf"),
# -- Operating activities
("4100", "cf_oper"),
("4110", "cf_oper_in"),
("4111", "cf_oper_in_sales"),
("4120", "cf_oper_out"),
("4121", "paid_to_supplier"),
("4122", "paid_to_worker"),
("4123", "paid_interest"),
("4124", "paid_profit_tax"),
# -- Investing activities
("4200", "cf_inv"),
("4210", "cf_inv_in"),
("4220", "cf_inv_out"),
("4221", "paid_fa_investment"),
# -- Financing activities
("4300", "cf_fin"),
("4310", "cf_fin_in"),
("4311", "cf_loan_in"),
("4312", "cf_eq_in_1"),
("4313", "cf_eq_in_2"),
("4314", "cf_bond_in"),
("4320", "cf_fin_out"),
("4321", "cf_eq_out"),
("4322", "cf_div_out"),
("4323", "cf_debt_out"),
]
)
def ask(code):
return MAPPER.get(str(code))
def fst(text):
return text[0]
def last(text):
return text[-1]
def trim(text):
return text[0:-1]
NON_NUMERIC = "x"
# This dtype ensures missing integer values are converted to NaNs
# See https://pandas.pydata.org/pandas-docs/stable/user_guide/integer_na.html
# and https://github.com/ru-corporate/boo/issues/18
INT_TYPE = pd.Int64Dtype()
@dataclass
class Column:
code: str
section: str
lag: bool
def rename_with(self, mapper: dict):
new_code = mapper.get(self.code, self.code)
return Column(new_code, self.section, self.lag)
def is_numeric(self):
return self.section != NON_NUMERIC
@property
def label(self):
return self.code + ("_lag" if self.lag else "")
@property
def dtype(self):
return INT_TYPE if self.is_numeric() else str
def is_lagged(text):
if fst(text) == "3":
return False
if last(text) == "3":
return False
if last(text) == "4":
return True
return None
assert is_lagged("63243") is False
assert is_lagged("Дата актуализации") is None
assert is_lagged("23304") is True
def section(text):
num = text[0]
return {
"1": "Баланс",
"2": "ОПУ",
"3": "Изменения капитала",
"4": "ОДДС",
"6": "Extras",
}.get(num, NON_NUMERIC)
def code(text):
if fst(text) in ["1", "2", "4", "6"]:
return text[0:-1]
else:
return text
def column(text):
return Column(code(text), section(text), is_lagged(text))
columns = [column(x) for x in TTL_COLUMNS]
INDEX = [i for (i, c) in enumerate(columns) if c.rename_with(MAPPER) != c]
columns_short = [c.rename_with(MAPPER) for c in columns if c.rename_with(MAPPER) != c]
NAMES = {c.label: c.dtype for c in columns_short}
assert len(INDEX) == len(NAMES)
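# A minimal usage sketch (editor's addition; the expected values follow
# directly from the definitions above):
if __name__ == "__main__":
    col = column("11103")        # a raw Rosstat balance-sheet header
    assert col.code == "1110"    # the trailing year digit is stripped
    assert col.lag is False      # a trailing "3" marks the current-year column
    assert ask("1600") == "ta"   # code 1600 maps to total assets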
|
[
"e.pogrebnyak@gmail.com"
] |
e.pogrebnyak@gmail.com
|
b27851062a7ab359e902306b8840e240f1c7031a
|
2473096eef0a4c19d73494616562612e5fe8d85f
|
/chapters/04_machine_learning_basics/linear_regression_graph.py
|
ebf439242c9279c49239c3b3ccc6476a85c33556
|
[] |
no_license
|
barmi/tensorflowbook
|
85e38fa6575efc0df08f4f62af5c6eff68d9860a
|
b2d8bd6c612786c89f7b94aefd545bfbe4943c8d
|
refs/heads/master
| 2021-01-22T12:38:36.253253
| 2017-10-12T11:51:33
| 2017-10-12T11:51:33
| 102,354,484
| 0
| 0
| null | 2017-09-04T11:31:35
| 2017-09-04T11:31:35
| null |
UTF-8
|
Python
| false
| false
| 2,124
|
py
|
# Linear regression example in TF.
import tensorflow as tf
import numpy as np
def inference(X):
return tf.add(tf.matmul(X, W, name='W_mul_X'), b, name='inference')
def evaluate(sess):
print(sess.run(inference([[80., 25.]]))) # ~ 303
print(sess.run(inference([[65., 25.]]))) # ~ 256
# weight_age
x_data = np.float32([
[84, 46], [73, 20], [65, 52], [70, 30], [76, 57],
[69, 25], [63, 28], [72, 36], [79, 57], [75, 44],
[27, 24], [89, 31], [65, 52], [57, 23], [59, 60],
[69, 48], [60, 34], [79, 51], [75, 50], [82, 34],
[59, 46], [67, 23], [85, 37], [55, 40], [63, 30]])
# blood_fat_content
y_data = [
354, 190, 405, 263, 451,
302, 288, 385, 402, 365,
209, 290, 346, 254, 395,
434, 220, 374, 308, 220,
311, 181, 274, 303, 244]
graph = tf.Graph()
with graph.as_default():
W = tf.Variable(tf.random_uniform([2,1], -1.0, 1.0), name="weight")
b = tf.Variable(tf.zeros([1]), name="bias")
y = tf.add(tf.matmul(x_data, W, name="mul"), b, name="add")
loss = tf.reduce_sum(tf.squared_difference(y, y_data), name="loss")
train_op = tf.train.GradientDescentOptimizer(0.0000001).minimize(loss, name="GradientDescent")
# Launch the graph in a session, setup boilerplate
with tf.Session(graph=graph) as sess:
writer = tf.summary.FileWriter('./linearReg', graph)
tf.global_variables_initializer().run()
'''
def inference(X):
return tf.add(tf.multiply(X, W, name='W_mul_X'), b, name='inference')
def loss(X, Y):
Y_predicted = inference(X)
return tf.reduce_sum(tf.squared_difference(Y, Y_predicted))
def train(total_loss):
learning_rate = 0.0000001
return tf.train.GradientDescentOptimizer(learning_rate).minimize(total_loss)
'''
# actual training loop
training_steps = 10000
for step in range(training_steps):
result = sess.run(train_op)
w_res = sess.run(W)
if step % 100 == 0:
print("%5d : W (%12.8f, %12.8f), b (%12.8f), loss: %12.8f" % (step, w_res[0], w_res[1], sess.run(b), sess.run(loss)))
writer.flush()
evaluate(sess)
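# Editor's note: the tiny learning rate above compensates for unscaled
# features. A minimal sketch of standardizing the inputs instead (the
# variable name below is illustrative, not from the original):
x_scaled = (x_data - x_data.mean(axis=0)) / x_data.std(axis=0)
# With standardized inputs a conventional learning rate (e.g. 0.01) usually
# converges far faster; the fitted weights then apply to standardized units.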
|
[
"skshin@nbreds.com"
] |
skshin@nbreds.com
|
bda639d9da4402bab567f7414ee6b727da5e4aa0
|
2b7b5628f199bca51ff916b81140794ee2f79edd
|
/Array/array_reverse.py
|
0bb4dd7f925eb88352441b11a1509c9346815905
|
[] |
no_license
|
Koilada-Rao-au16/DSA_Solver
|
9d01cdc72bf8602926a1824ba8dc739924b62406
|
6db2d9b855b0bf7ee6dfb5e5bcf50f4969534fc6
|
refs/heads/main
| 2023-04-06T03:26:47.902446
| 2021-04-12T10:07:56
| 2021-04-12T10:07:56
| 348,692,423
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 432
|
py
|
# Iterative python program to reverse an array
def reverseList(self):
print(self[::-1])
self = [1,2,3,4,5]
print(self)
print("reverse list is ")
reverseList(self)
# method 2
def reverseList(A,start,end):
if start >= end:
return
A[start],A[end] = A[end],A[start]
reverseList(A, start+1 , end-1)
A = [1,2,3,4,5]
print(A)
reverseList(A, 0, 4)
print("reversed list is")
print(A)
# time complexity O(n)
|
[
"bhaskar9.koilada@gmail.com"
] |
bhaskar9.koilada@gmail.com
|
775d4e974bbace6a037417248f6885324aebea6a
|
85764904e918310f9e4a209f64570dcdcf099818
|
/loutilities/user/roles.py
|
3df49ee5b5f5d63b1edda6261c2976dbd2e6b5e1
|
[
"Apache-2.0"
] |
permissive
|
louking/loutilities
|
05bb20994ae06d2e68989cd6a779c350a9a430ad
|
aaf7410849d0167001cd5f06ab0dae6563e58ec7
|
refs/heads/master
| 2023-07-24T18:32:36.128102
| 2023-07-15T10:02:43
| 2023-07-15T10:02:43
| 5,824,315
| 2
| 2
| null | 2023-05-10T09:59:37
| 2012-09-15T21:29:29
|
Python
|
UTF-8
|
Python
| false
| false
| 3,136
|
py
|
###########################################################################################
# roles - common location for utility role declarations
#
# Date Author Reason
# ---- ------ ------
# 03/11/20 Lou King Create
#
# Copyright 2020 Lou King. All rights reserved
###########################################################################################
from loutilities.user.model import APP_CONTRACTS, APP_MEMBERS, APP_ROUTES, APP_SCORES, APP_ALL
# common roles
ROLE_SUPER_ADMIN = 'super-admin'
ROLES_COMMON = [ROLE_SUPER_ADMIN]
roles_common = [
{'name': 'super-admin', 'description': 'allowed to do everything on all applications', 'apps': APP_ALL},
]
# members roles
ROLE_LEADERSHIP_ADMIN = 'leadership-admin'
ROLE_LEADERSHIP_MEMBER = 'leadership-member'
ROLE_MEMBERSHIP_ADMIN = 'membership-admin'
ROLE_MEETINGS_ADMIN = 'meetings-admin'
ROLE_MEETINGS_MEMBER = 'meetings-member'
ROLE_RACINGTEAM_ADMIN = 'racingteam-admin'
ROLE_RACINGTEAM_MEMBER = 'racingteam-member'
roles_members = [
{'name': ROLE_LEADERSHIP_ADMIN, 'description': 'access to leadership tasks for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_LEADERSHIP_MEMBER, 'description': 'user of leadership tasks for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_MEMBERSHIP_ADMIN, 'description': 'access to membership admininstration for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_MEETINGS_ADMIN, 'description': 'access to meetings administration for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_MEETINGS_MEMBER, 'description': 'user of meetings for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_RACINGTEAM_ADMIN, 'description': 'access to racingteam administration for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_RACINGTEAM_MEMBER, 'description': 'user of racingteam module for members application', 'apps':[APP_MEMBERS]},
]
# routes roles
ROLE_ROUTES_ADMIN = 'routes-admin'
ROLE_ICON_ADMIN = 'icon-admin'
roles_routes = [{'name': ROLE_ROUTES_ADMIN, 'description': 'access to routes for routes application', 'apps':[APP_ROUTES]},
{'name': ROLE_ICON_ADMIN, 'description': 'access to icons for routes application', 'apps':[APP_ROUTES]}
]
# contracts roles
ROLE_EVENT_ADMIN = 'event-admin'
ROLE_SPONSOR_ADMIN = 'sponsor-admin'
roles_contracts = [{'name': ROLE_EVENT_ADMIN, 'description': 'access to events for contracts application', 'apps':[APP_CONTRACTS]},
{'name': ROLE_SPONSOR_ADMIN, 'description': 'access to sponsors/races for contracts application', 'apps':[APP_CONTRACTS]}
]
# scores roles
ROLE_SCORES_ADMIN = 'scores-admin'
ROLE_SCORES_VIEWER = 'scores-viewer'
roles_scores = [{'name': ROLE_SCORES_ADMIN, 'description': 'administer scores application', 'apps':[APP_SCORES]},
{'name': ROLE_SCORES_VIEWER, 'description': 'view scores application', 'apps':[APP_SCORES]},
]
all_roles = [roles_common, roles_contracts, roles_members, roles_routes, roles_scores]
|
[
"lking@pobox.com"
] |
lking@pobox.com
|
19f57d01e4d553b3bab39e996318902932a5bef7
|
19631688a8be0e390f25a915f634a76c9a3d4fa3
|
/ABC108/B_rined_square.py
|
1adcdf28210c801eb65f6cbd8748a2b5afd49091
|
[] |
no_license
|
tsurusekazuki/AtCoder-practice
|
7141fd74f6a876c9f3e65a1dca400ef4c153bca8
|
e9538157b6a63f43300c6693e9c5deadaa4d5d2a
|
refs/heads/master
| 2020-05-02T02:57:14.180926
| 2019-04-28T10:00:49
| 2019-04-28T10:00:49
| 177,715,718
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 87
|
py
|
a, b, c, d = map(int, input().split())
x = c - a
y = d - b
print(c-y, d+x, a-y, b+x)
|
[
"b1714935@planet.kanazawa-it.ac.jp"
] |
b1714935@planet.kanazawa-it.ac.jp
|
d3bba560b1a63f14196f8ac1f02e0fc94bd6ac84
|
c5873c616d60d51ddc0a2388dce3c69ee0332d96
|
/poker_project/settings/base.py
|
1ab88949cbc4e7687010c5a66291d865059adfc6
|
[] |
no_license
|
dmongey101/texas-hold-em
|
4e915acc6326cd07463e7b5160e870556a69a689
|
9e3481d8fed77b0f80c8740f5946505e9ead4a6e
|
refs/heads/master
| 2022-12-12T23:04:30.665901
| 2022-10-24T17:56:32
| 2022-10-24T17:56:32
| 158,427,569
| 3
| 1
| null | 2022-12-08T01:28:49
| 2018-11-20T17:30:48
|
HTML
|
UTF-8
|
Python
| false
| false
| 3,459
|
py
|
"""
Django settings for poker_project project.
Generated by 'django-admin startproject' using Django 2.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
import dj_database_url
import environ
env = environ.Env()
environ.Env.read_env()
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "jnem='&rm_rqin%u!h1dwqh6wc-qkr#j-=77)r%*dw^-cxb#!++"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_forms_bootstrap',
'accounts',
'poker',
'storages',
'donations'
]
MIDDLEWARE = [
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'poker_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.media',
],
},
},
]
WSGI_APPLICATION = 'poker_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': dj_database_url.parse(os.environ.get("DATABASE_URL", "sqlite:///db.sqlite3"))
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/'
STRIPE_PUBLISHABLE = os.environ.get('STRIPE_PUBLISHABLE')
STRIPE_SECRET = os.environ.get('STRIPE_SECRET')
|
[
"donalmongey@gmail.com"
] |
donalmongey@gmail.com
|
1f0baac7e207e96a1df522f360dbc91a273ce68e
|
d4bad471feefc230441ff2bbb2c312b50c96d554
|
/chapter_code/chapter32_Spark_AI/HorovodEstimator.py
|
36243e6c0f91c166e37312e6b7640cb19d773c1b
|
[] |
no_license
|
limiaoiao/code-of-spark-big-data-business-trilogy
|
6d69b1c6cf4b91ec12c65b9e4c64b0034f679d79
|
6bb7aa6c0209c8e20c5c1d7a191162635ecd658b
|
refs/heads/master
| 2022-12-29T05:47:53.133212
| 2020-08-16T12:38:34
| 2020-08-16T12:38:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,356
|
py
|
# Databricks notebook source
import numpy as np
import tensorflow as tf
import horovod.tensorflow as hvd
from pyspark.sql.types import *
from pyspark.sql.functions import rand, when
from sparkdl.estimators.horovod_estimator.estimator import HorovodEstimator
# COMMAND ----------
# Load MNIST dataset, with images represented as arrays of floats
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data("/tmp/mnist")
x_train = x_train.reshape((x_train.shape[0], -1))
data = [(x_train[i].astype(float).tolist(), int(y_train[i])) for i in range(len(y_train))]
schema = StructType([StructField("image", ArrayType(FloatType())),
StructField("label_col", LongType())])
df = spark.createDataFrame(data, schema)
display(df)
# COMMAND ----------
help(HorovodEstimator)
# COMMAND ----------
def model_fn(features, labels, mode, params):
"""
Arguments:
* features: Dict of DataFrame input column name to tensor (each tensor corresponding to
batch of data from the input column)
* labels: Tensor, batch of labels
* mode: Specifies if the estimator is being run for training, evaluation or prediction.
* params: Optional dict of hyperparameters. Will receive what is passed to
HorovodEstimator in params parameter. This allows for configuring Estimators for
hyperparameter tuning.
Returns: tf.estimator.EstimatorSpec describing our model.
"""
from tensorflow.examples.tutorials.mnist import mnist
# HorovodEstimator feeds scalar Spark SQL types to model_fn as tensors of shape [None]
# (i.e. a variable-sized batch of scalars), and array Spark SQL types (including
# VectorUDT) as tensors of shape [None, None] (i.e. a variable-sized batch of dense variable-length arrays).
#
# Here image data is fed from an ArrayType(FloatType()) column,
# e.g. as a float tensor with shape [None, None]. We know each float array is of length 784,
# so we reshape our tensor into one of shape [None, 784].
input_layer = features['image']
#input_layer = tf.reshape(input_layer, shape=[-1, 784])
logits = mnist.inference(input_layer, hidden1_units=params["hidden1_units"],
hidden2_units=params["hidden2_units"])
serving_key = tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
# Generate a dictionary of inference output name to tensor (for PREDICT mode)
# Tensor outputs corresponding to the DEFAULT_SERVING_SIGNATURE_DEF_KEY are produced as output columns of
# the TFTransformer generated by fitting our estimator
predictions = {
"classes": tf.argmax(input=logits, axis=1, name="classes_tensor"),
"probabilities": tf.nn.softmax(logits, name="softmax_tensor"),
}
export_outputs = {serving_key: tf.estimator.export.PredictOutput(predictions)}
    # If the estimator is running in PREDICT mode, we can stop building the model graph here and simply return
    # the model's inference outputs
if mode == tf.estimator.ModeKeys.PREDICT:
return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions,
export_outputs=export_outputs)
# Calculate Loss (for both TRAIN and EVAL modes)
onehot_labels = tf.one_hot(indices=tf.cast(labels, tf.int32), depth=10)
loss = tf.losses.softmax_cross_entropy(onehot_labels=onehot_labels, logits=logits)
if mode == tf.estimator.ModeKeys.TRAIN:
# Set up logging hooks; these run on every worker.
logging_hooks = [tf.train.LoggingTensorHook(tensors={"predictions": "classes_tensor"}, every_n_iter=5000)]
# Horovod: scale learning rate by the number of workers, add distributed optimizer
optimizer = tf.train.MomentumOptimizer(
learning_rate=0.001 * hvd.size(), momentum=0.9)
optimizer = hvd.DistributedOptimizer(optimizer)
train_op = optimizer.minimize(
loss=loss,
global_step=tf.train.get_global_step())
return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op,
export_outputs=export_outputs,
training_hooks=logging_hooks)
    # If running in EVAL mode, add model evaluation metrics (accuracy) to the EstimatorSpec so that
# they're logged when model evaluation runs
eval_metric_ops = {"accuracy": tf.metrics.accuracy(
labels=labels, predictions=predictions["classes"])}
return tf.estimator.EstimatorSpec(
mode=mode, loss=loss, eval_metric_ops=eval_metric_ops, export_outputs=export_outputs)
# COMMAND ----------
# Model checkpoints will be saved to the driver machine's local filesystem.
model_dir = "/tmp/horovod_estimator"
dbutils.fs.rm(model_dir[5:], recurse=True)
# Create estimator
est = HorovodEstimator(modelFn=model_fn,
featureMapping={"image": "image"},
modelDir=model_dir,
labelCol="label_col",
batchSize=64,
maxSteps=5000,
isValidationCol="isVal",
modelFnParams={"hidden1_units": 100, "hidden2_units": 50},
saveCheckpointsSecs=30)
# COMMAND ----------
# Add column indicating whether each row is in the training/validation set; we perform a random split of the data
df_with_val = df.withColumn("isVal", when(rand() > 0.8, True).otherwise(False))
# Fit estimator to obtain a TFTransformer
transformer = est.fit(df_with_val)
# Apply the TFTransformer to our training data and display the results. Note that our predicted "classes" tend to
# match the label column in our training set.
res = transformer.transform(df)
display(res)
# COMMAND ----------
est.setMaxSteps(10000)
new_transformer = est.fit(df_with_val)
new_res = new_transformer.transform(df)
display(new_res)
# COMMAND ----------
dbutils.fs.cp("file:/tmp/horovod_estimator/", "dbfs:/horovod_estimator/", recurse=True)
# COMMAND ----------
# MAGIC %sh
# MAGIC ls -ltr /tmp/horovod_estimator
# COMMAND ----------
print(dbutils.fs.ls("dbfs:/horovod_estimator/"))
# COMMAND ----------
# MAGIC %sh
# MAGIC rm -rf /tmp/horovod_estimator
# COMMAND ----------
# MAGIC %sh
# MAGIC ls -ltr /tmp/horovod_estimator
|
[
"noreply@github.com"
] |
noreply@github.com
|
f771322752f5feab04cb77f3b2f35d3026f3513f
|
8aa3069cd4840fd216b917187a9c96bd7d3e2367
|
/Exercícios/binomiofatorial.py
|
424d1e4b8b3bb4389d4000032efe0357afec0102
|
[] |
no_license
|
rafaelsaidbc/USP
|
b10a28f958a1af5670fe48061f7b0c8b9db5d5d0
|
8c077f392fccd814380ea0e1b5ec228a54d4f779
|
refs/heads/master
| 2020-03-24T00:41:12.718523
| 2018-07-25T18:31:47
| 2018-07-25T18:31:47
| 142,302,564
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 901
|
py
|
def fatorial(n):
    fat = 1  # start at 1, the identity value for multiplication
    while(n > 1):  # keep looping while n is greater than 1
        fat = fat * n  # multiply fat by n
        n = n - 1  # decrease n by 1
    return fat  # leave the loop and return the accumulated factorial
def numero_binomial(n, k):
return fatorial(n) / (fatorial(k) * fatorial(n - k))
def testa_fatorial(): # tests the fatorial function
if fatorial(1) == 1:
print("Funciona para 1")
else:
print("Não funciona para 1")
if fatorial(2) == 2:
print("Funciona para 2")
else:
print("Não funciona para 2")
if fatorial(0) == 1:
print("Funciona para 0")
else:
print("Não funciona para 0")
if fatorial(5) == 120:
print("Funciona para 5")
else:
print("Não funciona para 5")
|
[
"rafaelsaidbc@yahoo.com.br"
] |
rafaelsaidbc@yahoo.com.br
|
0b20a4b796a7b98a278b4bcbbc680c4308972641
|
35438635c64c8ec1066285f9849bb3634ee46644
|
/weibo乃万/analyze.py
|
11fbf4a9ad459c2d9824f995016c70746c68a6bb
|
[] |
no_license
|
fancccc/code2021
|
8bbd6e68e1c2587e520daa18a147c48f431a5269
|
8e85a69bd30092e7337cbfade5c34ec62b7c2ad3
|
refs/heads/main
| 2023-04-26T06:27:06.812121
| 2021-05-13T15:10:22
| 2021-05-13T15:10:22
| 367,084,415
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,339
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon May 10 23:27:01 2021
@author: Mario
"""
import pandas as pd
from snownlp import SnowNLP
from snownlp import sentiment
import random
import jieba
import imageio
from wordcloud import WordCloud,ImageColorGenerator
import matplotlib.pyplot as plt
from PIL import Image
import numpy as np
from pyecharts import options as opts
from pyecharts.charts import TreeMap
from matplotlib.patches import ConnectionPatch
plt.rcParams['font.sans-serif']=['SimHei']
#sentiment.train('neg.txt', 'pos.txt')
#sentiment.save('QuanShi.marshal')
df = pd.read_csv('AllData.csv',encoding = 'gb18030')
df['ZAN'] = df['ZAN'].replace('list index out of range',0)
df['ZAN'] = df['ZAN'].apply(int)
'''
lis = df['comment'][80:100].tolist()
for text in lis:
s = SnowNLP(text)
print(text,'\n',s.sentiments)
#df.dropna(subset=['comment'],inplace=True)
qs = []
for i in df['comment'].tolist():
try:
qs.append(SnowNLP(i).sentiments)
except:
qs.append(0)
df['拳师score'] = qs
df['拳师'] = df['拳师score'].apply(lambda x:'yes' if x >= 0.5 else 'no')
df.to_csv('AllData.csv',index = 0,encoding = 'gb18030')
'''
dfY = df[df['拳师'] == 'yes']
dfN = df[df['拳师'] == 'no']
for example in random.sample(dfY['comment'].tolist(), 10):
print(example)
sex_propor = len(dfY[dfY['sex'] == '女']) / len(dfY)
print(sex_propor)
# draw the word cloud
def draw_wordcloud(s,filename):
    # the full comment text to visualize
    comment_text = s
    # segment with jieba into a space-joined string; without segmentation a correct Chinese word cloud cannot be built
    cut_text = " ".join(jieba.cut(comment_text))
    #color_mask = imageio.imread("rock.png") # read the background image
    #color_mask = np.array(Image.open('rock.png'))
    cloud = WordCloud(
        font_path = 'msyhl.ttc',
        # background color
        background_color = 'white',
        # word-cloud shape mask
        #mask = color_mask,
        # maximum number of words
        max_words = 2000,
        # largest font size
        max_font_size = 50,
        scale = 6, # resolution scale
)
    word_cloud = cloud.generate(cut_text) # generate the word cloud
    plt.figure(figsize = (10,10),dpi = 80)
    word_cloud.to_file('img/'+filename) # save the image
    plt.axis('off')
    # display the word cloud image
    plt.imshow(word_cloud)
s = ''
for i in dfN['comment']:
s += str(i) + '。'
draw_wordcloud(s,'非女权.png')
s = ''
for i in dfY['comment']:
s += str(i) + '。'
draw_wordcloud(s,'女权.png')
plt.figure(dpi = 120)
x = [len(dfY), len(dfN)]
explode = [0.1,0.01]
labels = ['拳师','非拳师']
plt.pie(x = x,
explode = explode,
labels = labels,
autopct = '%3.2f%%')
plt.title('总体分布')
plt.savefig('img/总体分布.png')
plt.show()
plt.figure(dpi = 120)
x = [len(dfY[dfY['sex'] == '女']), len(dfY[dfY['sex'] == '男'])]
explode = [0.01,0.01]
labels = ['女','男']
plt.pie(x = x,
explode = explode,
labels = labels,
autopct = '%3.2f%%')
plt.title('性别')
plt.savefig('img/性别.png')
plt.show()
plt.figure(figsize=(15,15),dpi = 120)
df_acre = df.groupby('acre')['id'].count()
x = df_acre.values
labels = df_acre.index
explode = [0.001] * len(x)
plt.pie(x = x,
explode = explode,
labels = labels,
autopct = '%3.2f%%')
plt.title('地区')
plt.savefig('img/地区.png')
plt.show()
df['lable'] = df['拳师score'].apply(lambda x:1 if x >= 0.5 else 0)
replyY = dfY['reply'].mean()
replyN = dfN['reply'].mean()
weiboY = dfY['weibo'].mean()
weiboN = dfN['weibo'].mean()
befanY = dfY['befan'].mean()
befanN = dfN['befan'].mean()
fanY = dfY['fan'].mean()
fanN = dfN['fan'].mean()
zanY = dfY['ZAN'].mean()
zanN = dfN['ZAN'].mean()
plt.figure(dpi = 120)
x = ['回复数(评论)','点赞数(评论)','微博数','关注数','粉丝数']
index = np.arange(len(x))
bar_width = 0.45
y1 = [replyY, zanY, weiboY, befanY, fanY]
y2 = [replyN, zanN, weiboN, befanN, fanN]
plt.bar(index, y1, bar_width, label = '女权')
plt.bar(index+bar_width, y2, bar_width, label = '非女权')
plt.xticks(index+bar_width/2, x)
plt.legend()
plt.title('数据对比')
plt.xlabel('指标均值')
for a,b in zip(index,y1):
plt.text(a, b+10,'%.1f'%b, ha = 'center',va = 'bottom')
for a,b in zip(index,y2):
plt.text(a+bar_width, b+10,'%.1f'%b, ha = 'center',va = 'bottom')
plt.savefig('img/各项指标分析.png')
plt.show()
|
[
"workfc@163.com"
] |
workfc@163.com
|
6137faebb9a642e09f39a277fe0b98ca4709d399
|
8398421e297b61c345f81005940f01aa79e2bf53
|
/subwindows/delete_orphans/delete_orphans_creator.py
|
f1e9c501371625d623e3f3f2f90533ab408fc85d
|
[] |
no_license
|
fcunhaneto-test/mscollection_qt
|
e206a90eb8f3a80885d1067648c718f32a6d2057
|
3ccc26087d503aa833d0725e93f927a86800d7c0
|
refs/heads/master
| 2022-11-09T20:16:41.730677
| 2019-06-15T10:48:58
| 2019-06-15T10:48:58
| 166,302,027
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,376
|
py
|
import os
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QColor, QFont
from PyQt5.QtWidgets import QMdiSubWindow, QTableWidget, QWidget, \
QTableWidgetItem, QVBoxLayout, QHBoxLayout, QCheckBox
import texts
from db.db_model import Creator, SeriesCreator
from db.db_settings import Database as DB
from lib.function_lib import hbox_create, pb_create, delete_orphans
class DeleteOrphansCreator(QMdiSubWindow):
def __init__(self, main):
"""
        Class for deleting creators that are orphaned in the database.
        :param main: Reference to the main window.
"""
super(DeleteOrphansCreator, self).__init__()
self.session = DB.get_session()
self.creator = self.session.query(Creator)
self.main = main
window_title = texts.delete_orphans + ' ' + texts.creator_p
self.setWindowTitle(window_title)
self.subwindow = QWidget()
p = self.palette()
p.setColor(self.backgroundRole(), QColor(230, 230, 250))
self.setPalette(p)
self.setWidget(self.subwindow)
font = QFont()
font.setPointSize(12)
# Vbox Main
self.vbox_main = QVBoxLayout(self.subwindow)
self.vbox_main.setContentsMargins(20, 20, 20, 20)
self.vbox_main.setSpacing(10)
# Table Cast
self.table = QTableWidget()
self.table.setColumnCount(2)
self.table.setContentsMargins(20, 0, 0, 0)
self.headers = [
texts.creator_s,
'Del'
]
self.table.setHorizontalHeaderLabels(self.headers)
# table set column width
w = int(0.5 * main.frameSize().width())
col_1 = int(0.60 * (w - 50))
col_2 = int(0.20 * (w - 50))
col_width = col_1 + col_2 + 4
self.table.setColumnWidth(0, col_1)
self.table.setColumnWidth(1, col_2)
        self.table.verticalHeader().setDefaultSectionSize(30)
self.table.setFixedWidth(col_width)
self.table.horizontalHeader().setFont(font)
self.table.horizontalHeader().setStyleSheet(
'background-color: rgb(230, 230, 230);')
self.table.verticalHeader().setVisible(False)
self.rows = 0
self.ch_del = []
self.vbox_main.addWidget(self.table)
# Buttons
self.pb_delete = pb_create(texts.pb_delete, 12, 40)
self.pb_delete.setMinimumHeight(40)
self.pb_delete.setShortcut('Ctrl+D')
self.pb_delete.clicked.connect(self.delete)
self.pb_leave = pb_create(texts.pb_leave, 12, 40)
self.pb_leave.setMinimumHeight(40)
self.pb_leave.setShortcut('Ctrl+Q')
self.pb_leave.clicked.connect(self.close)
self.pb_help = pb_create(texts.pb_help, height=40)
self.pb_help.setMinimumHeight(40)
self.pb_help.clicked.connect(self.help)
self.pb_help.setShortcut('Ctrl+H')
self.pb_select_all = pb_create(texts.pb_select_all, 12, 40)
self.pb_select_all.setMinimumHeight(40)
self.pb_select_all.setShortcut('Ctrl+A')
self.pb_select_all.clicked.connect(self.select_all)
self.hb_pb = QHBoxLayout()
self.hb_pb.setSpacing(10)
self.hb_pb.addWidget(self.pb_delete)
self.hb_pb.addWidget(self.pb_leave)
self.hb_pb.addWidget(self.pb_help)
self.hb_pb.addWidget(self.pb_select_all)
self.vbox_main.addLayout(self.hb_pb)
self.width = col_width + 44
self.height = int(0.8 * main.frameSize().height())
self.setGeometry(0, 0, self.width, self.height)
self.create_table()
def create_table(self):
"""
        Create a table listing every orphaned creator, each row with a
        QCheckBox; creators whose box is checked will be deleted.
"""
sub = self.session.query(SeriesCreator.creator_id)
sub = sub.distinct()
creator_result = self.creator.filter(Creator.id.notin_(sub)).all()
for creator in creator_result:
self.table.insertRow(self.rows)
self.table.setItem(self.rows, 0, QTableWidgetItem(creator.name))
ch_del = QCheckBox(str(creator.id))
self.ch_del.append(ch_del)
hb_del = hbox_create([self.ch_del[self.rows]], 0)
hb_del.setAlignment(Qt.AlignCenter)
cell_del = QWidget()
cell_del.setLayout(hb_del)
self.table.setCellWidget(self.rows, 1, cell_del)
if self.rows % 2 != 0:
self.table.item(self.rows, 0).setBackground(
QColor(230, 230, 230)
)
self.table.cellWidget(self.rows, 1).setStyleSheet(
'background-color: #E6E6E6;'
'color: #E6E6E6;'
)
else:
self.table.cellWidget(self.rows, 1).setStyleSheet(
'color: #FFFFFF;'
)
self.table.item(self.rows, 0).setFlags(
Qt.ItemIsSelectable | Qt.ItemIsEnabled
)
self.rows += 1
height = self.rows * 30 + 20
self.table.setMinimumHeight(height)
self.height = height + 130
self.setGeometry(0, 0, self.width, self.height)
def delete(self):
"""
        Delete the checked creators from the database.
"""
delete_orphans(self.session, self.ch_del, Creator, texts.creator_s)
self.clear()
self.create_table()
def select_all(self):
"""
Mark all delete QCheckBox.
"""
for ch in self.ch_del:
ch.setChecked(True)
# Clear
def clear(self):
"""
Clear all values in windows.
"""
        for row in reversed(range(self.rows)):
            self.table.removeRow(row)
self.table.clear()
self.table.setColumnCount(2)
self.table.setRowCount(0)
self.table.setHorizontalHeaderLabels(self.headers)
self.rows = 0
self.ch_del = []
self.session.expire_all()
# Help
def help(self):
        # Help display is delegated to the main window because the
        # "stack_trace posix.cc (699)" bug prevents the page from finding its directory.
dir = os.getcwd()
url = 'file:///' + dir + '/views_help/help_delete_orphans.html'
self.main.views_help(url, texts.help_edit_movie)
# Close Event
def closeEvent(self, event):
self.session.close()
|
[
"fcunhaneto@gmail.com"
] |
fcunhaneto@gmail.com
|
8ff2e48f09e238f0d2399ee7edab5a7d44af8ee2
|
142122fb03679fe84d54ccae65493416b925fe25
|
/code/main_classification.py
|
1104890bc6c75b9d8b475ab8f2a841ee4d69ff00
|
[] |
no_license
|
mshaikh2/IPMI2021
|
dce84bf37204a1518b776f1dc50e0b4078e3744f
|
cb4eb39f707995f5cfc2b54d54ed5d763f508e1e
|
refs/heads/master
| 2023-01-28T16:55:06.184132
| 2020-12-15T02:18:32
| 2020-12-15T02:18:32
| 316,624,372
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,152
|
py
|
from __future__ import print_function
from misc.config import Config
from dataset_classification import IUDataset, build_dataset
from trainer_classification import JoImTeR as trainer
import os
import sys
import time
import random
import pprint
import datetime
import dateutil.tz
import argparse
import numpy as np
import pandas as pd
import torch
import torchvision.transforms as transforms
import pickle
dir_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(dir_path)
cfg = Config()
def parse_args():
parser = argparse.ArgumentParser(description='')
parser.add_argument('--gpu', dest='gpu_id', type=int, default=-1)
parser.add_argument('--data_dir', dest='data_dir', type=str, default='')
args = parser.parse_args()
return args
if __name__ == "__main__":
args = parse_args()
if args.gpu_id != -1:
cfg.GPU_ID = args.gpu_id
else:
cfg.CUDA = False
if args.data_dir != '':
cfg.DATA_DIR = args.data_dir
torch.manual_seed(cfg.seed)
if cfg.CUDA:
torch.cuda.manual_seed_all(cfg.seed)
########################################
now = datetime.datetime.now(dateutil.tz.tzlocal())
timestamp = now.strftime('%Y_%m_%d_%H_%M_%S')
# LAMBDA_FT,LAMBDA_FI,LAMBDA_DAMSM=01,50,10
output_dir = '../output/%s_%s_%s'%(cfg.DATASET_NAME, cfg.CONFIG_NAME, timestamp)
data_set = build_dataset('train', cfg)
train_loader = torch.utils.data.DataLoader(
data_set, batch_size=cfg.batch_size, drop_last=True,
shuffle=True, num_workers=cfg.num_workers)
data_set = build_dataset('val', cfg)
val_loader = torch.utils.data.DataLoader(
data_set, batch_size=cfg.val_batch_size, drop_last=False,
shuffle=False, num_workers=cfg.num_workers)
# Define models and go to train/evaluate
algo = trainer(output_dir, train_loader, val_loader)
start_t = time.time()
algo.train()
end_t = time.time()
print('Total time for training:', end_t - start_t)
|
[
"mshaikh2@buffalo.edu"
] |
mshaikh2@buffalo.edu
|
efa8b8921a7754cc8ad6ddb8d8d7f7bae7ff52b6
|
4c9ea189dac171be81ef2d72c2bbc7541e93e0a4
|
/credit_risk.py
|
b0cfd2a010acf81e76f5f09a18948982a7ac03d6
|
[] |
no_license
|
Ayush19443/Credit-risk
|
dc44a388afe2f378c8fe71df86aa216c42985149
|
cbb2aed96e93217a11d9b379318f678e524fb809
|
refs/heads/master
| 2022-12-02T19:51:26.785161
| 2020-08-15T11:49:10
| 2020-08-15T11:49:10
| 287,735,727
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,582
|
py
|
#%%
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
import pandas as pd
import numpy as np
import warnings
import time
import sys
import os
warnings.filterwarnings('ignore', category=DeprecationWarning)
warnings.filterwarnings('ignore')  # the chained assignments below trigger pandas SettingWithCopyWarning
#%%
from sklearn.tree import DecisionTreeClassifier
#%%
tf=pd.read_csv('credit_risk.csv')
#%%
tf.head()
#%%
tf.sample(12)
#%%
tf.describe()
#%%
tf.isnull().sum()
#%%
tf.Gender[tf.Gender == 'Male'] = 1
tf.Gender[tf.Gender == 'Female'] = 2
tf.Married[tf.Married == 'Yes'] = 1
tf.Married[tf.Married == 'No'] = 2
tf.Education[tf.Education == 'Graduate'] = 1
tf.Education[tf.Education == 'Not Graduate'] = 2
tf.Self_Employed[tf.Self_Employed == 'Yes'] = 1
tf.Self_Employed[tf.Self_Employed == 'No'] = 2
tf.Property_Area[tf.Property_Area == 'Rural'] = 1
tf.Property_Area[tf.Property_Area == 'Urban'] = 2
tf.Property_Area[tf.Property_Area == 'Semiurban']= 3
tf.Dependents[tf.Dependents=='3+']=3
#%%
tf.head()
#%%
import seaborn as sns
sns.distplot(tf.Gender.dropna())
#%%
train,test = train_test_split(tf, test_size=0.2, random_state=12)
#%%
clf = DecisionTreeClassifier()
#%%
train.shape
#%%
test.shape
#%%
train.isnull().sum()
#%%
def fill_Gender(tf):
median= 1
tf['Gender'].fillna(median, inplace = True)
return tf
def fill_Married(tf):
median= 1
tf['Married'].fillna(median, inplace = True)
return tf
def fill_Dependents(tf):
median= 0
tf['Dependents'].fillna(median, inplace = True)
return tf
def fill_Self_Employed(tf):
median= 2
tf['Self_Employed'].fillna(median, inplace = True)
return tf
def fill_LoanAmount(tf):
mean= 142.5717
tf['LoanAmount'].fillna(mean, inplace = True)
return tf
def fill_Loan_Amoount_Term(tf):
median= 360
tf['Loan_Amount_Term'].fillna(median, inplace = True)
return tf
def fill_Credit_Historys(tf):
median= 1
tf['Credit_History'].fillna(median, inplace = True)
return tf
def encode_feature(tf):
tf = fill_Gender(tf)
tf=fill_Married(tf)
tf=fill_Dependents(tf)
tf=fill_Self_Employed(tf)
tf=fill_LoanAmount(tf)
tf=fill_Loan_Amoount_Term(tf)
tf=fill_Credit_Historys(tf)
return(tf)
#%%
tf=encode_feature(tf)
#%%
train = encode_feature(train)
test = encode_feature(test)
#%%
def x_and_y(tf):
x = tf.drop(["Loan_Status","Loan_ID","Gender","Dependents","Property_Area","Education","Self_Employed","ApplicantIncome","CoapplicantIncome"],axis=1)
y = tf["Loan_Status"]
return x,y
x_train,y_train = x_and_y(train)
x_test,y_test = x_and_y(test)
"""
clf_entropy = DecisionTreeClassifier(
criterion = "entropy", random_state = 100,
max_depth = 3, min_samples_leaf = 5)
"""
#%%
"""
clf_entropy.fit(x_train,y_train)
return clf_entropy
"""
#%%
from sklearn.metrics import accuracy_score, confusion_matrix
from sklearn.linear_model import LogisticRegression
#%%
log_model = DecisionTreeClassifier(criterion='entropy')
log_model.fit(x_train,y_train)
prediction = log_model.predict(x_train)
score = accuracy_score(y_train,prediction)
print(score*100)
#%%
y_train.shape
#%%
x_train.columns
#%%
log_model = DecisionTreeClassifier(criterion='entropy')
log_model.fit(x_train,y_train)
prediction = log_model.predict(x_test)
score1 = accuracy_score(y_test,prediction)
print(score1)
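#%%
# Editor's sketch: a quick error breakdown for the held-out predictions
# above, using the confusion_matrix already imported from sklearn.
print(confusion_matrix(y_test, prediction))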
#%%
import seaborn as sns
sns.distplot(tf.LoanAmount.dropna())
#%%
|
[
"noreply@github.com"
] |
noreply@github.com
|
25e372cb14bdc5d7011802d05410d01a864a361a
|
7f8d2288dc8d81275269bdb8e8f196339a52d30d
|
/code/1010_solution.py
|
c14133019520efb5a27564644e2a7e131773bfda
|
[] |
no_license
|
ishaansharma/leetcode-3
|
f9cab568c31322e2bf84768264f3c644182cd470
|
9081dd3ff86409d554b0298a8152ed40a6befa96
|
refs/heads/master
| 2023-03-25T15:36:04.235650
| 2021-03-30T20:15:45
| 2021-03-30T20:15:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 229
|
py
|
from typing import List


class Solution:
def numPairsDivisibleBy60(self, time: List[int]) -> int:
count = 0
seen = [0] * 60
for t in time:
count += seen[-t % 60]
seen[t % 60] += 1
return count
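# Why this works (editor's note): seen[r] counts earlier songs whose duration
# is r mod 60; each new song t pairs with every earlier song whose remainder
# is (-t) % 60, the complement to a multiple of 60.
if __name__ == "__main__":
    # LeetCode's example: pairs (30,150), (20,100), (20,40) -> 3
    assert Solution().numPairsDivisibleBy60([30, 20, 150, 100, 40]) == 3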
|
[
"noreply@github.com"
] |
noreply@github.com
|
17d4ac7a0625e913523b131d25040cd6fe5c1260
|
8b64dba83a0f1bedf713faa0dcd96a218c80af08
|
/app/requirement/migrations/0001_initial.py
|
d8bd78c43fe4041130960411584382bf4142437c
|
[] |
no_license
|
chrxr/studyplan-test
|
818a669dd85e74dd92393c5d006e2446c16f83b1
|
791b9400302d61f65cc37e9f3912cbcc9d4c041f
|
refs/heads/main
| 2023-02-27T02:45:55.565870
| 2021-01-29T13:35:21
| 2021-01-29T13:35:21
| 329,410,529
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 491
|
py
|
# Generated by Django 3.1.5 on 2021-01-13 19:33
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Requirement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
],
),
]
|
[
"chrxr@outlook.com"
] |
chrxr@outlook.com
|
c4fb0116985e3ace94fc0fe7bbfb80ab7f53d331
|
7edb6f64afb9a9d5fd2b712faae9841d45c3a3b3
|
/monkeyAndPerformance/allCode/performanceTest/traffic/traffic.py
|
9edb99221fc8f0b920e0abebe9a4f074378baddb
|
[] |
no_license
|
Hanlen520/AppSpecialTest
|
413babbbecbeaa8e25dd1fd70dd349a1de07eb5e
|
06f69f116245162220985ad2632fbff3af72450c
|
refs/heads/master
| 2023-04-22T19:59:35.523780
| 2019-08-08T09:48:28
| 2019-08-08T09:48:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,398
|
py
|
import csv,os,time
from config.config import *
from monkeyAndPerformance.allCode.util.gettimestr import GetTimeStr
gettimestr = GetTimeStr() # instantiate GetTimeStr
# controller class
class Controller(object):
    def __init__(self):
        self.counter = RunTrafficCount # number of test iterations
        # container for the collected data
        self.alldata = [("deviceid","appversion","timestamp", "traffic")] # rows to save: timestamp and traffic
    # a single measurement pass
    def TestProcessOnce(self):
        # run the command that finds the app's process
        cmd = 'adb shell "ps | grep %s"' % AppPackageName # look up the process
content = os.popen(cmd)
result = content.readlines()
print("result:%s"% result)
print("result.length:%s" % len(result))
if len(result):
            # extract the process id
# pid = result[0].split(" ")[5]
pid = result[0].split(" ")[3]
print("result[0].split():%s" % result[0].split(" "))
print("pid:%s"% pid)
self.DeleyTime(3)
            # read the traffic counters used by this process id
            cmd = 'adb shell cat /proc/%s/net/dev'% pid # fetch traffic
content = os.popen(cmd)
traffic = content.readlines()
print("traffic:%s"% traffic)
            # parse the traffic counters
for line in traffic:
print("line:%s" % line)
if "wlan0" in line:
                    # collapse all whitespace into '#'
line = "#".join(line.split())
print("line##:%s"% line)
                    # split on '#' to get the received and transmitted bytes
receive = line.split("#")[1]
print("receive#:%s"%receive)
transmit = line.split("#")[9]
print("transmit##:%s"% transmit)
# if "eth0" in line:
# #将所有空行换成#
# line = "#".join(line.split())
# #按#号拆分,获取收到和发出的流量
# receive = line.split("#")[1]
# transmit = line.split("#")[9]
# elif "eth1" in line:
# # 将所有空行换成#
# line = "#".join(line.split())
# # 按#号拆分,获取收到和发出的流量
# receive2 = line.split("#")[1]
# transmit2 = line.split("#")[9]
#计算所有流量的之和
# alltraffic = int(receive) + int(transmit) + int(receive2) + int(transmit2)
alltraffic = int(receive) + int(transmit)
#按KB计算流量值
alltraffic = alltraffic/1024
currenttime = self.GetCurrentTime() # 获取当前时间
#将获取到的数据存储到数组中
self.alldata.append((TestDeviceID,AppVersion,currenttime,alltraffic)) # 写入数据到self.alldata
else:
print("没有获取到相应进程,请确定打开相应的app")
    # sleep helper
def DeleyTime(self,delaytime):
delaytime = int(delaytime)
        time.sleep(delaytime) # wait delaytime seconds
print("等待%s秒..."% delaytime)
    # run the measurement pass repeatedly
def RunMore(self):
        # put the phone into a non-charging state
cmd = 'adb shell dumpsys battery set status 1'
os.popen(cmd)
self.DeleyTime(3)
print("循环开始时间:%s" % self.GetCurrentTime() )
        while self.counter>0: # while iterations remain
            self.TestProcessOnce() # run one measurement pass
            self.counter = self.counter -1 # decrement the counter
            self.DeleyTime(5) # sample a value every 5 seconds
gettimestr.outPutMyLog("流量统计剩余运行次数为:%s" % self.counter)
print("循环结束时间:%s" % self.GetCurrentTime())
    # timestamp used when storing data
def GetCurrentTime(self):
        currenttime = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime()) # get the current time
        return currenttime # return the current time
    # current time as a compact string
def GetCurrentTimeString(self):
        currenttime = time.strftime("%Y%m%d%H%M%S", time.localtime()) # get the current time
        return currenttime # return the current time
    # save the collected data to a CSV file
def SaveDataToCSV(self,timestr):
basedir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) + "/" + "codeResult"
nyrsfmdir = gettimestr.createNYRSFMdir(basedir,timestr)
csvfile = "%s/%s_%s" % (nyrsfmdir,timestr,AppTrafficCSVFile)
        opencsvfile = open(csvfile, "w",newline="") # newline="" avoids blank lines when writing CSV in Python 3
        writercsv = csv.writer(opencsvfile) # create the CSV writer
        writercsv.writerows(self.alldata) # write every collected row to the CSV
        opencsvfile.close() # close the file
print("数据:%s" % self.alldata)
print("数据保存路径:%s"% csvfile)
print("流量消耗:最后一次的流量值减去第一次的流量值,就是本次操作消耗的流量值")
    def run(self,timestr): # run the measurement and save the results
self.RunMore()
self.SaveDataToCSV(timestr)
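# Editor's sketch of how a /proc/<pid>/net/dev line is reduced to the two
# counters used above; the sample line and its byte values are made up.
def _demo_parse_wlan0():
    line = " wlan0: 1048576 900 0 0 0 0 0 0 524288 700 0 0 0 0 0 0"
    line = "#".join(line.split())       # collapse whitespace into '#'
    receive = int(line.split("#")[1])   # bytes received
    transmit = int(line.split("#")[9])  # bytes transmitted
    return (receive + transmit) / 1024  # total traffic in KB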
if __name__ == "__main__":
timestr = gettimestr.getTimeStr()
controller = Controller()
controller.run(timestr)
|
[
"410287958@qq.com"
] |
410287958@qq.com
|
bd8fcf8cfabbf0d617f23ee7dd8b9d937d3551d1
|
7be178ac527253028a5d3009bac33d2e7e9533cd
|
/alphabet.py
|
ed7eda8cf9f8a6508fa0fb5a336a328d5f56c609
|
[] |
no_license
|
mrajeshraj/guvi
|
27f745ea7a24ba8ecb465d2cbc1373dedef7a776
|
275cf24f1f1fe11571cec88c12b64bb80ad4c93a
|
refs/heads/master
| 2020-05-25T23:02:11.526682
| 2019-06-17T17:05:33
| 2019-06-17T17:05:33
| 188,025,966
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 111
|
py
|
ch=input()
if ch.isalpha():
print("Alphabet")
else:
print("Not an Alphabet")
|
[
"noreply@github.com"
] |
noreply@github.com
|
6cf223258093e6753c9230a394a1b18bb4ec23de
|
6f7474a0e586e5aba19fef5ad1d02ba35f596cc7
|
/exam_16_26/p_know_19.py
|
0edd5f5feb8290f12cac42036535c777ba7c2003
|
[] |
no_license
|
dainioska/python-opencv
|
690b71bf843d9e486a3a7152fe733e97720e86ff
|
12c2baa67a9b462448233a1569b9d43fef60e139
|
refs/heads/main
| 2023-04-01T16:23:27.588108
| 2021-04-15T15:54:03
| 2021-04-15T15:54:03
| 343,664,442
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 399
|
py
|
# displaying an OpenCV image with matplotlib (BGR converted to RGB)
import cv2
import numpy as np
from matplotlib import pyplot as plt
img = cv2.imread('samples/smarties.png')
img = cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
titles = ['image']
images = [img]
for i in range(1):
plt.subplot(1, 1, i+1), plt.imshow(images[i], 'gray')
plt.title(titles[i])
plt.xticks([]), plt.yticks([])
plt.show()
cv2.waitKey(0)
cv2.destroyAllWindows()
|
[
"dainioshka@gmail.com"
] |
dainioshka@gmail.com
|
cc27af1c9bbe4c95be19d7ed9110de8cba144d7d
|
ad21a8962f2e375cffbfb5cd3e228357c145d95e
|
/isbn_calculator/test_isbn_calculator.py
|
1593f0195b8330a811f9753947b3a4ec74792b33
|
[] |
no_license
|
sabrown89/python-practice-problems
|
2301c93486ec1c04ded562c635090eb1f4705c94
|
678dcc04f8b1e481b8d4d8a5d4633a9a6d434ead
|
refs/heads/master
| 2022-11-22T02:55:58.808825
| 2021-08-11T16:12:34
| 2021-08-11T16:12:34
| 117,746,775
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,059
|
py
|
import hiker
def test_remove_hyphens_and_spaces():
douglas = hiker.Hiker('978-0-13- 149505- 0')
assert douglas.isbn == '9780131495050'
def test_isbn_is_thirteen_digits():
douglas = hiker.Hiker('978-0-13- 149505- 0')
assert douglas._isbn_is_correct_number_of_digits() is True
def test_isbn_is_correct_number_digits_is_false_if_not_all_digits():
douglas = hiker.Hiker('978-0-1A- 149505- 0')
assert douglas._isbn_is_correct_number_of_digits() is False
def test_isbn_is_correct_number_of_digits_is_false_if_not_length_thirteen():
douglas = hiker.Hiker('97- 149505- 0')
assert douglas._isbn_is_correct_number_of_digits() is False
def test_is_isbn_thirteen_returns_false():
douglas = hiker.Hiker('97- 149505- 0')
assert douglas.is_isbn() is False
def test_calculate_sums():
douglas = hiker.Hiker('978-0-13- 149505- 4')
assert douglas._calculate_sums_for_isbn_thirteen() == 100
def test_check_digit():
douglas = hiker.Hiker('978-0-13- 149505- 4')
assert douglas._check_digit() == 4
def test_is_isbn_thirteen_true():
douglas = hiker.Hiker('978-0-13- 149505- 0')
assert douglas.is_isbn() is True
def test_is_isbn_thirteen_true_another_number():
douglas = hiker.Hiker('978-0596809485')
assert douglas.is_isbn() is True
def test_isbn_calculator():
douglas = hiker.Hiker('978-0596809485')
assert douglas._isbn_calculator() == 5
def test_isbn_calculator_another():
douglas = hiker.Hiker('978-0-13- 149505- 0')
assert douglas._isbn_calculator() == 0
def test_isbn_ten_is_valid():
douglas = hiker.Hiker('0471958697', 10)
assert douglas.is_isbn() is True
def test_valid_isbn_standard_invalid():
douglas = hiker.Hiker('0471958697', 5)
assert douglas._valid_isbn_standard() is False
def test_valid_isbn_standard_valid_10():
douglas = hiker.Hiker('047195869')
assert douglas._valid_isbn_standard() is True
def test_valid_isbn_standard_valid_13():
douglas = hiker.Hiker('0471958697', 13)
assert douglas._valid_isbn_standard() is True
|
[
"scottabrown89@gmial.com"
] |
scottabrown89@gmial.com
|
f576f6b14129bdf18f73c27796621f6de5c53ec6
|
9613fbaea6fab62623b3c8f11d205a28a959c2c3
|
/25-刘杰-北京/第七周/ransac_manual.py
|
c57ea23fe2f2d42e96a83457a4947a7ca551a7d6
|
[] |
no_license
|
strongerfly/badou-Turing
|
4eeae21a5ebf1fdd4df1ffe1156958f3fa939473
|
3262f01ccc64e4cbf66be2fd43ec437eb80c8663
|
refs/heads/main
| 2023-07-25T21:09:38.696925
| 2021-09-06T14:09:53
| 2021-09-06T14:09:53
| 378,666,594
| 1
| 0
| null | 2021-06-20T14:34:54
| 2021-06-20T14:34:54
| null |
UTF-8
|
Python
| false
| false
| 2,572
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@Project :badou-Turing
@File :ransac_manual.py
@Author :luigi
@Date :2021/8/2 下午4:02
'''
import copy
import numpy as np
import matplotlib.pyplot as plt
class liner():
"""定义线性类"""
# 通过最小二乘法拟合线性关系
def fit(self, data):
        x = data[:, 0]
        y = data[:, 1]
A = np.vstack([x, np.ones(len(x))]).T
self.m, self.c = np.linalg.lstsq(A, y, rcond=None)[0]
return self.m, self.c
    # linear prediction
def predict(self, X):
return X * self.m + self.c
def ransac(data, model, sample_number, epoch, threshold):
""" ransac算法实现
:param data: 数据集
:type data: np.ndarray
:param model: 模型
:type model: class type
:param sample_number: 随机采样样本数
:type sample_number: int
:param epoch: 迭代次数
:type epoch: int
:param threshold: 判断内群的阈值
:type threshold: int
:return: 模型
:rtype: class type
"""
max = 0
target = None
for i in range(epoch):
        # pick sample_number random points as the hypothetical inlier set
dataIndex = np.arange(data.shape[0])
        dataIndexRandomk = np.random.choice(dataIndex, sample_number, replace=False)  # sample without replacement so the points are distinct
dataRandomK = data[dataIndexRandomk]
        # use every point except the k sampled ones to validate the model
        # option 1: list comprehension
        # dataRandomExcept = data[[i for i in dataIndex if i not in dataIndexRandomk]]
        # option 2: numpy boolean mask
mask = np.ones(data.shape[0], dtype=bool)
mask[dataIndexRandomk] = False
dataRandomExcept = data[mask]
valX = dataRandomExcept[:, 0]
valY = dataRandomExcept[:, 1]
        # fit the model
model.fit(dataRandomK)
        # predict on the validation points
predictY = model.predict(valX)
        # absolute residuals as the loss
cost = np.absolute(valY - predictY)
        # count the inliers
count = np.sum(cost <= threshold)
if count > max:
max = count
print('max is:{}'.format(count))
            target = copy.deepcopy(model)  # snapshot the best parameters; later fits mutate the shared model
return target
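# Editor's sketch: synthetic data with a genuine linear trend plus outliers,
# so the inlier count is meaningful (main() below fits pure random noise).
# Every value here is illustrative, not from the original.
def make_synthetic_data(n=100, outlier_ratio=0.3):
    x = np.random.uniform(1, 100, n)
    y = 2 * x + 3 + np.random.normal(0, 2, n)           # y = 2x + 3 plus noise
    idx = np.random.choice(n, int(n * outlier_ratio), replace=False)
    y[idx] += np.random.uniform(-80, 80, len(idx))      # corrupt a subset
    return np.column_stack([x, y])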
def main():
model = liner()
data = np.random.randint(1, 100, (100, 2))
sample_k = 5
epoch = 10000
threshold = 5
    model = ransac(data, model, sample_k, epoch, threshold)
x = data[:, 0]
y = data[:, 1]
plt.plot(x, y, 'o', label='original data', markersize=10)
plt.plot(x, model.m * x + model.c, 'r', label='fitter line')
plt.show()
if __name__ == '__main__':
main()
|
[
"86213076+luigide2020@users.noreply.github.com"
] |
86213076+luigide2020@users.noreply.github.com
|
cb840373802f4a2f053aa9b6db014d5a830284dd
|
404cb0431675327a751f7a6f422f53288a92b85b
|
/chirp/library/order_test.py
|
33fccabf573816f97b45246bff10199393e598bb
|
[
"Apache-2.0"
] |
permissive
|
chirpradio/chirpradio-machine
|
ade94d7ac9ded65f91e1b3845be408723c0501da
|
6fea6a87f2eb3cfac2a47831892c9ce02163b03b
|
refs/heads/master
| 2023-09-01T02:57:07.749370
| 2023-08-28T23:57:46
| 2023-08-28T23:57:46
| 2,330,078
| 9
| 10
|
Apache-2.0
| 2018-03-16T01:26:29
| 2011-09-05T19:10:48
|
Python
|
UTF-8
|
Python
| false
| false
| 3,525
|
py
|
#!/usr/bin/env python
import unittest
import mutagen.id3
from chirp.library import order
class OrderTest(unittest.TestCase):
def test_decode(self):
test_cases = (("1", 1, None),
(" 6", 6, None),
("006", 6, None),
("1/2", 1, 2),
("3 of 7", 3, 7),
("03anything04", 3, 4))
for text, order_num, max_num in test_cases:
self.assertEqual((order_num, max_num), order.decode(text))
# These should not be parseable.
error_test_cases = ("", "xxx", "0", "-1", "0/3", "3/", "3/0", "6/5",
"-1/4", "2/-1", "2/-", "3-4", "3/0")
for text in error_test_cases:
self.assertRaises(order.BadOrderError, order.decode, text)
def test_encode(self):
test_cases = ((1, 3, "1/3"), (7, None, "7"))
for order_num, total_num, expected_text in test_cases:
self.assertEqual(expected_text, order.encode(order_num, total_num))
error_test_cases = ((7, 5), (0, 3), (-1, 3), (4, 0), (4, -1))
for order_num, total_num in error_test_cases:
self.assertRaises(order.BadOrderError,
order.encode, order_num, total_num)
def test_standardize_str(self):
self.assertEqual("3", order.standardize_str(" 3 "))
self.assertEqual("3/7", order.standardize_str("3 of 7"))
def test_standardize(self):
tag = mutagen.id3.TRCK(text=["3 of 7"])
order_num, max_num = order.standardize(tag)
self.assertEqual(["3/7"], tag.text)
self.assertEqual(3, order_num)
self.assertEqual(7, max_num)
def test_is_archival(self):
self.assertTrue(order.is_archival("3/7"))
self.assertFalse(order.is_archival("bad"))
self.assertFalse(order.is_archival("3"))
self.assertFalse(order.is_archival("3 of 7"))
self.assertFalse(order.is_archival("7/3"))
self.assertFalse(order.is_archival(" 3/7"))
def test_verify_and_standardize_str_list(self):
# Check the simplest valid case.
self.assertEqual(["1/1"], order.verify_and_standardize_str_list(["1"]))
# Check an already-standardized set.
self.assertEqual(
["1/4", "3/4", "2/4", "4/4"],
order.verify_and_standardize_str_list(
["1/4", "3/4", "2/4", "4/4"]))
# Check strings without a max number.
self.assertEqual(
["1/4", "3/4", "2/4", "4/4"],
order.verify_and_standardize_str_list(["1", "3", "2", "4"]))
# Check mixed formats.
self.assertEqual(
["1/4", "3/4", "2/4", "4/4"],
order.verify_and_standardize_str_list(["1", "3/4", "2", "4 of 4"]))
# Check empty list.
self.assertRaises(order.BadOrderError,
order.verify_and_standardize_str_list, [])
# Check garbage in list.
self.assertRaises(order.BadOrderError,
order.verify_and_standardize_str_list, ["xxx"])
# Check treatment of gaps.
self.assertRaises(order.BadOrderError,
order.verify_and_standardize_str_list,
["1", "2", "4"])
# Check bad max number.
self.assertRaises(order.BadOrderError,
order.verify_and_standardize_str_list,
["1/5", "3/5", "2/5", "4/5"])
if __name__ == "__main__":
unittest.main()
|
[
"kumar.mcmillan@gmail.com"
] |
kumar.mcmillan@gmail.com
|
dd3f9af7d6f306b028114ba50b54b6d9a3c66546
|
eff6be2d99ba83a1080cdacb2094fa6aaec1477c
|
/.venv/bin/jupyter-troubleshoot
|
3bbf5b79a436aed52757e87445950d074d852a92
|
[] |
no_license
|
CFtriksX/Embeded_AI_A1
|
1b1b5559faf6e7569ba225b71ca60406ccf49a39
|
d2fc13305293677d2f529ed86ce06f4f7c0afc0d
|
refs/heads/main
| 2023-04-13T21:19:43.129144
| 2021-04-20T17:17:09
| 2021-04-20T17:17:09
| 359,892,756
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 263
|
#!/home/paulgelas/assignment/assignment1/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from jupyter_core.troubleshoot import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"paulgelas@desktop-c57a2a9.home"
] |
paulgelas@desktop-c57a2a9.home
|
|
3862e819cf78b3797b247c2104e6c3624d5586fa
|
d1c4ea86a7148ceb4068c27ba6b51cedd3abd46f
|
/Natural_Language_Process/Lab1/src/utils/pre-treat.py
|
f7a50c81777b0241b55c07249f9ac23ecc78e9cc
|
[] |
no_license
|
zirui-HIT/HIT_Lab
|
dfbf756e7951e4bf65ad96b561f6fb0c5d88dc59
|
e74550450d7b7362bd873a613f28208401a26c45
|
refs/heads/main
| 2023-06-05T14:00:09.423997
| 2021-07-02T06:34:38
| 2021-07-02T06:34:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,247
|
py
|
from copy import deepcopy
def simplify_data(data_path: str, save_path: str):
target = open(save_path, 'w')
with open(data_path) as f:
for line in f:
words = line.split()
for i in range(len(words)):
target.write((words[i].split('/'))[0].strip('[').strip(']') +
' ')
            target.write('\n')
    target.close()
def check_not_mark(word: str) -> bool:
marks = ["。", ",", "《", "》", "“", "”", "、", "!",
"?", "‘", "’", "(", ")", "[", "]", ";", ":"]
if word in marks:
return False
return True
def get_dictionary(data_path: str, dictionary_path: str):
single_dictionary = []
double_dictionary = []
single_cnt = {}
double_cnt = {}
with open(data_path, encoding='utf-8') as f:
for line in f:
words = line.split()
for i in range(len(words)):
single_dictionary.append(
(words[i].split('/'))[0].strip('[').strip(']'))
last = len(single_dictionary) - 1
if i != 0:
pre_word = deepcopy(single_dictionary[last - 1])
current_word = deepcopy(single_dictionary[last])
if check_not_mark(pre_word) and check_not_mark(
current_word):
double_dictionary.append(pre_word + ' ' + current_word)
for w in single_dictionary:
single_cnt[w] = single_cnt.get(w, 0) + 1
for w in double_dictionary:
double_cnt[w] = double_cnt.get(w, 0) + 1
single_dictionary = list(set(single_dictionary))
single_dictionary = sorted(single_dictionary)
double_dictionary = list(set(double_dictionary))
double_dictionary = sorted(double_dictionary)
    with open(dictionary_path, 'w', encoding='utf-8') as f:
for w in single_dictionary:
f.write('1' + ' ' + str(single_cnt[w]) + ' ' + w + '\n')
for w in double_dictionary:
f.write('2' + ' ' + str(double_cnt[w]) + ' ' + w + '\n')
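# The dictionary file written above holds one entry per line in the form
# "<n-gram order> <count> <tokens>"; the counts below are illustrative only:
#   1 42 人民
#   2 7 中国 人民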
if __name__ == '__main__':
# simplify_data('../data/199801_seg&pos.txt', '../result/simplified.txt')
get_dictionary('../data/199801_seg&pos.txt', '../result/dic.txt')
|
[
"WDZRMPCBIT@163.com"
] |
WDZRMPCBIT@163.com
|
b8d7b70e9e7650a1684d15ecd33a819b51bcfc93
|
9a425f153816cd206451876b8da570a9446d76c4
|
/reservations/admin.py
|
0a1968bae83ee5649193989b193a5268f3beb9cb
|
[] |
no_license
|
salhi100/airbnb-clone-2
|
cea837f8dd24ab4e634ff7e2f0ce474581313642
|
685477bedc6fed18e40fb53febf22e4393796f4f
|
refs/heads/master
| 2023-01-31T11:31:37.190244
| 2020-01-09T04:53:35
| 2020-01-09T04:53:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 500
|
py
|
from django.contrib import admin
from . import models
@admin.register(models.Reservation)
class ReservationAdmin(admin.ModelAdmin):
""" Reservation Admin Definition """
list_display = (
"room",
"status",
"check_in",
"check_out",
"guest",
"in_progress",
"is_finished",
)
list_filter = ("status",)
@admin.register(models.BookedDay)
class BookedDayAdmin(admin.ModelAdmin):
    """ BookedDay Admin Definition """
    list_display = ("day", "reservation")
|
[
"53186618+hanulbom@users.noreply.github.com"
] |
53186618+hanulbom@users.noreply.github.com
|
3e637f3de409c402ebe0b44c9d3ce320ed721c64
|
c7d39fd93d6c616cf6adc005bab9298947c305f6
|
/library/sns_command.py
|
c8b876f261a3be3ad63b4cb3089ece8fd7da7cae
|
[
"Apache-2.0"
] |
permissive
|
mareckis/SNS-tests
|
810d80612ced7b0dabd7a907c8c5169ba24c0e72
|
e939c5830067671a217bbf6d878b8ba3481bcd32
|
refs/heads/master
| 2023-01-22T04:49:05.179410
| 2020-12-02T19:55:22
| 2020-12-02T19:55:22
| 259,371,654
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,053
|
py
|
#!/usr/bin/python
# Copyright: (c) 2018, Stormshield https://www.stormshield.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: sns_command
short_description: API client to configure Stormshield Network Security appliances
description:
This module executes configuration commands or scripts on the remote appliance.
Configuration API reference: https://documentation.stormshield.eu/SNS/v3/en/Content/CLI_Serverd_Commands_reference_Guide_v3/Introduction.htm
options:
    command:
        description:
            - Single configuration command to execute (mutually exclusive with script)
    script:
        description:
            - Configuration script to execute
    expect_disconnect:
        description:
            - Set to True if the script makes the remote server disconnect (i.e. when installing a firmware update)
    force_modify:
        description:
            - Set to True to disconnect other administrators already connected with the modify privilege.
appliance:
description:
- appliance connection's parameters (host, port, user, password, sslverifypeer, sslverifyhost, cabundle, usercert, proxy)
author:
- Remi Pauchet (@stormshield)
notes:
- This module requires python-SNS-API library
'''
EXAMPLES = '''
- name: Get appliance properties
sns_command:
script: "SYSTEM PROPERTY"
appliance:
host: myappliance.local
password: mypassword
delegate_to: localhost
- name: Update firmware with a local update file
sns_command:
script: |
SYSTEM UPDATE UPLOAD < /tmp/fwupd-SNS-3.7.1-amd64-M.maj
SYSTEM UPDATE ACTIVATE
expect_disconnect: True
appliance:
host: myappliance.local
password: mypassword
delegate_to: localhost
'''
RETURN = '''
ret:
description: last command return code
returned: changed
type: int
sample: 100
output:
description: script execution output
returned: changed
type: string
sample: |
> CONFIG NTP SERVER LIST
101 code=00a01000 msg="Begin" format="section_line"
[Result]
name=fr.pool.ntp.org keynum=none type=host
100 code=00a00100 msg="Ok"
> HELP
101 code=00a01000 msg="Begin" format="raw"
AUTH : User authentication
CHPWD : Return if it's necessary to update password or not
CONFIG : Firewall configuration functions
GLOBALADMIN : Global administration
HA : HA functions
HELP : Display available commands
LIST : Display the list of connected users, show user rights (Level) and rights for current session (SessionLevel).
LOG : Log related functions.Everywhere a timezone is needed, if not specified the command is treated with firewall timezone setting.
MODIFY : Get / lose the modify or the mon_write right
MONITOR : Monitor related functions
NOP : Do nothing but avoid disconnection from server.
PKI : show or update the pki
QUIT : Log off
REPORT : Handling of reports
SYSTEM : System commands
USER : User related functions
VERSION : Display server version
100 code=00a00100 msg="Ok"
result:
description: last command output
returned: changed
type: string
sample: |
101 code=00a01000 msg="Begin" format="section_line"
[Result]
name=ntp1.stormshieldcs.eu keynum=none type=host
name=ntp2.stormshieldcs.eu keynum=none type=host
100 code=00a00100 msg="Ok"
data:
description: last parsed command result
type: complex
sample: |
{'Result': [
{'name': 'ntp1.stormshieldcs.eu', 'keynum': 'none', 'type': 'host'},
{'name': 'ntp2.stormshieldcs.eu', 'keynum': 'none', 'type': 'host'}
]}
'''
import re
from stormshield.sns.sslclient import SSLClient
from ansible.module_utils.basic import AnsibleModule
def main():
module = AnsibleModule(
argument_spec={
"command": {"required": False, "type": "str"},
"script": {"required": False, "type": "str"},
"expect_disconnect": {"required": False, "type":"bool", "default":False},
"force_modify": {"required": False, "type":"bool", "default":False},
"appliance": {
"required": True, "type": "dict",
"options": {
"host": {"required": True, "type": "str"},
"ip": {"required": False, "type": "str"},
"port": {"required": False, "type": "int", "default": 443},
"user": {"required": False, "type": "str", "default": "admin"},
"password": {"required": False, "type": "str"},
"sslverifypeer": {"required": False, "type": "bool", "default": True},
"sslverifyhost": {"required": False, "type": "bool", "default": True},
"cabundle": {"required": False, "type": "str"},
"usercert": {"required": False, "type": "str"},
"proxy": {"required": False, "type": "str"},
}
}
}
)
EMPTY_RE = re.compile(r'^\s*$')
command = module.params['command']
script = module.params['script']
expect_disconnect = module.params['expect_disconnect']
force_modify = module.params['force_modify']
if command is None and script is None:
module.fail_json(msg="A command or a script is required")
if command is not None and script is not None:
module.fail_json(msg="Got both command and script")
try:
client = SSLClient(
host=module.params['appliance']['host'],
ip=module.params['appliance']['ip'],
port=module.params['appliance']['port'],
user=module.params['appliance']['user'],
password=module.params['appliance']['password'],
sslverifypeer=module.params['appliance']['sslverifypeer'],
sslverifyhost=module.params['appliance']['sslverifyhost'],
cabundle=module.params['appliance']['cabundle'],
usercert=module.params['appliance']['usercert'],
proxy=module.params['appliance']['proxy'],
autoconnect=False)
except Exception as exception:
module.fail_json(msg=str(exception))
try:
client.connect()
except Exception as exception:
module.fail_json(msg=str(exception))
if force_modify:
try:
response = client.send_command("MODIFY FORCE ON")
except Exception as exception:
client.disconnect()
module.fail_json(msg="Can't take Modify privilege: {}".format(str(exception)))
if response.ret >= 200:
client.disconnect()
module.fail_json(msg="Can't take Modify privilege", result=response.output,
data=response.parser.serialize_data(), ret=response.ret)
if command is not None:
# execute single command
try:
response = client.send_command(command)
except Exception as exception:
client.disconnect()
module.fail_json(msg=str(exception))
client.disconnect()
module.exit_json(changed=True, result=response.output,
data=response.parser.serialize_data(), ret=response.ret)
else:
# execute script
output = ""
success = True
need_reboot = False
for command in script.splitlines():
command = command.strip('\r\n')
output += command + "\n"
if command.startswith('#'):
continue
if EMPTY_RE.match(command):
continue
try:
response = client.send_command(command)
output += response.output + "\n"
if response.ret >= 200:
success = False
elif response.ret == client.SRV_RET_MUSTREBOOT:
need_reboot = True
except Exception as exception:
if expect_disconnect and str(exception) == "Server disconnected":
break
else:
client.disconnect()
module.fail_json(msg=str(exception), output=output, success=False, need_reboot = need_reboot)
client.disconnect()
if success:
module.exit_json(changed=True, output=output, success=True, need_reboot = need_reboot)
else:
module.fail_json(msg="Errors during the script execution", output=output, success=False, need_reboot = need_reboot)
if __name__ == '__main__':
main()
|
[
"remi.pauchet@stormshield.eu"
] |
remi.pauchet@stormshield.eu
|
ed52f2978ed7fd109f6361a4a2a3228b37559c87
|
09f09b393b8c909a22bbb84a21a50e007e35556e
|
/control.py
|
d576303e30aa86b271f597b6d2cd13a0a71b4a5d
|
[] |
no_license
|
Henry-Hwang/audio-tools
|
abd61a0cf865ebeba3c42c40493efc7d22e35ffe
|
b19aac94b94577ca1ef0951cbbd4f83ab08ab3d3
|
refs/heads/master
| 2020-03-22T21:58:54.831972
| 2018-08-19T15:48:47
| 2018-08-19T15:48:47
| 140,726,006
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 863
|
py
|
import os


class Codec(object):
    """Query audio codec and sound card information from an adb-connected device."""

    name = ""

    def __init__(self):
        self.codecs = []
        self.snd_cards = []

    def get_codecs(self):
        # Read the registered codecs from the ASoC debugfs node.
        cmdstr = "adb shell cat /d/asoc/codecs"
        print(cmdstr)
        result = os.popen(cmdstr)
        ret = result.read()
        self.codecs = ret.split('\n')
        for codec in self.codecs:
            print(codec)

    def get_snd_cards(self):
        # Read the ALSA sound card list.
        cmdstr = "adb shell cat /proc/asound/cards"
        print(cmdstr)
        result = os.popen(cmdstr)
        ret = result.read()
        snds = ret.split('\n')
        self.snd_cards.append(snds[1].strip())
        for card in self.snd_cards:
            print(card)

    def get_dais(self):
        pass
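

# A minimal usage sketch (assumes an adb-connected device exposing
# /d/asoc/codecs and /proc/asound/cards):
if __name__ == '__main__':
    codec = Codec()
    codec.get_codecs()
    codec.get_snd_cards()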
|
[
"henry.huang@cirrus.com"
] |
henry.huang@cirrus.com
|
6db98282426351cd2e6b5f2fb0f62ec064600def
|
a0215172ddf6663916b15cea3879e5b0ebdfddd9
|
/scripts/yum_pkglist_from_ks.py
|
797dbaad45ebdaaae277b2bcf954298c6fd09579
|
[
"Apache-2.0"
] |
permissive
|
perfsonar/toolkit-building
|
fa502a9ec04ba951a75a82c5b02ae3db69651bca
|
aca87d58649d3bc95c5b6768bf307702331f2535
|
refs/heads/master
| 2022-06-14T03:36:20.681776
| 2020-10-30T20:05:29
| 2020-10-30T20:05:29
| 32,428,947
| 2
| 3
| null | 2016-07-22T12:59:48
| 2015-03-18T00:43:47
|
Shell
|
UTF-8
|
Python
| false
| false
| 4,393
|
py
|
#########################################################################################
# Name: yum_pkglist_from_ks.py
# Usage: python yum_pkglist_from_ks.py [options] kickstart outfile
# Description:
# This script takes a kickstart file, extracts the package list, and finds all the
# dependencies in the dependency tree for those packages. This list is then output to a
# file with each package name on a line. The output file can be passed to a tool such as
# yum downloader to download all the packages needed for the kickstart. This can be
# especially useful when building custom Linux ISOs.
########################################################################################
import yum
import optparse
import sys
################
# Setup CLI opts
################
parser = optparse.OptionParser(usage="python %prog [options] kickstart outfile")
parser.add_option('-i', '--installroot', dest="installroot", help="Install root for yum. Useful in chroot environments. Defaults to '/'.")
(options, args) = parser.parse_args()
if len(args) != 2:
    parser.print_help()
    sys.exit(1)
kickstart_path = args[0]
outfile = args[1]
###################
# Parse Kickstart
###################
kickstart = open(kickstart_path)
found_packages = False
input_pkg_names = []
input_pkg_groups = []
for line in kickstart:
line = line.rstrip().lstrip()
if not line:
continue
elif line.startswith("#"):
continue
elif line.startswith("%end"):
break
elif found_packages:
if line.startswith("@"):
input_pkg_groups.append(line.replace("@", ""))
else:
input_pkg_names.append(line)
elif line.startswith("%packages"):
found_packages = True
###################
# Initialize yum
###################
yb = yum.YumBase()
yb.conf.assumeyes = True
if options.installroot:
yb.conf.installroot = options.installroot
############################
# Form initial package lists
############################
raw_pkg_names = {}
output_pkg_names = []
missing_pkg_names = []
pkg_names = input_pkg_names
##
# Expand package groups and add them to the initial package list
for input_pkg_group in input_pkg_groups:
g = yb.comps.return_group(input_pkg_group)
for p in g.packages:
if p not in pkg_names:
pkg_names.append(p)
############################
# Walk the dependency tree
############################
while pkg_names:
    pkg_objs = []
    while pkg_names:
        pkg_name = pkg_names.pop()
        ##
        # searchProvides allows us to look for packages in lots of different forms,
        # e.g. perl(LWP) or perl-LWP
        results = yb.pkgSack.searchProvides(name=pkg_name)
        if not results:
            if pkg_name not in missing_pkg_names:
                ##
                # If we didn't find it, it may not be a big deal. Make sure we mark
                # it as visited though so we don't loop forever
                missing_pkg_names.append(pkg_name)
            continue
        for r in results:
            # use r.name to normalize the package name to what yum actually calls it
            raw_pkg_names[r.name] = 1
        ##
        # Add pkg_name to the list so we also track searches we've already done
        # where a specific package name was not given, e.g. perl(LWP) vs perl-LWP
        output_pkg_names.append(pkg_name)
        pkg_objs.append(results[0])
    ##
    # For each package found, go through its dependencies and find ones we haven't seen yet
    deps = yb.findDeps(pkg_objs)
    for parent_pkg in deps:
        for dep in deps[parent_pkg]:
            if (dep[0] not in output_pkg_names) and (dep[0] not in missing_pkg_names) and (dep[0] not in pkg_names):
                pkg_names.append(dep[0])
################
# Output to file
################
fout = open(outfile, "w")
##
# Print out the package names as we saw them in kickstart and dependency list except for
# names like perl(LWP), libX.so, /usr/bin/python that yumdownloader won't take. This may
# be overkill and lead to some duplicates in the list, but ensures we get all we need
for r in output_pkg_names:
if r.startswith("/"):
continue
elif "." in r:
continue
elif "(" in r:
continue
fout.write("%s\n" % r)
##
# Print the nicely formatted package names
for r in raw_pkg_names:
fout.write("%s\n" % r)
fout.close()
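
##
# Example invocation (paths illustrative, not from the original script); the
# resulting package list can be fed to yumdownloader as described above:
#   python yum_pkglist_from_ks.py -i /mnt/buildroot ks.cfg packages.txt
#   yumdownloader $(cat packages.txt)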
|
[
"andy@es.net"
] |
andy@es.net
|
4f188c997813865f09a503591890d02c54d164c7
|
3db0c85f582dafd3b6c16543275953a2e22f1276
|
/graph.py
|
b186c22bf45d1d61126a1859b4493c139b526f10
|
[
"MIT"
] |
permissive
|
BartMassey/nb-misc
|
005ee943f962a05a6c340f8109792dd00b1de077
|
ec8f6fdba200fcb4816e170c1517899f1c03db04
|
refs/heads/master
| 2021-01-01T16:40:34.234512
| 2014-09-02T18:46:19
| 2014-09-02T18:46:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 487
|
py
|
# Copyright © 2014 Bart Massey
# Markable graph node class.
class Node(object):
def __init__(self, label):
self.label = label
self.marked = False
def clear_mark(self):
self.marked = False
def set_mark(self):
self.marked = True
def is_marked(self):
return self.marked
class Graph(object):
def __init__(self, nodes, edges, weights):
self.nodes = nodes
self.edges = edges
self.weights = weights
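

# A minimal usage sketch (the labels and single edge below are illustrative,
# not from the original file): build a two-node graph and toggle marks.
if __name__ == '__main__':
    a, b = Node("a"), Node("b")
    g = Graph([a, b], [(a, b)], {(a, b): 1})
    a.set_mark()
    assert a.is_marked() and not b.is_marked()
    a.clear_mark()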
|
[
"bart@cs.pdx.edu"
] |
bart@cs.pdx.edu
|