blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 257 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a8b5ba72724f1f79ee0c9186191a238c4b463315 | 27a4208c86693ea1da9abd485e69f5db89c9dcf9 | /tests/irteusgl9-29_slime2-22_color.py | 8c9f0c5764c6af10ffccd86fd7e914fbf0538957 | [
"BSD-3-Clause"
] | permissive | Affonso-Gui/euslime | b70eec1766680cfbbb6fd94e22d52eb3494c4002 | cc457db9b839073fe95d01e93325afbe1f8fa996 | refs/heads/devel | 2023-01-23T21:59:58.582923 | 2022-12-22T10:34:07 | 2022-12-22T10:34:07 | 481,096,158 | 0 | 0 | BSD-3-Clause | 2022-05-12T07:52:30 | 2022-04-13T06:19:20 | Python | UTF-8 | Python | false | false | 84 | py | from irteusgl import irteusgl
class irteusgl_color(irteusgl):
USE_COLOR = True
| [
"guilherme.c.affonso@gmail.com"
] | guilherme.c.affonso@gmail.com |
794f3abbe9fca363cc78d142151697bd2f6917bf | 9bf5d10a4644ebb4b788233cae2e82ad9515d433 | /Bank/settings.py | c7b4ff3d587217be41a204660a47977d685de06f | [] | no_license | Mohitkashyap123/Bank | 2ea3d808406d1fa06a7a0e9e2a005d45dc53810f | 8f6d3b509a6945ba7eaecedc0fe5287bc769c8f6 | refs/heads/master | 2022-12-17T03:39:29.581970 | 2020-09-26T07:04:53 | 2020-09-26T07:04:53 | 276,882,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,121 | py | """
Django settings for Bank project.
Generated by 'django-admin startproject' using Django 3.0.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'x^1e=5%0gik&6=5c!9a$%7t^dztyz-p0^n+*&qv7!b30h+20w!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'info.apps.InfoConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Bank.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Bank.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| [
"mohitkashyap656@gmail.com"
] | mohitkashyap656@gmail.com |
4fe043e3715e5b8438e6d887a92c5386dfd3f625 | 2044867096d52f2737102681dbb1baf6b05eeb97 | /Robot sense Localization.py | 256c0425096c8aaeed3e4ba1a4add37ba9af80cc | [] | no_license | ahmedfarid98/Robot_Localization | 4a42fbb81d83cdcca69a5d47ed2e8b1634343d81 | 4b618eef94e9e7dbb4785feb3dbfc4a34da3d791 | refs/heads/master | 2022-11-15T14:28:22.447170 | 2020-07-13T00:12:20 | 2020-07-13T00:12:20 | 277,865,945 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,555 | py | ############################### Robot Localization #########################
################# Example 1 ####################
#Modify the code below so that the function sense, which takes p and Z as inputs, will output the NON-normalized probability distribution, q,
#after multiplying the entries in p by pHit or pMiss according to the color in the corresponding cell in world.
#### Then Normalize the values in q and get the final posterior Probability.
p=[0.2, 0.2, 0.2, 0.2, 0.2] ### prior probab. are equal at the begining before any sense or measurement
world=['green', 'red', 'red', 'green', 'green']
Z='red'
pHit = 0.6 ## if robot sense red multiply it's probab. by 0.6
pMiss = 0.2 ## if robot sense green multiply it's probab. by 0.2
def sense(p, Z):
q=[]
for i in range(len(p)):
hit = (Z == world[i]) ### flag set to be used in the next line
# this flag = 1 if color is red & flag=0 if color is green
q.append(p[i]*(hit*pHit + (1-hit)*pMiss)) ## Nice Logic code to give us the posterior probability
# To normalize probabilities
s=sum(q)
for i in range(len(p)):
q[i]=q[i]/s
return q
print(sense(p,Z))
############################
############## Example 2 ####################
''' For Multiple Measurement
#measurements = ['red' , 'green']
for k in range(len(measurements)):
p=sense(p,measurements[k])
print(p)
'''
#####################################################################
| [
"noreply@github.com"
] | ahmedfarid98.noreply@github.com |
bce6368fc8a866dd4bff9c0a271687bdaea848c1 | 5e014f95b49f376b34d20760c41f09bdca094247 | /flask_ide/auth/models.py | 2fe1fcdca8701cfe3cf45972adb5b95603c108eb | [] | no_license | jstacoder/flask-ide | 34ae304c211c7b263f37b2fcf0660ae76053c0a2 | 3890756c094b4b7872bad7d915e764e3e32dcb2d | refs/heads/master | 2023-02-12T11:22:24.412680 | 2020-07-20T17:21:55 | 2020-07-20T17:21:55 | 29,079,246 | 50 | 10 | null | 2023-02-02T07:17:40 | 2015-01-11T02:51:35 | JavaScript | UTF-8 | Python | false | false | 3,478 | py | from flask_xxl.basemodels import BaseMixin
from flask import url_for
from LoginUtils import encrypt_password, check_password
from sqlalchemy.ext.declarative import declared_attr
#import sqlalchemy to global namespace
from sqlalchemy import (
UnicodeText,func,Enum,UniqueConstraint,DateTime,Text,Column,Integer,
ForeignKey,Boolean,String,Table
)
from sqlalchemy.orm import relationship, backref
class UnknownUser(object):
is_unknown = True
class Role(BaseMixin):
__tablename__ = 'roles'
name = Column(String(255))
can_view = Column(Boolean,default=True,nullable=False)
can_add = Column(Boolean,default=False,nullable=False)
can_edit = Column(Boolean,default=False,nullable=False)
can_delete = Column(Boolean,default=False,nullable=False)
class User(BaseMixin):
__tablename__ = 'users'
first_name = Column(String(255),default="")
last_name = Column(String(255),default="")
email = Column(String(255),nullable=False,unique=True)
role_id = Column(Integer,ForeignKey('roles.id'))
role = relationship('Role',backref=backref(
'users',lazy='dynamic'))
add_date = Column(DateTime,default=func.now())
_pw_hash = Column(UnicodeText,nullable=False)
age = Column(Integer)
def __init__(self,*args,**kwargs):
if 'first_name' in kwargs:
self.first_name = kwargs.pop('first_name')
if 'last_name' in kwargs:
self.last_name = kwargs.pop('last_name')
if 'email' in kwargs:
self.email = kwargs.pop('email')
if 'role' in kwargs:
self.role = kwargs.pop('role')
if 'role_id' in kwargs:
self.role_id = kwargs.pop('role_id')
if 'password' in kwargs:
self.password = kwargs.pop('password')
def _to_json(self):
import json
return json.dumps(
{
'first_name':self.first_name,
'last_name':self.last_name,
'email':self.email,
'age':self.age,
'date_added':self.add_date,
}
)
@declared_attr
def __table_args__(cls):
return (UniqueConstraint('email','first_name','last_name'),{})
@property
def is_unknown(self):
return False
def check_password(self, pw):
return check_password(pw,self._pw_hash)
@classmethod
def get_by_email(cls, email):
return cls.query().filter_by(email=email).first()
@property
def password(self):
return 'private'
raise ValueError('Private Value!!!!')
@password.setter
def password(self,pw):
self._pw_hash = encrypt_password(pw)
@property
def full_name(self):
return '{} {}'.format(self.first_name.title(),self.last_name.title())
@property
def name(self):
return str(self.first_name)
def __str__(self):
if self.first_name != "":
rtn = self.full_name
else:
rtn = self.email
return rtn
def __repr__(self):
return 'User<{} {}'.format(self.email,self.first_name)
def _get_absolute_url(self):
return url_for('member.profile',member_id=str(int(self.id)))
@property
def absolute_url(self):
return str(self._get_absolute_url())
def _get_edit_url(self):
return '#'
@property
def edit_url(self):
return str(self._get_edit_url())
| [
"kyle@level2designs.com"
] | kyle@level2designs.com |
87c7524501017490341a86012b5d7364f04aacde | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_54/78.py | 1e0afea1344679e1079ae74d8bb54a891e5ad167 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 956 | py | def gcd(a,b):
while (b != 0):
c = a%b
a = b
b = c
return a
def get_gcd(line):
g = line[0]
cnt = len(line)
for i in range(1,cnt):
g = gcd(g,line[i])
return g
def solve(line):
N = int(line.pop(0))
for i in range(0,N):
line[i] = int(line[i])
line.sort()
diffs = list()
for i in range(0,N-1):
diff = line[i+1] - line[i]
diffs.append(diff)
g = pg = get_gcd(diffs)
if g < line[0]:
g = line[0] / pg * pg
if line[0] % pg != 0:
g += pg
ans = g - line[0]
return ans
AnsT = ""
myfile = open("B.in")
T = int(myfile.readline())
for i in range(0,T):
line = myfile.readline()
line = line.split("\n")
print i
ans = solve(line[0].split(" "))
AnsT = AnsT + "Case #"+ str(i+1) +": "+str(ans) + "\n"
outfile = open("B.out","w")
outfile.write(AnsT)
outfile.close()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
9b041b73b4058ed94e12ca2a03153ad4b7767547 | 3f911aca38f91e56890f5034b31ed81edb31b000 | /protein/FDR 구현실습/test.py | 2f29ade236cf8f8f9d1525d161b6fe892a63d725 | [] | no_license | sochic2/kis | 5dd83fd474176981f49cde967f49763405ed27b3 | 3ab07710c987110224b3fad0cb1ce3a0d6df6d1a | refs/heads/master | 2022-11-07T00:58:37.427148 | 2020-06-18T14:37:01 | 2020-06-18T14:37:01 | 257,481,038 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33 | py | a = 'abcdefg'
b = a[0:3]
print(b) | [
"netzzang12@gmail.com"
] | netzzang12@gmail.com |
ff1fa756c47a62759082ec1444f5f81912834726 | 7d81a7bba996c4257e47f039ab2524be0fe4bea4 | /theawesomeprice/first/MyAwesomeSite/PageScrape/migrations/0016_auto_20190612_2155.py | b009a44ddaf20d15af55d6720da5f461bd5ef174 | [] | no_license | KushSondhi/theawesomeprice | 38a43f9586a69636446de0b5d055db7a657073d9 | cddc36bf8a64db14dd801865b1c4a61f38c780c6 | refs/heads/master | 2022-11-05T14:08:27.967107 | 2020-07-05T00:30:35 | 2020-07-05T00:30:35 | 269,604,086 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 621 | py | # Generated by Django 2.2b1 on 2019-06-12 16:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('PageScrape', '0015_auto_20190612_2153'),
]
operations = [
migrations.AlterField(
model_name='shopclues',
name='prod_specs_left',
field=models.CharField(default=None, max_length=8000, null=True),
),
migrations.AlterField(
model_name='shopclues',
name='prod_specs_right',
field=models.CharField(default=None, max_length=8000, null=True),
),
]
| [
"root@Kush.theawesomeprice.tk"
] | root@Kush.theawesomeprice.tk |
c1d9e970d2aaa6133afb7698b6b7cd8027825d57 | bdc14f0cd470219fd62bd94d2025cccf553fd905 | /news/serializers.py | a3f7eeaca3c237ade0efe48adc55640633e6c5f6 | [] | no_license | mhsniranmanesh/sporthub-core | b12fb4f4dc180b7fb6b3808c488952623c9364ff | 57ef2caead9e7346a6b02e0d7df0f9b0bb8e5d2e | refs/heads/master | 2020-04-18T07:41:12.531413 | 2019-01-27T20:37:11 | 2019-01-27T20:37:11 | 167,367,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | from rest_framework import serializers
from news.models import News, NewsTag
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = NewsTag
fields = ['name']
class NewsGetRecentSerializer(serializers.ModelSerializer):
tag = TagSerializer()
class Meta:
model = News
fields = ('uuid', 'title', 'body', 'tag','date_created') | [
"mhsn.iranmanesh@gmail.com"
] | mhsn.iranmanesh@gmail.com |
d5b659372a216b999b788a1e5dbe6d3852e2a1f3 | 474525154a4e1d48ef5242d1f44164d05399b145 | /tensorflow_probability/python/experimental/distributions/mvn_precision_factor_linop_test.py | 47676d4d6f31be7ebf0b5ac98d233982286579c7 | [
"Apache-2.0"
] | permissive | svshivapuja/probability | 9855737790f74a39169688fbfec9671deef804d9 | af7ccb22d972329633530c3b754ed1f49472f6a7 | refs/heads/main | 2023-07-17T04:14:53.703622 | 2021-08-30T17:47:06 | 2021-08-30T17:47:06 | 400,983,015 | 1 | 0 | Apache-2.0 | 2021-08-29T07:51:29 | 2021-08-29T07:51:29 | null | UTF-8 | Python | false | false | 8,157 | py | # Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for tensorflow_probability.python.experimental.distributions.mvn_precision_factor_linop."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import test_combinations
from tensorflow_probability.python.internal import test_util
tfd = tfp.distributions
tfd_e = tfp.experimental.distributions
@test_util.test_all_tf_execution_regimes
class MVNPrecisionFactorLinOpTest(test_util.TestCase):
def _random_constant_spd_linop(
self,
event_size,
batch_shape=(),
conditioning=1.2,
dtype=np.float32,
):
"""Randomly generate a constant SPD LinearOperator."""
# The larger conditioning is, the better posed the matrix is.
# With conditioning = 1, it will be on the edge of singular, and likely
# numerically singular if event_size is large enough.
# Conditioning on the small side is best, since then the matrix is not so
# diagonally dominant, and we therefore test use of transpositions better.
assert conditioning >= 1
scale_wishart = tfd.WishartLinearOperator(
df=dtype(conditioning * event_size),
scale=tf.linalg.LinearOperatorIdentity(event_size, dtype=dtype),
input_output_cholesky=False,
)
# Make sure to evaluate here. This ensures that the linear operator is a
# constant rather than a random operator.
matrix = self.evaluate(
scale_wishart.sample(batch_shape, seed=test_util.test_seed()))
return tf.linalg.LinearOperatorFullMatrix(
matrix, is_positive_definite=True, is_self_adjoint=True)
@test_combinations.generate(
test_combinations.combine(
use_loc=[True, False],
use_precision=[True, False],
event_size=[3],
batch_shape=[(), (2,)],
n_samples=[5000],
dtype=[np.float32, np.float64],
),
)
def test_log_prob_and_sample(
self,
use_loc,
use_precision,
event_size,
batch_shape,
dtype,
n_samples,
):
cov = self._random_constant_spd_linop(
event_size, batch_shape=batch_shape, dtype=dtype)
precision = cov.inverse()
precision_factor = precision.cholesky()
# Make sure to evaluate here, else you'll have a random loc vector!
if use_loc:
loc = self.evaluate(
tf.random.normal(
batch_shape + (event_size,),
dtype=dtype,
seed=test_util.test_seed()))
else:
loc = None
mvn_scale = tfd.MultivariateNormalTriL(
loc=loc, scale_tril=cov.cholesky().to_dense())
mvn_precision = tfd_e.MultivariateNormalPrecisionFactorLinearOperator(
loc=loc,
precision_factor=precision_factor,
precision=precision if use_precision else None,
)
point = tf.random.normal(
batch_shape + (event_size,), dtype=dtype, seed=test_util.test_seed())
mvn_scale_log_prob, mvn_precision_log_prob = self.evaluate(
[mvn_scale.log_prob(point),
mvn_precision.log_prob(point)])
self.assertAllClose(
mvn_scale_log_prob, mvn_precision_log_prob, atol=5e-4, rtol=5e-4)
batch_point = tf.random.normal(
(2,) + batch_shape + (event_size,),
dtype=dtype,
seed=test_util.test_seed())
mvn_scale_log_prob, mvn_precision_log_prob = self.evaluate(
[mvn_scale.log_prob(batch_point),
mvn_precision.log_prob(batch_point)])
self.assertAllClose(
mvn_scale_log_prob, mvn_precision_log_prob, atol=5e-4, rtol=5e-4)
samples = mvn_precision.sample(n_samples, seed=test_util.test_seed())
arrs = self.evaluate({
'stddev': tf.sqrt(cov.diag_part()),
'var': cov.diag_part(),
'cov': cov.to_dense(),
'sample_mean': tf.reduce_mean(samples, axis=0),
'sample_var': tfp.stats.variance(samples, sample_axis=0),
'sample_cov': tfp.stats.covariance(samples, sample_axis=0),
})
self.assertAllClose(
arrs['sample_mean'],
loc if loc is not None else np.zeros_like(arrs['cov'][..., 0]),
atol=5 * np.max(arrs['stddev']) / np.sqrt(n_samples))
self.assertAllClose(
arrs['sample_var'],
arrs['var'],
atol=5 * np.sqrt(2) * np.max(arrs['var']) / np.sqrt(n_samples))
self.assertAllClose(
arrs['sample_cov'],
arrs['cov'],
atol=5 * np.sqrt(2) * np.max(arrs['var']) / np.sqrt(n_samples))
def test_dynamic_shape(self):
x = tf.Variable(ps.ones([7, 3]), shape=[7, None])
self.evaluate(x.initializer)
# Check that the shape is actually `None`.
if not tf.executing_eagerly():
last_shape = x.shape[-1]
if last_shape is not None: # This is a `tf.Dimension` in tf1.
last_shape = last_shape.value
self.assertIsNone(last_shape)
dynamic_dist = tfd_e.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag(tf.ones_like(x)))
static_dist = tfd_e.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag(tf.ones([7, 3])))
in_ = tf.zeros([7, 3])
self.assertAllClose(self.evaluate(dynamic_dist.log_prob(in_)),
static_dist.log_prob(in_))
@test_combinations.generate(
test_combinations.combine(
batch_shape=[(), (2,)],
dtype=[np.float32, np.float64],
),
)
def test_mean_and_mode(self, batch_shape, dtype):
event_size = 3
cov = self._random_constant_spd_linop(
event_size, batch_shape=batch_shape, dtype=dtype)
precision_factor = cov.inverse().cholesky()
# Make sure to evaluate here, else you'll have a random loc vector!
loc = self.evaluate(
tf.random.normal(
batch_shape + (event_size,),
dtype=dtype,
seed=test_util.test_seed()))
mvn_precision = tfd_e.MultivariateNormalPrecisionFactorLinearOperator(
loc=loc,
precision_factor=precision_factor)
self.assertAllClose(mvn_precision.mean(), loc)
self.assertAllClose(mvn_precision.mode(), loc)
@test_combinations.generate(
test_combinations.combine(
batch_shape=[(), (2,)],
use_precision=[True, False],
dtype=[np.float32, np.float64],
),
)
def test_cov_var_stddev(self, batch_shape, use_precision, dtype):
event_size = 3
cov = self._random_constant_spd_linop(
event_size, batch_shape=batch_shape, dtype=dtype)
precision = cov.inverse()
precision_factor = precision.cholesky()
# Make sure to evaluate here, else you'll have a random loc vector!
loc = self.evaluate(
tf.random.normal(
batch_shape + (event_size,),
dtype=dtype,
seed=test_util.test_seed()))
mvn_precision = tfd_e.MultivariateNormalPrecisionFactorLinearOperator(
loc=loc,
precision_factor=precision_factor,
precision=precision if use_precision else None)
self.assertAllClose(mvn_precision.covariance(), cov.to_dense(), atol=1e-4)
self.assertAllClose(mvn_precision.variance(), cov.diag_part(), atol=1e-4)
self.assertAllClose(mvn_precision.stddev(), tf.sqrt(cov.diag_part()),
atol=1e-5)
if __name__ == '__main__':
test_util.main()
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
767c0f1bf81724fc490d700d2e61919694707e07 | 823dd69093200d01995c4067ed1ec87194246d40 | /tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py | 8d356f570ac7130a9f2e26e6ec371238fe0143bf | [
"Apache-2.0"
] | permissive | plamut/python-bigquery-reservation | 910c0a5bf70f82968f3db91f3ef1d18270a84548 | 27b256440b2565369c900cd4728e38676f82fcfe | refs/heads/master | 2023-07-15T05:17:15.137418 | 2021-08-13T15:28:12 | 2021-08-13T15:28:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238,549 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.bigquery_reservation_v1.services.reservation_service import (
ReservationServiceAsyncClient,
)
from google.cloud.bigquery_reservation_v1.services.reservation_service import (
ReservationServiceClient,
)
from google.cloud.bigquery_reservation_v1.services.reservation_service import pagers
from google.cloud.bigquery_reservation_v1.services.reservation_service import transports
from google.cloud.bigquery_reservation_v1.services.reservation_service.transports.base import (
_GOOGLE_AUTH_VERSION,
)
from google.cloud.bigquery_reservation_v1.types import reservation
from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation
from google.oauth2 import service_account
from google.protobuf import any_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from google.rpc import status_pb2 # type: ignore
import google.auth
# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
# through google-api-core:
# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
reason="This test requires google-auth >= 1.25.0",
)
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert ReservationServiceClient._get_default_mtls_endpoint(None) is None
assert (
ReservationServiceClient._get_default_mtls_endpoint(api_endpoint)
== api_mtls_endpoint
)
assert (
ReservationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
ReservationServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
ReservationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert (
ReservationServiceClient._get_default_mtls_endpoint(non_googleapi)
== non_googleapi
)
@pytest.mark.parametrize(
"client_class", [ReservationServiceClient, ReservationServiceAsyncClient,]
)
def test_reservation_service_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "bigqueryreservation.googleapis.com:443"
@pytest.mark.parametrize(
"transport_class,transport_name",
[
(transports.ReservationServiceGrpcTransport, "grpc"),
(transports.ReservationServiceGrpcAsyncIOTransport, "grpc_asyncio"),
],
)
def test_reservation_service_client_service_account_always_use_jwt(
transport_class, transport_name
):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=False)
use_jwt.assert_not_called()
@pytest.mark.parametrize(
"client_class", [ReservationServiceClient, ReservationServiceAsyncClient,]
)
def test_reservation_service_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "bigqueryreservation.googleapis.com:443"
def test_reservation_service_client_get_transport_class():
transport = ReservationServiceClient.get_transport_class()
available_transports = [
transports.ReservationServiceGrpcTransport,
]
assert transport in available_transports
transport = ReservationServiceClient.get_transport_class("grpc")
assert transport == transports.ReservationServiceGrpcTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"),
(
ReservationServiceAsyncClient,
transports.ReservationServiceGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
@mock.patch.object(
ReservationServiceClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(ReservationServiceClient),
)
@mock.patch.object(
ReservationServiceAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(ReservationServiceAsyncClient),
)
def test_reservation_service_client_client_options(
client_class, transport_class, transport_name
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(ReservationServiceClient, "get_transport_class") as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(ReservationServiceClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (
            ReservationServiceClient,
            transports.ReservationServiceGrpcTransport,
            "grpc",
            "true",
        ),
        (
            ReservationServiceAsyncClient,
            transports.ReservationServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (
            ReservationServiceClient,
            transports.ReservationServiceGrpcTransport,
            "grpc",
            "false",
        ),
        (
            ReservationServiceAsyncClient,
            transports.ReservationServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    ReservationServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(ReservationServiceClient),
)
@mock.patch.object(
    ReservationServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(ReservationServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_reservation_service_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """Verify endpoint/cert auto-switching under GOOGLE_API_USE_MTLS_ENDPOINT="auto".

    Covers three cases for each (sync/async, cert env on/off) combination:
    an explicit client_cert_source, an ADC-provided cert, and no cert at all.
    In every case the transport constructor args are asserted directly.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            if use_client_cert_env == "false":
                # Cert env disabled: plain endpoint, no client cert.
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class()
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class()
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"),
        (
            ReservationServiceAsyncClient,
            transports.ReservationServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_reservation_service_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Verify user-supplied OAuth scopes are forwarded to the transport."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"),
        (
            ReservationServiceAsyncClient,
            transports.ReservationServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_reservation_service_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """Verify a credentials file path is forwarded to the transport."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_reservation_service_client_client_options_from_dict():
    """Verify client_options may be supplied as a plain dict (api_endpoint)."""
    with mock.patch(
        "google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = ReservationServiceClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_create_reservation(
    transport: str = "grpc", request_type=gcbr_reservation.CreateReservationRequest
):
    """Verify create_reservation sends the request and maps response fields."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = gcbr_reservation.Reservation(
            name="name_value", slot_capacity=1391, ignore_idle_slots=True,
        )
        response = client.create_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == gcbr_reservation.CreateReservationRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, gcbr_reservation.Reservation)
    assert response.name == "name_value"
    assert response.slot_capacity == 1391
    assert response.ignore_idle_slots is True
def test_create_reservation_from_dict():
    """Re-run the create_reservation test with a dict request payload."""
    test_create_reservation(request_type=dict)
def test_create_reservation_empty_call():
    """Verify a no-argument call sends a default CreateReservationRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_reservation), "__call__"
    ) as call:
        client.create_reservation()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == gcbr_reservation.CreateReservationRequest()
@pytest.mark.asyncio
async def test_create_reservation_async(
    transport: str = "grpc_asyncio",
    request_type=gcbr_reservation.CreateReservationRequest,
):
    """Async variant: verify create_reservation sends the request and maps fields."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbr_reservation.Reservation(
                name="name_value", slot_capacity=1391, ignore_idle_slots=True,
            )
        )
        response = await client.create_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == gcbr_reservation.CreateReservationRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, gcbr_reservation.Reservation)
    assert response.name == "name_value"
    assert response.slot_capacity == 1391
    assert response.ignore_idle_slots is True
@pytest.mark.asyncio
async def test_create_reservation_async_from_dict():
    """Re-run the async create_reservation test with a dict request payload."""
    await test_create_reservation_async(request_type=dict)
def test_create_reservation_field_headers():
    """Verify the routing header x-goog-request-params carries the parent field."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = gcbr_reservation.CreateReservationRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_reservation), "__call__"
    ) as call:
        call.return_value = gcbr_reservation.Reservation()
        client.create_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_reservation_field_headers_async():
    """Async variant: verify the parent routing header is sent."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = gcbr_reservation.CreateReservationRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_reservation), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbr_reservation.Reservation()
        )
        await client.create_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_reservation_flattened():
    """Verify flattened keyword args are packed into the request object."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = gcbr_reservation.Reservation()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_reservation(
            parent="parent_value",
            reservation=gcbr_reservation.Reservation(name="name_value"),
            reservation_id="reservation_id_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].reservation == gcbr_reservation.Reservation(name="name_value")
        assert args[0].reservation_id == "reservation_id_value"
def test_create_reservation_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_reservation(
            gcbr_reservation.CreateReservationRequest(),
            parent="parent_value",
            reservation=gcbr_reservation.Reservation(name="name_value"),
            reservation_id="reservation_id_value",
        )
@pytest.mark.asyncio
async def test_create_reservation_flattened_async():
    """Async variant: verify flattened keyword args are packed into the request.

    Fix: removed the dead ``call.return_value = gcbr_reservation.Reservation()``
    assignment that was immediately overwritten by the awaitable below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_reservation), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbr_reservation.Reservation()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_reservation(
            parent="parent_value",
            reservation=gcbr_reservation.Reservation(name="name_value"),
            reservation_id="reservation_id_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].reservation == gcbr_reservation.Reservation(name="name_value")
        assert args[0].reservation_id == "reservation_id_value"
@pytest.mark.asyncio
async def test_create_reservation_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.create_reservation(
            gcbr_reservation.CreateReservationRequest(),
            parent="parent_value",
            reservation=gcbr_reservation.Reservation(name="name_value"),
            reservation_id="reservation_id_value",
        )
def test_list_reservations(
    transport: str = "grpc", request_type=reservation.ListReservationsRequest
):
    """Verify list_reservations sends the request and returns a pager."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.ListReservationsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_reservations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.ListReservationsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListReservationsPager)
    assert response.next_page_token == "next_page_token_value"
def test_list_reservations_from_dict():
    """Re-run the list_reservations test with a dict request payload."""
    test_list_reservations(request_type=dict)
def test_list_reservations_empty_call():
    """Verify a no-argument call sends a default ListReservationsRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations), "__call__"
    ) as call:
        client.list_reservations()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.ListReservationsRequest()
@pytest.mark.asyncio
async def test_list_reservations_async(
    transport: str = "grpc_asyncio", request_type=reservation.ListReservationsRequest
):
    """Async variant: verify list_reservations sends the request and pages."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.ListReservationsResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_reservations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.ListReservationsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListReservationsAsyncPager)
    assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_reservations_async_from_dict():
    """Re-run the async list_reservations test with a dict request payload."""
    await test_list_reservations_async(request_type=dict)
def test_list_reservations_field_headers():
    """Verify the routing header x-goog-request-params carries the parent field."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.ListReservationsRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations), "__call__"
    ) as call:
        call.return_value = reservation.ListReservationsResponse()
        client.list_reservations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_reservations_field_headers_async():
    """Async variant: verify the parent routing header is sent."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.ListReservationsRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.ListReservationsResponse()
        )
        await client.list_reservations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_reservations_flattened():
    """Verify the flattened parent keyword arg is packed into the request."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.ListReservationsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_reservations(parent="parent_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
def test_list_reservations_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_reservations(
            reservation.ListReservationsRequest(), parent="parent_value",
        )
@pytest.mark.asyncio
async def test_list_reservations_flattened_async():
    """Async variant: verify the flattened parent arg is packed into the request.

    Fix: removed the dead ``call.return_value = reservation.ListReservationsResponse()``
    assignment that was immediately overwritten by the awaitable below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.ListReservationsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_reservations(parent="parent_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_reservations_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_reservations(
            reservation.ListReservationsRequest(), parent="parent_value",
        )
def test_list_reservations_pager():
    """Verify the pager transparently iterates items across multiple pages.

    Fix: instantiate ``AnonymousCredentials()`` -- the original passed the
    class object itself (missing parentheses), unlike every other test in
    this file.
    """
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.ListReservationsResponse(
                reservations=[
                    reservation.Reservation(),
                    reservation.Reservation(),
                    reservation.Reservation(),
                ],
                next_page_token="abc",
            ),
            reservation.ListReservationsResponse(
                reservations=[], next_page_token="def",
            ),
            reservation.ListReservationsResponse(
                reservations=[reservation.Reservation(),], next_page_token="ghi",
            ),
            reservation.ListReservationsResponse(
                reservations=[reservation.Reservation(), reservation.Reservation(),],
            ),
            RuntimeError,
        )
        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_reservations(request={})
        assert pager._metadata == metadata
        # Iterating the pager flattens all four pages into one item stream.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, reservation.Reservation) for i in results)
def test_list_reservations_pages():
    """Verify ``pager.pages`` yields raw pages with their next_page_token.

    Fix: instantiate ``AnonymousCredentials()`` -- the original passed the
    class object itself (missing parentheses), unlike every other test in
    this file.
    """
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.ListReservationsResponse(
                reservations=[
                    reservation.Reservation(),
                    reservation.Reservation(),
                    reservation.Reservation(),
                ],
                next_page_token="abc",
            ),
            reservation.ListReservationsResponse(
                reservations=[], next_page_token="def",
            ),
            reservation.ListReservationsResponse(
                reservations=[reservation.Reservation(),], next_page_token="ghi",
            ),
            reservation.ListReservationsResponse(
                reservations=[reservation.Reservation(), reservation.Reservation(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_reservations(request={}).pages)
        # The final page has no next_page_token (proto default: empty string).
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_reservations_async_pager():
    """Verify the async pager yields items across multiple response pages.

    Fix: instantiate ``AnonymousCredentials()`` -- the original passed the
    class object itself (missing parentheses), unlike every other test in
    this file.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.ListReservationsResponse(
                reservations=[
                    reservation.Reservation(),
                    reservation.Reservation(),
                    reservation.Reservation(),
                ],
                next_page_token="abc",
            ),
            reservation.ListReservationsResponse(
                reservations=[], next_page_token="def",
            ),
            reservation.ListReservationsResponse(
                reservations=[reservation.Reservation(),], next_page_token="ghi",
            ),
            reservation.ListReservationsResponse(
                reservations=[reservation.Reservation(), reservation.Reservation(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_reservations(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)
        assert len(responses) == 6
        assert all(isinstance(i, reservation.Reservation) for i in responses)
@pytest.mark.asyncio
async def test_list_reservations_async_pages():
    """Verify the async ``pages`` iterator yields raw pages with their tokens.

    Fix: instantiate ``AnonymousCredentials()`` -- the original passed the
    class object itself (missing parentheses), unlike every other test in
    this file.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_reservations),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.ListReservationsResponse(
                reservations=[
                    reservation.Reservation(),
                    reservation.Reservation(),
                    reservation.Reservation(),
                ],
                next_page_token="abc",
            ),
            reservation.ListReservationsResponse(
                reservations=[], next_page_token="def",
            ),
            reservation.ListReservationsResponse(
                reservations=[reservation.Reservation(),], next_page_token="ghi",
            ),
            reservation.ListReservationsResponse(
                reservations=[reservation.Reservation(), reservation.Reservation(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_reservations(request={})).pages:
            pages.append(page_)
        # The final page has no next_page_token (proto default: empty string).
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_get_reservation(
    transport: str = "grpc", request_type=reservation.GetReservationRequest
):
    """Verify get_reservation sends the request and maps response fields."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.Reservation(
            name="name_value", slot_capacity=1391, ignore_idle_slots=True,
        )
        response = client.get_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.GetReservationRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, reservation.Reservation)
    assert response.name == "name_value"
    assert response.slot_capacity == 1391
    assert response.ignore_idle_slots is True
def test_get_reservation_from_dict():
    """Re-run the get_reservation test with a dict request payload."""
    test_get_reservation(request_type=dict)
def test_get_reservation_empty_call():
    """Verify a no-argument call sends a default GetReservationRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
        client.get_reservation()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.GetReservationRequest()
@pytest.mark.asyncio
async def test_get_reservation_async(
    transport: str = "grpc_asyncio", request_type=reservation.GetReservationRequest
):
    """Async variant: verify get_reservation sends the request and maps fields."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Reservation(
                name="name_value", slot_capacity=1391, ignore_idle_slots=True,
            )
        )
        response = await client.get_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.GetReservationRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, reservation.Reservation)
    assert response.name == "name_value"
    assert response.slot_capacity == 1391
    assert response.ignore_idle_slots is True
@pytest.mark.asyncio
async def test_get_reservation_async_from_dict():
    """Re-run the async get_reservation test with a dict request payload."""
    await test_get_reservation_async(request_type=dict)
def test_get_reservation_field_headers():
    """Verify the routing header x-goog-request-params carries the name field."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.GetReservationRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
        call.return_value = reservation.Reservation()
        client.get_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_reservation_field_headers_async():
    """Async variant: verify the name routing header is sent."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.GetReservationRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Reservation()
        )
        await client.get_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_reservation_flattened():
    """Verify the flattened name keyword arg is packed into the request."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.Reservation()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_reservation(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
def test_get_reservation_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_reservation(
            reservation.GetReservationRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_get_reservation_flattened_async():
    """Async variant: verify the flattened name arg is packed into the request.

    Fix: removed the dead ``call.return_value = reservation.Reservation()``
    assignment that was immediately overwritten by the awaitable below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_reservation), "__call__") as call:
        # Designate an appropriate (awaitable) return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Reservation()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_reservation(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_reservation_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_reservation(
            reservation.GetReservationRequest(), name="name_value",
        )
def test_delete_reservation(
    transport: str = "grpc", request_type=reservation.DeleteReservationRequest
):
    """delete_reservation sends the request over gRPC and returns None."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.delete_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.DeleteReservationRequest()
    # Establish that the response is the type that we expect.
    assert response is None
def test_delete_reservation_from_dict():
    """Re-run the base test with a dict-typed request for coverage."""
    test_delete_reservation(request_type=dict)
def test_delete_reservation_empty_call():
    """Calling delete_reservation() with no args sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_reservation), "__call__"
    ) as call:
        client.delete_reservation()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.DeleteReservationRequest()
@pytest.mark.asyncio
async def test_delete_reservation_async(
    transport: str = "grpc_asyncio", request_type=reservation.DeleteReservationRequest
):
    """Async variant: delete_reservation sends the request and returns None."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.DeleteReservationRequest()
    # Establish that the response is the type that we expect.
    assert response is None
@pytest.mark.asyncio
async def test_delete_reservation_async_from_dict():
    """Re-run the async base test with a dict-typed request for coverage."""
    await test_delete_reservation_async(request_type=dict)
def test_delete_reservation_field_headers():
    """delete_reservation must send ``name`` as an x-goog-request-params header."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.DeleteReservationRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_reservation), "__call__"
    ) as call:
        call.return_value = None
        client.delete_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_reservation_field_headers_async():
    """Async variant: ``name`` is sent as an x-goog-request-params header."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.DeleteReservationRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_reservation), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_reservation_flattened():
    """Flattened ``name`` keyword must be copied into the request object."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_reservation(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
def test_delete_reservation_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_reservation(
            reservation.DeleteReservationRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_delete_reservation_flattened_async():
    """Async variant: flattened ``name`` keyword is copied into the request.

    Fix: removed a dead ``call.return_value = None`` assignment that was
    immediately overwritten by the FakeUnaryUnaryCall assignment below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_reservation(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_reservation_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_reservation(
            reservation.DeleteReservationRequest(), name="name_value",
        )
def test_update_reservation(
    transport: str = "grpc", request_type=gcbr_reservation.UpdateReservationRequest
):
    """update_reservation sends the request and returns the stubbed Reservation."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = gcbr_reservation.Reservation(
            name="name_value", slot_capacity=1391, ignore_idle_slots=True,
        )
        response = client.update_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == gcbr_reservation.UpdateReservationRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, gcbr_reservation.Reservation)
    assert response.name == "name_value"
    assert response.slot_capacity == 1391
    assert response.ignore_idle_slots is True
def test_update_reservation_from_dict():
    """Re-run the base test with a dict-typed request for coverage."""
    test_update_reservation(request_type=dict)
def test_update_reservation_empty_call():
    """Calling update_reservation() with no args sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_reservation), "__call__"
    ) as call:
        client.update_reservation()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == gcbr_reservation.UpdateReservationRequest()
@pytest.mark.asyncio
async def test_update_reservation_async(
    transport: str = "grpc_asyncio",
    request_type=gcbr_reservation.UpdateReservationRequest,
):
    """Async variant: update_reservation returns the stubbed Reservation."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbr_reservation.Reservation(
                name="name_value", slot_capacity=1391, ignore_idle_slots=True,
            )
        )
        response = await client.update_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == gcbr_reservation.UpdateReservationRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, gcbr_reservation.Reservation)
    assert response.name == "name_value"
    assert response.slot_capacity == 1391
    assert response.ignore_idle_slots is True
@pytest.mark.asyncio
async def test_update_reservation_async_from_dict():
    """Re-run the async base test with a dict-typed request for coverage."""
    await test_update_reservation_async(request_type=dict)
def test_update_reservation_field_headers():
    """update_reservation must send ``reservation.name`` as a routing header."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = gcbr_reservation.UpdateReservationRequest()
    request.reservation.name = "reservation.name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_reservation), "__call__"
    ) as call:
        call.return_value = gcbr_reservation.Reservation()
        client.update_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "reservation.name=reservation.name/value",) in kw[
        "metadata"
    ]
@pytest.mark.asyncio
async def test_update_reservation_field_headers_async():
    """Async variant: ``reservation.name`` is sent as a routing header."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = gcbr_reservation.UpdateReservationRequest()
    request.reservation.name = "reservation.name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_reservation), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbr_reservation.Reservation()
        )
        await client.update_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "reservation.name=reservation.name/value",) in kw[
        "metadata"
    ]
def test_update_reservation_flattened():
    """Flattened ``reservation``/``update_mask`` args are copied into the request."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = gcbr_reservation.Reservation()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_reservation(
            reservation=gcbr_reservation.Reservation(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].reservation == gcbr_reservation.Reservation(name="name_value")
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
def test_update_reservation_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_reservation(
            gcbr_reservation.UpdateReservationRequest(),
            reservation=gcbr_reservation.Reservation(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
@pytest.mark.asyncio
async def test_update_reservation_flattened_async():
    """Async variant: flattened args are copied into the request object.

    Fix: removed a dead ``call.return_value = gcbr_reservation.Reservation()``
    assignment that was immediately overwritten by the FakeUnaryUnaryCall
    assignment below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbr_reservation.Reservation()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_reservation(
            reservation=gcbr_reservation.Reservation(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].reservation == gcbr_reservation.Reservation(name="name_value")
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_reservation_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_reservation(
            gcbr_reservation.UpdateReservationRequest(),
            reservation=gcbr_reservation.Reservation(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
def test_create_capacity_commitment(
    transport: str = "grpc", request_type=reservation.CreateCapacityCommitmentRequest
):
    """create_capacity_commitment returns the stubbed CapacityCommitment."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_capacity_commitment), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.CapacityCommitment(
            name="name_value",
            slot_count=1098,
            plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
            state=reservation.CapacityCommitment.State.PENDING,
            renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
        )
        response = client.create_capacity_commitment(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.CreateCapacityCommitmentRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, reservation.CapacityCommitment)
    assert response.name == "name_value"
    assert response.slot_count == 1098
    assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
    assert response.state == reservation.CapacityCommitment.State.PENDING
    assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
def test_create_capacity_commitment_from_dict():
    """Re-run the base test with a dict-typed request for coverage."""
    test_create_capacity_commitment(request_type=dict)
def test_create_capacity_commitment_empty_call():
    """Calling create_capacity_commitment() with no args sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_capacity_commitment), "__call__"
    ) as call:
        client.create_capacity_commitment()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.CreateCapacityCommitmentRequest()
@pytest.mark.asyncio
async def test_create_capacity_commitment_async(
    transport: str = "grpc_asyncio",
    request_type=reservation.CreateCapacityCommitmentRequest,
):
    """Async variant: create_capacity_commitment returns the stubbed commitment."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_capacity_commitment), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment(
                name="name_value",
                slot_count=1098,
                plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
                state=reservation.CapacityCommitment.State.PENDING,
                renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
            )
        )
        response = await client.create_capacity_commitment(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.CreateCapacityCommitmentRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, reservation.CapacityCommitment)
    assert response.name == "name_value"
    assert response.slot_count == 1098
    assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
    assert response.state == reservation.CapacityCommitment.State.PENDING
    assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
@pytest.mark.asyncio
async def test_create_capacity_commitment_async_from_dict():
    """Re-run the async base test with a dict-typed request for coverage."""
    await test_create_capacity_commitment_async(request_type=dict)
def test_create_capacity_commitment_field_headers():
    """create_capacity_commitment must send ``parent`` as a routing header."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.CreateCapacityCommitmentRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_capacity_commitment), "__call__"
    ) as call:
        call.return_value = reservation.CapacityCommitment()
        client.create_capacity_commitment(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_capacity_commitment_field_headers_async():
    """Async variant: ``parent`` is sent as an x-goog-request-params header."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.CreateCapacityCommitmentRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_capacity_commitment), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment()
        )
        await client.create_capacity_commitment(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_capacity_commitment_flattened():
    """Flattened ``parent``/``capacity_commitment`` args are copied into the request."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_capacity_commitment), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.CapacityCommitment()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_capacity_commitment(
            parent="parent_value",
            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].capacity_commitment == reservation.CapacityCommitment(
            name="name_value"
        )
def test_create_capacity_commitment_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_capacity_commitment(
            reservation.CreateCapacityCommitmentRequest(),
            parent="parent_value",
            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
        )
@pytest.mark.asyncio
async def test_create_capacity_commitment_flattened_async():
    """Async variant: flattened args are copied into the request object.

    Fix: removed a dead ``call.return_value = reservation.CapacityCommitment()``
    assignment that was immediately overwritten by the FakeUnaryUnaryCall
    assignment below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_capacity_commitment), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_capacity_commitment(
            parent="parent_value",
            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].capacity_commitment == reservation.CapacityCommitment(
            name="name_value"
        )
@pytest.mark.asyncio
async def test_create_capacity_commitment_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.create_capacity_commitment(
            reservation.CreateCapacityCommitmentRequest(),
            parent="parent_value",
            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
        )
def test_list_capacity_commitments(
    transport: str = "grpc", request_type=reservation.ListCapacityCommitmentsRequest
):
    """list_capacity_commitments returns a pager carrying the next page token."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.ListCapacityCommitmentsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_capacity_commitments(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.ListCapacityCommitmentsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListCapacityCommitmentsPager)
    assert response.next_page_token == "next_page_token_value"
def test_list_capacity_commitments_from_dict():
    """Re-run the base test with a dict-typed request for coverage."""
    test_list_capacity_commitments(request_type=dict)
def test_list_capacity_commitments_empty_call():
    """Calling list_capacity_commitments() with no args sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments), "__call__"
    ) as call:
        client.list_capacity_commitments()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.ListCapacityCommitmentsRequest()
@pytest.mark.asyncio
async def test_list_capacity_commitments_async(
    transport: str = "grpc_asyncio",
    request_type=reservation.ListCapacityCommitmentsRequest,
):
    """Async variant: list_capacity_commitments returns an async pager."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.ListCapacityCommitmentsResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_capacity_commitments(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.ListCapacityCommitmentsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListCapacityCommitmentsAsyncPager)
    assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_capacity_commitments_async_from_dict():
    """Re-run the async base test with a dict-typed request for coverage."""
    await test_list_capacity_commitments_async(request_type=dict)
def test_list_capacity_commitments_field_headers():
    """list_capacity_commitments must send ``parent`` as a routing header."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.ListCapacityCommitmentsRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments), "__call__"
    ) as call:
        call.return_value = reservation.ListCapacityCommitmentsResponse()
        client.list_capacity_commitments(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_capacity_commitments_field_headers_async():
    """Async variant: ``parent`` is sent as an x-goog-request-params header."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.ListCapacityCommitmentsRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.ListCapacityCommitmentsResponse()
        )
        await client.list_capacity_commitments(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_capacity_commitments_flattened():
    """Flattened ``parent`` keyword must be copied into the request object."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.ListCapacityCommitmentsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_capacity_commitments(parent="parent_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
def test_list_capacity_commitments_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_capacity_commitments(
            reservation.ListCapacityCommitmentsRequest(), parent="parent_value",
        )
@pytest.mark.asyncio
async def test_list_capacity_commitments_flattened_async():
    """Async variant: flattened ``parent`` keyword is copied into the request.

    Fix: removed a dead
    ``call.return_value = reservation.ListCapacityCommitmentsResponse()``
    assignment that was immediately overwritten by the FakeUnaryUnaryCall
    assignment below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.ListCapacityCommitmentsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_capacity_commitments(parent="parent_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_capacity_commitments_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = reservation.ListCapacityCommitmentsRequest()
    with pytest.raises(ValueError):
        await client.list_capacity_commitments(request, parent="parent_value")
def test_list_capacity_commitments_pager():
    """The sync pager walks every page transparently and yields each item."""
    # Fix: pass an instantiated credentials object, not the class itself,
    # for consistency with every other test in this file.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments), "__call__"
    ) as call:
        # Four pages: 3 items, 0 items, 1 item, 2 items; then stop.
        call.side_effect = (
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                ],
                next_page_token="abc",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[], next_page_token="def",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[reservation.CapacityCommitment()],
                next_page_token="ghi",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                ],
            ),
            RuntimeError,
        )

        # The pager carries the routing-header metadata for the parent field.
        expected_metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_capacity_commitments(request={})
        assert pager._metadata == expected_metadata

        # Iterating the pager flattens all four pages into six items.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, reservation.CapacityCommitment) for i in results)
def test_list_capacity_commitments_pages():
    """`pages` exposes each raw page together with its next_page_token."""
    # Fix: pass an instantiated credentials object, not the class itself,
    # for consistency with every other test in this file.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments), "__call__"
    ) as call:
        # Four pages followed by a sentinel RuntimeError that must never fire.
        call.side_effect = (
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                ],
                next_page_token="abc",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[], next_page_token="def",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[reservation.CapacityCommitment()],
                next_page_token="ghi",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_capacity_commitments(request={}).pages)
        # The final page carries an empty token, signalling the end.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_capacity_commitments_async_pager():
    """The async pager flattens all pages via `async for`."""
    # Fix: pass an instantiated credentials object, not the class itself,
    # for consistency with every other test in this file.
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Four pages: 3 items, 0 items, 1 item, 2 items; then stop.
        call.side_effect = (
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                ],
                next_page_token="abc",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[], next_page_token="def",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[reservation.CapacityCommitment()],
                next_page_token="ghi",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_capacity_commitments(request={})
        assert async_pager.next_page_token == "abc"
        # Async iteration flattens all four pages into six items.
        responses = [response async for response in async_pager]
        assert len(responses) == 6
        assert all(isinstance(i, reservation.CapacityCommitment) for i in responses)
@pytest.mark.asyncio
async def test_list_capacity_commitments_async_pages():
    """Async `pages` yields each raw page with its next_page_token."""
    # Fix: pass an instantiated credentials object, not the class itself,
    # for consistency with every other test in this file.
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_capacity_commitments),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Four pages followed by a sentinel RuntimeError that must never fire.
        call.side_effect = (
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                ],
                next_page_token="abc",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[], next_page_token="def",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[reservation.CapacityCommitment()],
                next_page_token="ghi",
            ),
            reservation.ListCapacityCommitmentsResponse(
                capacity_commitments=[
                    reservation.CapacityCommitment(),
                    reservation.CapacityCommitment(),
                ],
            ),
            RuntimeError,
        )
        pager = await client.list_capacity_commitments(request={})
        pages = [page_ async for page_ in pager.pages]
        # The final page carries an empty token, signalling the end.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_get_capacity_commitment(
    transport: str = "grpc", request_type=reservation.GetCapacityCommitmentRequest
):
    """GetCapacityCommitment round-trip: request forwarded, reply surfaced."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API itself is
    # mocked, so an empty request is sufficient.
    request = request_type()

    stubbed_reply = reservation.CapacityCommitment(
        name="name_value",
        slot_count=1098,
        plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
        state=reservation.CapacityCommitment.State.PENDING,
        renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
    )
    with mock.patch.object(
        type(client.transport.get_capacity_commitment), "__call__"
    ) as call:
        call.return_value = stubbed_reply
        response = client.get_capacity_commitment(request)

        # Exactly one RPC went out, carrying the canonical request proto.
        assert len(call.mock_calls) == 1
        assert call.mock_calls[0].args[0] == reservation.GetCapacityCommitmentRequest()

    # The stubbed reply is surfaced typed and field-for-field intact.
    assert isinstance(response, reservation.CapacityCommitment)
    assert response.name == "name_value"
    assert response.slot_count == 1098
    assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
    assert response.state == reservation.CapacityCommitment.State.PENDING
    assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
def test_get_capacity_commitment_from_dict():
    """Re-run the positive-path test with a plain-dict request payload."""
    # The client must coerce the dict into the request proto transparently.
    test_get_capacity_commitment(request_type=dict)
def test_get_capacity_commitment_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.get_capacity_commitment), "__call__"
    ) as call:
        client.get_capacity_commitment()
        call.assert_called()
        # A default request proto is synthesized for the wire.
        first = call.mock_calls[0]
        assert first.args[0] == reservation.GetCapacityCommitmentRequest()
@pytest.mark.asyncio
async def test_get_capacity_commitment_async(
    transport: str = "grpc_asyncio",
    request_type=reservation.GetCapacityCommitmentRequest,
):
    """Async GetCapacityCommitment round-trip: request forwarded, reply surfaced."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are optional at runtime and the API is mocked, so an
    # empty request suffices.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_capacity_commitment), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment(
                name="name_value",
                slot_count=1098,
                plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
                state=reservation.CapacityCommitment.State.PENDING,
                renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
            )
        )
        response = await client.get_capacity_commitment(request)
        # Fix: assert exactly one call (the bare truthiness check was weaker
        # than the sync test's assertion).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.GetCapacityCommitmentRequest()
    # The stubbed reply is surfaced typed and field-for-field intact.
    assert isinstance(response, reservation.CapacityCommitment)
    assert response.name == "name_value"
    assert response.slot_count == 1098
    assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
    assert response.state == reservation.CapacityCommitment.State.PENDING
    assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
@pytest.mark.asyncio
async def test_get_capacity_commitment_async_from_dict():
    """Async positive-path test exercised with a plain-dict request payload."""
    await test_get_capacity_commitment_async(request_type=dict)
def test_get_capacity_commitment_field_headers():
    """URI-bound request fields are echoed as x-goog-request-params metadata."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routed field with a non-empty value.
    request = reservation.GetCapacityCommitmentRequest()
    request.name = "name/value"

    with mock.patch.object(
        type(client.transport.get_capacity_commitment), "__call__"
    ) as call:
        call.return_value = reservation.CapacityCommitment()
        client.get_capacity_commitment(request)

        # The stub received the request object unchanged...
        assert len(call.mock_calls) == 1
        first = call.mock_calls[0]
        assert first.args[0] == request
        # ...and the routing header was attached to the call metadata.
        assert ("x-goog-request-params", "name=name/value",) in first.kwargs["metadata"]
@pytest.mark.asyncio
async def test_get_capacity_commitment_field_headers_async():
    """Async variant: URI-bound fields are mirrored into request-params metadata."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routed field with a non-empty value.
    request = reservation.GetCapacityCommitmentRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_capacity_commitment), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment()
        )
        await client.get_capacity_commitment(request)
        # Fix: assert exactly one call (bare truthiness was weaker than the
        # sync counterpart's check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # The routing header was attached to the call metadata.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_capacity_commitment_flattened():
    """Flattened keyword arguments are folded into the request proto."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.get_capacity_commitment), "__call__"
    ) as call:
        call.return_value = reservation.CapacityCommitment()
        # Invoke with a truthy value for each flattened field.
        client.get_capacity_commitment(name="name_value")
        # Exactly one RPC, with the kwarg copied onto the request.
        assert len(call.mock_calls) == 1
        sent_request = call.mock_calls[0].args[0]
        assert sent_request.name == "name_value"
def test_get_capacity_commitment_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = reservation.GetCapacityCommitmentRequest()
    with pytest.raises(ValueError):
        client.get_capacity_commitment(request, name="name_value")
@pytest.mark.asyncio
async def test_get_capacity_commitment_flattened_async():
    """Async flattened kwargs are folded into the outgoing request proto."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_capacity_commitment), "__call__"
    ) as call:
        # Fix: only the awaitable wrapper matters; the earlier bare
        # assignment of a plain response object was dead code and is removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment()
        )
        # Call the method with a truthy value for each flattened field.
        response = await client.get_capacity_commitment(name="name_value")
        # Fix: assert exactly one call (bare truthiness was weaker than the
        # sync counterpart's check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_capacity_commitment_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = reservation.GetCapacityCommitmentRequest()
    with pytest.raises(ValueError):
        await client.get_capacity_commitment(request, name="name_value")
def test_delete_capacity_commitment(
    transport: str = "grpc", request_type=reservation.DeleteCapacityCommitmentRequest
):
    """DeleteCapacityCommitment forwards the request and yields None."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3: an empty request is valid; the API itself is mocked.
    request = request_type()

    with mock.patch.object(
        type(client.transport.delete_capacity_commitment), "__call__"
    ) as call:
        call.return_value = None
        response = client.delete_capacity_commitment(request)

        # Exactly one RPC went out, carrying the canonical request proto.
        assert len(call.mock_calls) == 1
        first = call.mock_calls[0]
        assert first.args[0] == reservation.DeleteCapacityCommitmentRequest()

    # Delete is fire-and-forget: no payload comes back.
    assert response is None
def test_delete_capacity_commitment_from_dict():
    """Re-run the positive-path delete test with a plain-dict payload."""
    test_delete_capacity_commitment(request_type=dict)
def test_delete_capacity_commitment_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.delete_capacity_commitment), "__call__"
    ) as call:
        client.delete_capacity_commitment()
        call.assert_called()
        # A default request proto is synthesized for the wire.
        first = call.mock_calls[0]
        assert first.args[0] == reservation.DeleteCapacityCommitmentRequest()
@pytest.mark.asyncio
async def test_delete_capacity_commitment_async(
    transport: str = "grpc_asyncio",
    request_type=reservation.DeleteCapacityCommitmentRequest,
):
    """Async DeleteCapacityCommitment forwards the request and yields None."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3: an empty request is valid; the API itself is mocked.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_capacity_commitment), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_capacity_commitment(request)
        # Fix: assert exactly one call (bare truthiness was weaker than the
        # sync counterpart's check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.DeleteCapacityCommitmentRequest()
    # Delete returns no payload.
    assert response is None
@pytest.mark.asyncio
async def test_delete_capacity_commitment_async_from_dict():
    """Async delete test exercised with a plain-dict request payload."""
    await test_delete_capacity_commitment_async(request_type=dict)
def test_delete_capacity_commitment_field_headers():
    """URI-bound request fields are echoed as x-goog-request-params metadata."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routed field with a non-empty value.
    request = reservation.DeleteCapacityCommitmentRequest()
    request.name = "name/value"

    with mock.patch.object(
        type(client.transport.delete_capacity_commitment), "__call__"
    ) as call:
        call.return_value = None
        client.delete_capacity_commitment(request)

        # The stub received the request object unchanged...
        assert len(call.mock_calls) == 1
        first = call.mock_calls[0]
        assert first.args[0] == request
        # ...and the routing header was attached to the call metadata.
        assert ("x-goog-request-params", "name=name/value",) in first.kwargs["metadata"]
@pytest.mark.asyncio
async def test_delete_capacity_commitment_field_headers_async():
    """Async variant: URI-bound fields are mirrored into request-params metadata."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routed field with a non-empty value.
    request = reservation.DeleteCapacityCommitmentRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_capacity_commitment), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_capacity_commitment(request)
        # Fix: assert exactly one call (bare truthiness was weaker than the
        # sync counterpart's check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # The routing header was attached to the call metadata.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_capacity_commitment_flattened():
    """Flattened keyword arguments are folded into the request proto."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.delete_capacity_commitment), "__call__"
    ) as call:
        call.return_value = None
        # Invoke with a truthy value for each flattened field.
        client.delete_capacity_commitment(name="name_value")
        # Exactly one RPC, with the kwarg copied onto the request.
        assert len(call.mock_calls) == 1
        sent_request = call.mock_calls[0].args[0]
        assert sent_request.name == "name_value"
def test_delete_capacity_commitment_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = reservation.DeleteCapacityCommitmentRequest()
    with pytest.raises(ValueError):
        client.delete_capacity_commitment(request, name="name_value")
@pytest.mark.asyncio
async def test_delete_capacity_commitment_flattened_async():
    """Async flattened kwargs are folded into the outgoing request proto."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_capacity_commitment), "__call__"
    ) as call:
        # Fix: only the awaitable wrapper matters; the earlier bare
        # `call.return_value = None` assignment was dead code and is removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field.
        response = await client.delete_capacity_commitment(name="name_value")
        # Fix: assert exactly one call (bare truthiness was weaker than the
        # sync counterpart's check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_capacity_commitment_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = reservation.DeleteCapacityCommitmentRequest()
    with pytest.raises(ValueError):
        await client.delete_capacity_commitment(request, name="name_value")
def test_update_capacity_commitment(
    transport: str = "grpc", request_type=reservation.UpdateCapacityCommitmentRequest
):
    """UpdateCapacityCommitment round-trip: request forwarded, reply surfaced."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API itself is
    # mocked, so an empty request is sufficient.
    request = request_type()

    stubbed_reply = reservation.CapacityCommitment(
        name="name_value",
        slot_count=1098,
        plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
        state=reservation.CapacityCommitment.State.PENDING,
        renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
    )
    with mock.patch.object(
        type(client.transport.update_capacity_commitment), "__call__"
    ) as call:
        call.return_value = stubbed_reply
        response = client.update_capacity_commitment(request)

        # Exactly one RPC went out, carrying the canonical request proto.
        assert len(call.mock_calls) == 1
        assert call.mock_calls[0].args[0] == reservation.UpdateCapacityCommitmentRequest()

    # The stubbed reply is surfaced typed and field-for-field intact.
    assert isinstance(response, reservation.CapacityCommitment)
    assert response.name == "name_value"
    assert response.slot_count == 1098
    assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
    assert response.state == reservation.CapacityCommitment.State.PENDING
    assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
def test_update_capacity_commitment_from_dict():
    """Re-run the positive-path update test with a plain-dict payload."""
    test_update_capacity_commitment(request_type=dict)
def test_update_capacity_commitment_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.update_capacity_commitment), "__call__"
    ) as call:
        client.update_capacity_commitment()
        call.assert_called()
        # A default request proto is synthesized for the wire.
        first = call.mock_calls[0]
        assert first.args[0] == reservation.UpdateCapacityCommitmentRequest()
@pytest.mark.asyncio
async def test_update_capacity_commitment_async(
    transport: str = "grpc_asyncio",
    request_type=reservation.UpdateCapacityCommitmentRequest,
):
    """Async UpdateCapacityCommitment round-trip: request forwarded, reply surfaced."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are optional at runtime and the API is mocked, so an
    # empty request suffices.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_capacity_commitment), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment(
                name="name_value",
                slot_count=1098,
                plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
                state=reservation.CapacityCommitment.State.PENDING,
                renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
            )
        )
        response = await client.update_capacity_commitment(request)
        # Fix: assert exactly one call (the bare truthiness check was weaker
        # than the sync test's assertion).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.UpdateCapacityCommitmentRequest()
    # The stubbed reply is surfaced typed and field-for-field intact.
    assert isinstance(response, reservation.CapacityCommitment)
    assert response.name == "name_value"
    assert response.slot_count == 1098
    assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
    assert response.state == reservation.CapacityCommitment.State.PENDING
    assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
@pytest.mark.asyncio
async def test_update_capacity_commitment_async_from_dict():
    """Async update test exercised with a plain-dict request payload."""
    await test_update_capacity_commitment_async(request_type=dict)
def test_update_capacity_commitment_field_headers():
    """The nested resource name is routed into x-goog-request-params."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Bind a non-empty value to the URI-routed nested field.
    request = reservation.UpdateCapacityCommitmentRequest()
    request.capacity_commitment.name = "capacity_commitment.name/value"

    with mock.patch.object(
        type(client.transport.update_capacity_commitment), "__call__"
    ) as call:
        call.return_value = reservation.CapacityCommitment()
        client.update_capacity_commitment(request)

        # The stub received the request object unchanged...
        assert len(call.mock_calls) == 1
        first = call.mock_calls[0]
        assert first.args[0] == request
        # ...and the routing header was attached to the call metadata.
        expected_header = (
            "x-goog-request-params",
            "capacity_commitment.name=capacity_commitment.name/value",
        )
        assert expected_header in first.kwargs["metadata"]
@pytest.mark.asyncio
async def test_update_capacity_commitment_field_headers_async():
    """Async variant: the nested resource name is routed into request params."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Bind a non-empty value to the URI-routed nested field.
    request = reservation.UpdateCapacityCommitmentRequest()
    request.capacity_commitment.name = "capacity_commitment.name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_capacity_commitment), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment()
        )
        await client.update_capacity_commitment(request)
        # Fix: assert exactly one call (bare truthiness was weaker than the
        # sync counterpart's check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # The routing header was attached to the call metadata.
        _, _, kw = call.mock_calls[0]
        assert (
            "x-goog-request-params",
            "capacity_commitment.name=capacity_commitment.name/value",
        ) in kw["metadata"]
def test_update_capacity_commitment_flattened():
    """Flattened kwargs populate the commitment and update-mask fields."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.update_capacity_commitment), "__call__"
    ) as call:
        call.return_value = reservation.CapacityCommitment()
        # Invoke with a truthy value for each flattened field.
        commitment = reservation.CapacityCommitment(name="name_value")
        mask = field_mask_pb2.FieldMask(paths=["paths_value"])
        client.update_capacity_commitment(
            capacity_commitment=commitment, update_mask=mask,
        )
        # Exactly one RPC, with both kwargs copied onto the request.
        assert len(call.mock_calls) == 1
        sent_request = call.mock_calls[0].args[0]
        assert sent_request.capacity_commitment == reservation.CapacityCommitment(
            name="name_value"
        )
        assert sent_request.update_mask == field_mask_pb2.FieldMask(
            paths=["paths_value"]
        )
def test_update_capacity_commitment_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = reservation.UpdateCapacityCommitmentRequest()
    with pytest.raises(ValueError):
        client.update_capacity_commitment(
            request,
            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
@pytest.mark.asyncio
async def test_update_capacity_commitment_flattened_async():
    """Async flattened kwargs populate the commitment and update-mask fields."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_capacity_commitment), "__call__"
    ) as call:
        # Fix: only the awaitable wrapper matters; the earlier bare
        # assignment of a plain response object was dead code and is removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment()
        )
        # Call the method with a truthy value for each flattened field.
        response = await client.update_capacity_commitment(
            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Fix: assert exactly one call (bare truthiness was weaker than the
        # sync counterpart's check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].capacity_commitment == reservation.CapacityCommitment(
            name="name_value"
        )
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_capacity_commitment_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = reservation.UpdateCapacityCommitmentRequest()
    with pytest.raises(ValueError):
        await client.update_capacity_commitment(
            request,
            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
def test_split_capacity_commitment(
    transport: str = "grpc", request_type=reservation.SplitCapacityCommitmentRequest
):
    """SplitCapacityCommitment forwards the request and returns the typed reply."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3: an empty request is valid; the API itself is mocked.
    request = request_type()

    with mock.patch.object(
        type(client.transport.split_capacity_commitment), "__call__"
    ) as call:
        call.return_value = reservation.SplitCapacityCommitmentResponse()
        response = client.split_capacity_commitment(request)

        # Exactly one RPC went out, carrying the canonical request proto.
        assert len(call.mock_calls) == 1
        first = call.mock_calls[0]
        assert first.args[0] == reservation.SplitCapacityCommitmentRequest()

    # The stubbed reply is surfaced with the expected type.
    assert isinstance(response, reservation.SplitCapacityCommitmentResponse)
def test_split_capacity_commitment_from_dict():
    """Re-run the positive-path split test with a plain-dict payload."""
    test_split_capacity_commitment(request_type=dict)
def test_split_capacity_commitment_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.split_capacity_commitment), "__call__"
    ) as call:
        client.split_capacity_commitment()
        call.assert_called()
        # A default request proto is synthesized for the wire.
        first = call.mock_calls[0]
        assert first.args[0] == reservation.SplitCapacityCommitmentRequest()
@pytest.mark.asyncio
async def test_split_capacity_commitment_async(
    transport: str = "grpc_asyncio",
    request_type=reservation.SplitCapacityCommitmentRequest,
):
    """Async SplitCapacityCommitment forwards the request, returns typed reply."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3: an empty request is valid; the API itself is mocked.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.split_capacity_commitment), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.SplitCapacityCommitmentResponse()
        )
        response = await client.split_capacity_commitment(request)
        # Fix: assert exactly one call (bare truthiness was weaker than the
        # sync counterpart's check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.SplitCapacityCommitmentRequest()
    # The stubbed reply is surfaced with the expected type.
    assert isinstance(response, reservation.SplitCapacityCommitmentResponse)
@pytest.mark.asyncio
async def test_split_capacity_commitment_async_from_dict():
    """Run the async split test with a dict-typed request payload."""
    await test_split_capacity_commitment_async(request_type=dict)
def test_split_capacity_commitment_field_headers():
    """The routing ``name`` field must be echoed as request-params metadata."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields must travel as a field header; use a non-empty value.
    req = reservation.SplitCapacityCommitmentRequest()
    req.name = "name/value"
    with mock.patch.object(
        type(client.transport.split_capacity_commitment), "__call__"
    ) as rpc:
        rpc.return_value = reservation.SplitCapacityCommitmentResponse()
        client.split_capacity_commitment(req)
        # Exactly one stub invocation, carrying the original request.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, rpc_kwargs = rpc.mock_calls[0]
        assert rpc_args[0] == req
    # The routing header must be present in the outgoing metadata.
    assert ("x-goog-request-params", "name=name/value",) in rpc_kwargs["metadata"]
@pytest.mark.asyncio
async def test_split_capacity_commitment_field_headers_async():
    """Async: the routing ``name`` field must be echoed as request-params metadata."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields must travel as a field header; use a non-empty value.
    req = reservation.SplitCapacityCommitmentRequest()
    req.name = "name/value"
    with mock.patch.object(
        type(client.transport.split_capacity_commitment), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.SplitCapacityCommitmentResponse()
        )
        await client.split_capacity_commitment(req)
        # The stub was invoked with the original request.
        assert len(rpc.mock_calls)
        _, rpc_args, rpc_kwargs = rpc.mock_calls[0]
        assert rpc_args[0] == req
    # The routing header must be present in the outgoing metadata.
    assert ("x-goog-request-params", "name=name/value",) in rpc_kwargs["metadata"]
def test_split_capacity_commitment_flattened():
    """Flattened keyword arguments must be folded into the request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.split_capacity_commitment), "__call__"
    ) as rpc:
        rpc.return_value = reservation.SplitCapacityCommitmentResponse()
        # Pass a truthy value for each flattened field via keywords.
        client.split_capacity_commitment(
            name="name_value", slot_count=1098,
        )
        # The request handed to the stub carries the flattened values.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, _ = rpc.mock_calls[0]
        sent = rpc_args[0]
        assert sent.name == "name_value"
        assert sent.slot_count == 1098
def test_split_capacity_commitment_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.split_capacity_commitment(
            reservation.SplitCapacityCommitmentRequest(),
            name="name_value",
            slot_count=1098,
        )
@pytest.mark.asyncio
async def test_split_capacity_commitment_flattened_async():
    """Async flattened call: keyword arguments populate the outgoing request.

    Fix: removed a dead synchronous ``call.return_value`` assignment that was
    immediately overwritten by the awaitable fake call below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.split_capacity_commitment), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.SplitCapacityCommitmentResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.split_capacity_commitment(
            name="name_value", slot_count=1098,
        )
    # Establish that the underlying call was made with the expected
    # request object values.
    assert len(call.mock_calls)
    _, args, _ = call.mock_calls[0]
    assert args[0].name == "name_value"
    assert args[0].slot_count == 1098
@pytest.mark.asyncio
async def test_split_capacity_commitment_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.split_capacity_commitment(
            reservation.SplitCapacityCommitmentRequest(),
            name="name_value",
            slot_count=1098,
        )
def test_merge_capacity_commitments(
    transport: str = "grpc", request_type=reservation.MergeCapacityCommitmentsRequest
):
    """Verify merge_capacity_commitments forwards the request and maps the response."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Empty request is fine: proto3 fields are optional and the API is mocked.
    req = request_type()
    with mock.patch.object(
        type(client.transport.merge_capacity_commitments), "__call__"
    ) as rpc:
        rpc.return_value = reservation.CapacityCommitment(
            name="name_value",
            slot_count=1098,
            plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
            state=reservation.CapacityCommitment.State.PENDING,
            renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
        )
        result = client.merge_capacity_commitments(req)
        # One stub invocation with a default-constructed request.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, _ = rpc.mock_calls[0]
        assert rpc_args[0] == reservation.MergeCapacityCommitmentsRequest()
    # Every field of the stubbed response must survive the round trip.
    assert isinstance(result, reservation.CapacityCommitment)
    assert result.name == "name_value"
    assert result.slot_count == 1098
    assert result.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
    assert result.state == reservation.CapacityCommitment.State.PENDING
    assert result.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
def test_merge_capacity_commitments_from_dict():
    """Run the main merge test with a dict-typed request payload."""
    test_merge_capacity_commitments(request_type=dict)
def test_merge_capacity_commitments_empty_call():
    """A call with no request and no flattened fields still sends a default request."""
    # Coverage failsafe: totally empty invocations must work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.merge_capacity_commitments), "__call__"
    ) as rpc:
        client.merge_capacity_commitments()
        rpc.assert_called()
        _, rpc_args, _ = rpc.mock_calls[0]
        assert rpc_args[0] == reservation.MergeCapacityCommitmentsRequest()
@pytest.mark.asyncio
async def test_merge_capacity_commitments_async(
    transport: str = "grpc_asyncio",
    request_type=reservation.MergeCapacityCommitmentsRequest,
):
    """Async variant: verify the request is forwarded and the response mapped."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Empty request suffices: proto3 fields are optional and the API is mocked.
    req = request_type()
    with mock.patch.object(
        type(client.transport.merge_capacity_commitments), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment(
                name="name_value",
                slot_count=1098,
                plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
                state=reservation.CapacityCommitment.State.PENDING,
                renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
            )
        )
        result = await client.merge_capacity_commitments(req)
        # The stub was invoked with a default-constructed request.
        assert len(rpc.mock_calls)
        _, rpc_args, _ = rpc.mock_calls[0]
        assert rpc_args[0] == reservation.MergeCapacityCommitmentsRequest()
    # Every field of the stubbed response must survive the round trip.
    assert isinstance(result, reservation.CapacityCommitment)
    assert result.name == "name_value"
    assert result.slot_count == 1098
    assert result.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
    assert result.state == reservation.CapacityCommitment.State.PENDING
    assert result.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
@pytest.mark.asyncio
async def test_merge_capacity_commitments_async_from_dict():
    """Run the async merge test with a dict-typed request payload."""
    await test_merge_capacity_commitments_async(request_type=dict)
def test_merge_capacity_commitments_field_headers():
    """The routing ``parent`` field must be echoed as request-params metadata."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields must travel as a field header; use a non-empty value.
    req = reservation.MergeCapacityCommitmentsRequest()
    req.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.merge_capacity_commitments), "__call__"
    ) as rpc:
        rpc.return_value = reservation.CapacityCommitment()
        client.merge_capacity_commitments(req)
        # Exactly one stub invocation, carrying the original request.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, rpc_kwargs = rpc.mock_calls[0]
        assert rpc_args[0] == req
    # The routing header must be present in the outgoing metadata.
    assert ("x-goog-request-params", "parent=parent/value",) in rpc_kwargs["metadata"]
@pytest.mark.asyncio
async def test_merge_capacity_commitments_field_headers_async():
    """Async: the routing ``parent`` field must be echoed as request-params metadata."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields must travel as a field header; use a non-empty value.
    req = reservation.MergeCapacityCommitmentsRequest()
    req.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.merge_capacity_commitments), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment()
        )
        await client.merge_capacity_commitments(req)
        # The stub was invoked with the original request.
        assert len(rpc.mock_calls)
        _, rpc_args, rpc_kwargs = rpc.mock_calls[0]
        assert rpc_args[0] == req
    # The routing header must be present in the outgoing metadata.
    assert ("x-goog-request-params", "parent=parent/value",) in rpc_kwargs["metadata"]
def test_merge_capacity_commitments_flattened():
    """Flattened keyword arguments must be folded into the request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.merge_capacity_commitments), "__call__"
    ) as rpc:
        rpc.return_value = reservation.CapacityCommitment()
        # Pass a truthy value for each flattened field via keywords.
        client.merge_capacity_commitments(
            parent="parent_value",
            capacity_commitment_ids=["capacity_commitment_ids_value"],
        )
        # The request handed to the stub carries the flattened values.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, _ = rpc.mock_calls[0]
        sent = rpc_args[0]
        assert sent.parent == "parent_value"
        assert sent.capacity_commitment_ids == ["capacity_commitment_ids_value"]
def test_merge_capacity_commitments_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.merge_capacity_commitments(
            reservation.MergeCapacityCommitmentsRequest(),
            parent="parent_value",
            capacity_commitment_ids=["capacity_commitment_ids_value"],
        )
@pytest.mark.asyncio
async def test_merge_capacity_commitments_flattened_async():
    """Async flattened call: keyword arguments populate the outgoing request.

    Fix: removed a dead synchronous ``call.return_value`` assignment that was
    immediately overwritten by the awaitable fake call below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.merge_capacity_commitments), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.CapacityCommitment()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.merge_capacity_commitments(
            parent="parent_value",
            capacity_commitment_ids=["capacity_commitment_ids_value"],
        )
    # Establish that the underlying call was made with the expected
    # request object values.
    assert len(call.mock_calls)
    _, args, _ = call.mock_calls[0]
    assert args[0].parent == "parent_value"
    assert args[0].capacity_commitment_ids == ["capacity_commitment_ids_value"]
@pytest.mark.asyncio
async def test_merge_capacity_commitments_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.merge_capacity_commitments(
            reservation.MergeCapacityCommitmentsRequest(),
            parent="parent_value",
            capacity_commitment_ids=["capacity_commitment_ids_value"],
        )
def test_create_assignment(
    transport: str = "grpc", request_type=reservation.CreateAssignmentRequest
):
    """Verify create_assignment forwards the request and maps the response."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Empty request is fine: proto3 fields are optional and the API is mocked.
    req = request_type()
    with mock.patch.object(
        type(client.transport.create_assignment), "__call__"
    ) as rpc:
        rpc.return_value = reservation.Assignment(
            name="name_value",
            assignee="assignee_value",
            job_type=reservation.Assignment.JobType.PIPELINE,
            state=reservation.Assignment.State.PENDING,
        )
        result = client.create_assignment(req)
        # One stub invocation with a default-constructed request.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, _ = rpc.mock_calls[0]
        assert rpc_args[0] == reservation.CreateAssignmentRequest()
    # Every field of the stubbed response must survive the round trip.
    assert isinstance(result, reservation.Assignment)
    assert result.name == "name_value"
    assert result.assignee == "assignee_value"
    assert result.job_type == reservation.Assignment.JobType.PIPELINE
    assert result.state == reservation.Assignment.State.PENDING
def test_create_assignment_from_dict():
    """Run the main create-assignment test with a dict-typed request payload."""
    test_create_assignment(request_type=dict)
def test_create_assignment_empty_call():
    """A call with no request and no flattened fields still sends a default request."""
    # Coverage failsafe: totally empty invocations must work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.create_assignment), "__call__"
    ) as rpc:
        client.create_assignment()
        rpc.assert_called()
        _, rpc_args, _ = rpc.mock_calls[0]
        assert rpc_args[0] == reservation.CreateAssignmentRequest()
@pytest.mark.asyncio
async def test_create_assignment_async(
    transport: str = "grpc_asyncio", request_type=reservation.CreateAssignmentRequest
):
    """Async variant: verify the request is forwarded and the response mapped."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Empty request suffices: proto3 fields are optional and the API is mocked.
    req = request_type()
    with mock.patch.object(
        type(client.transport.create_assignment), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Assignment(
                name="name_value",
                assignee="assignee_value",
                job_type=reservation.Assignment.JobType.PIPELINE,
                state=reservation.Assignment.State.PENDING,
            )
        )
        result = await client.create_assignment(req)
        # The stub was invoked with a default-constructed request.
        assert len(rpc.mock_calls)
        _, rpc_args, _ = rpc.mock_calls[0]
        assert rpc_args[0] == reservation.CreateAssignmentRequest()
    # Every field of the stubbed response must survive the round trip.
    assert isinstance(result, reservation.Assignment)
    assert result.name == "name_value"
    assert result.assignee == "assignee_value"
    assert result.job_type == reservation.Assignment.JobType.PIPELINE
    assert result.state == reservation.Assignment.State.PENDING
@pytest.mark.asyncio
async def test_create_assignment_async_from_dict():
    """Run the async create-assignment test with a dict-typed request payload."""
    await test_create_assignment_async(request_type=dict)
def test_create_assignment_field_headers():
    """The routing ``parent`` field must be echoed as request-params metadata."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields must travel as a field header; use a non-empty value.
    req = reservation.CreateAssignmentRequest()
    req.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.create_assignment), "__call__"
    ) as rpc:
        rpc.return_value = reservation.Assignment()
        client.create_assignment(req)
        # Exactly one stub invocation, carrying the original request.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, rpc_kwargs = rpc.mock_calls[0]
        assert rpc_args[0] == req
    # The routing header must be present in the outgoing metadata.
    assert ("x-goog-request-params", "parent=parent/value",) in rpc_kwargs["metadata"]
@pytest.mark.asyncio
async def test_create_assignment_field_headers_async():
    """Async: the routing ``parent`` field must be echoed as request-params metadata."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields must travel as a field header; use a non-empty value.
    req = reservation.CreateAssignmentRequest()
    req.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.create_assignment), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Assignment()
        )
        await client.create_assignment(req)
        # The stub was invoked with the original request.
        assert len(rpc.mock_calls)
        _, rpc_args, rpc_kwargs = rpc.mock_calls[0]
        assert rpc_args[0] == req
    # The routing header must be present in the outgoing metadata.
    assert ("x-goog-request-params", "parent=parent/value",) in rpc_kwargs["metadata"]
def test_create_assignment_flattened():
    """Flattened keyword arguments must be folded into the request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.create_assignment), "__call__"
    ) as rpc:
        rpc.return_value = reservation.Assignment()
        # Pass a truthy value for each flattened field via keywords.
        client.create_assignment(
            parent="parent_value", assignment=reservation.Assignment(name="name_value"),
        )
        # The request handed to the stub carries the flattened values.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, _ = rpc.mock_calls[0]
        sent = rpc_args[0]
        assert sent.parent == "parent_value"
        assert sent.assignment == reservation.Assignment(name="name_value")
def test_create_assignment_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.create_assignment(
            reservation.CreateAssignmentRequest(),
            parent="parent_value",
            assignment=reservation.Assignment(name="name_value"),
        )
@pytest.mark.asyncio
async def test_create_assignment_flattened_async():
    """Async flattened call: keyword arguments populate the outgoing request.

    Fix: removed a dead synchronous ``call.return_value`` assignment that was
    immediately overwritten by the awaitable fake call below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_assignment), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Assignment()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_assignment(
            parent="parent_value", assignment=reservation.Assignment(name="name_value"),
        )
    # Establish that the underlying call was made with the expected
    # request object values.
    assert len(call.mock_calls)
    _, args, _ = call.mock_calls[0]
    assert args[0].parent == "parent_value"
    assert args[0].assignment == reservation.Assignment(name="name_value")
@pytest.mark.asyncio
async def test_create_assignment_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.create_assignment(
            reservation.CreateAssignmentRequest(),
            parent="parent_value",
            assignment=reservation.Assignment(name="name_value"),
        )
def test_list_assignments(
    transport: str = "grpc", request_type=reservation.ListAssignmentsRequest
):
    """Verify list_assignments forwards the request and wraps the response in a pager."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Empty request is fine: proto3 fields are optional and the API is mocked.
    req = request_type()
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as rpc:
        rpc.return_value = reservation.ListAssignmentsResponse(
            next_page_token="next_page_token_value",
        )
        result = client.list_assignments(req)
        # One stub invocation with a default-constructed request.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, _ = rpc.mock_calls[0]
        assert rpc_args[0] == reservation.ListAssignmentsRequest()
    # The raw response is wrapped in a pager exposing the page token.
    assert isinstance(result, pagers.ListAssignmentsPager)
    assert result.next_page_token == "next_page_token_value"
def test_list_assignments_from_dict():
    """Run the main list-assignments test with a dict-typed request payload."""
    test_list_assignments(request_type=dict)
def test_list_assignments_empty_call():
    """A call with no request and no flattened fields still sends a default request."""
    # Coverage failsafe: totally empty invocations must work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as rpc:
        client.list_assignments()
        rpc.assert_called()
        _, rpc_args, _ = rpc.mock_calls[0]
        assert rpc_args[0] == reservation.ListAssignmentsRequest()
@pytest.mark.asyncio
async def test_list_assignments_async(
    transport: str = "grpc_asyncio", request_type=reservation.ListAssignmentsRequest
):
    """Async variant: verify the request is forwarded and an async pager returned."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Empty request suffices: proto3 fields are optional and the API is mocked.
    req = request_type()
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.ListAssignmentsResponse(
                next_page_token="next_page_token_value",
            )
        )
        result = await client.list_assignments(req)
        # The stub was invoked with a default-constructed request.
        assert len(rpc.mock_calls)
        _, rpc_args, _ = rpc.mock_calls[0]
        assert rpc_args[0] == reservation.ListAssignmentsRequest()
    # The raw response is wrapped in an async pager exposing the page token.
    assert isinstance(result, pagers.ListAssignmentsAsyncPager)
    assert result.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_assignments_async_from_dict():
    """Run the async list-assignments test with a dict-typed request payload."""
    await test_list_assignments_async(request_type=dict)
def test_list_assignments_field_headers():
    """The routing ``parent`` field must be echoed as request-params metadata."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields must travel as a field header; use a non-empty value.
    req = reservation.ListAssignmentsRequest()
    req.parent = "parent/value"
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as rpc:
        rpc.return_value = reservation.ListAssignmentsResponse()
        client.list_assignments(req)
        # Exactly one stub invocation, carrying the original request.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, rpc_kwargs = rpc.mock_calls[0]
        assert rpc_args[0] == req
    # The routing header must be present in the outgoing metadata.
    assert ("x-goog-request-params", "parent=parent/value",) in rpc_kwargs["metadata"]
@pytest.mark.asyncio
async def test_list_assignments_field_headers_async():
    """Async: the routing ``parent`` field must be echoed as request-params metadata."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # URI-bound fields must travel as a field header; use a non-empty value.
    req = reservation.ListAssignmentsRequest()
    req.parent = "parent/value"
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.ListAssignmentsResponse()
        )
        await client.list_assignments(req)
        # The stub was invoked with the original request.
        assert len(rpc.mock_calls)
        _, rpc_args, rpc_kwargs = rpc.mock_calls[0]
        assert rpc_args[0] == req
    # The routing header must be present in the outgoing metadata.
    assert ("x-goog-request-params", "parent=parent/value",) in rpc_kwargs["metadata"]
def test_list_assignments_flattened():
    """The flattened ``parent`` keyword must be folded into the request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as rpc:
        rpc.return_value = reservation.ListAssignmentsResponse()
        # Pass a truthy value for the flattened field via a keyword argument.
        client.list_assignments(parent="parent_value",)
        # The request handed to the stub carries the flattened value.
        assert len(rpc.mock_calls) == 1
        _, rpc_args, _ = rpc.mock_calls[0]
        assert rpc_args[0].parent == "parent_value"
def test_list_assignments_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.list_assignments(
            reservation.ListAssignmentsRequest(), parent="parent_value",
        )
@pytest.mark.asyncio
async def test_list_assignments_flattened_async():
    """Async flattened call: the ``parent`` keyword populates the outgoing request.

    Fix: removed a dead synchronous ``call.return_value`` assignment that was
    immediately overwritten by the awaitable fake call below.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
        # Designate an appropriate (awaitable) return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.ListAssignmentsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_assignments(parent="parent_value",)
    # Establish that the underlying call was made with the expected
    # request object values.
    assert len(call.mock_calls)
    _, args, _ = call.mock_calls[0]
    assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_assignments_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.list_assignments(
            reservation.ListAssignmentsRequest(), parent="parent_value",
        )
def test_list_assignments_pager():
    """The sync pager flattens all assignments across the mocked pages.

    Fixes: instantiate ``AnonymousCredentials()`` — the class object itself was
    being passed as the credentials argument (every other test in this module
    passes an instance) — and build the result list with ``list(pager)``
    instead of a pass-through comprehension.
    """
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.ListAssignmentsResponse(
                assignments=[
                    reservation.Assignment(),
                    reservation.Assignment(),
                    reservation.Assignment(),
                ],
                next_page_token="abc",
            ),
            reservation.ListAssignmentsResponse(assignments=[], next_page_token="def",),
            reservation.ListAssignmentsResponse(
                assignments=[reservation.Assignment(),], next_page_token="ghi",
            ),
            reservation.ListAssignmentsResponse(
                assignments=[reservation.Assignment(), reservation.Assignment(),],
            ),
            RuntimeError,
        )
        metadata = ()
        # The pager should carry routing metadata for the (empty) parent field.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_assignments(request={})
        assert pager._metadata == metadata
        # Draining the pager yields all six assignments across the four pages.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, reservation.Assignment) for i in results)
def test_list_assignments_pages():
    """Iterating ``.pages`` yields one raw page per mocked response.

    Fix: instantiate ``AnonymousCredentials()`` — the class object itself was
    being passed as the credentials argument; every other test in this module
    passes an instance.
    """
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.ListAssignmentsResponse(
                assignments=[
                    reservation.Assignment(),
                    reservation.Assignment(),
                    reservation.Assignment(),
                ],
                next_page_token="abc",
            ),
            reservation.ListAssignmentsResponse(assignments=[], next_page_token="def",),
            reservation.ListAssignmentsResponse(
                assignments=[reservation.Assignment(),], next_page_token="ghi",
            ),
            reservation.ListAssignmentsResponse(
                assignments=[reservation.Assignment(), reservation.Assignment(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_assignments(request={}).pages)
        # Each page exposes the token of the raw response that produced it.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_assignments_async_pager():
    """The async pager flattens all assignments across the mocked pages.

    Fix: instantiate ``AnonymousCredentials()`` — the class object itself was
    being passed as the credentials argument; every other test in this module
    passes an instance.
    """
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_assignments), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.ListAssignmentsResponse(
                assignments=[
                    reservation.Assignment(),
                    reservation.Assignment(),
                    reservation.Assignment(),
                ],
                next_page_token="abc",
            ),
            reservation.ListAssignmentsResponse(assignments=[], next_page_token="def",),
            reservation.ListAssignmentsResponse(
                assignments=[reservation.Assignment(),], next_page_token="ghi",
            ),
            reservation.ListAssignmentsResponse(
                assignments=[reservation.Assignment(), reservation.Assignment(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_assignments(request={},)
        assert async_pager.next_page_token == "abc"
        # Draining the pager yields all six assignments across the four pages.
        responses = [response async for response in async_pager]
        assert len(responses) == 6
        assert all(isinstance(i, reservation.Assignment) for i in responses)
@pytest.mark.asyncio
async def test_list_assignments_async_pages():
    """Iterating ``.pages`` of the async pager yields one page per mocked response."""
    # Fix: instantiate AnonymousCredentials() instead of passing the class.
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_assignments), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.ListAssignmentsResponse(
                assignments=[
                    reservation.Assignment(),
                    reservation.Assignment(),
                    reservation.Assignment(),
                ],
                next_page_token="abc",
            ),
            reservation.ListAssignmentsResponse(assignments=[], next_page_token="def",),
            reservation.ListAssignmentsResponse(
                assignments=[reservation.Assignment(),], next_page_token="ghi",
            ),
            reservation.ListAssignmentsResponse(
                assignments=[reservation.Assignment(), reservation.Assignment(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_assignments(request={})).pages:
            pages.append(page_)
        # The final page carries the proto3 default (empty) token.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_delete_assignment(
    transport: str = "grpc", request_type=reservation.DeleteAssignmentRequest
):
    """delete_assignment forwards the request and returns None (empty RPC)."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Every proto3 field is optional and the API is mocked, so an empty
    # request is sufficient.
    request = request_type()
    with mock.patch.object(
        type(client.transport.delete_assignment), "__call__"
    ) as call:
        call.return_value = None
        response = client.delete_assignment(request)
        # Exactly one RPC was issued, carrying a default request message.
        assert len(call.mock_calls) == 1
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.DeleteAssignmentRequest()
    # The RPC has no payload.
    assert response is None
def test_delete_assignment_from_dict():
    """The request may also be supplied as a plain dict."""
    test_delete_assignment(request_type=dict)
def test_delete_assignment_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields
    must still issue the RPC with a default request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.delete_assignment), "__call__"
    ) as call:
        client.delete_assignment()
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.DeleteAssignmentRequest()
@pytest.mark.asyncio
async def test_delete_assignment_async(
    transport: str = "grpc_asyncio", request_type=reservation.DeleteAssignmentRequest
):
    """Async delete_assignment forwards the request and resolves to None."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # An empty request suffices: proto3 fields are optional, API is mocked.
    request = request_type()
    with mock.patch.object(
        type(client.transport.delete_assignment), "__call__"
    ) as call:
        # The async surface must be handed an awaitable fake.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_assignment(request)
        assert call.mock_calls
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.DeleteAssignmentRequest()
    # The RPC has no payload.
    assert response is None
@pytest.mark.asyncio
async def test_delete_assignment_async_from_dict():
    """The async request may also be supplied as a plain dict."""
    await test_delete_assignment_async(request_type=dict)
def test_delete_assignment_field_headers():
    """URI-bound request fields must surface as x-goog-request-params metadata."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the URI-bound field a non-empty value so routing is observable.
    request = reservation.DeleteAssignmentRequest()
    request.name = "name/value"
    with mock.patch.object(
        type(client.transport.delete_assignment), "__call__"
    ) as call:
        call.return_value = None
        client.delete_assignment(request)
        assert len(call.mock_calls) == 1
        _, call_args, kwargs = call.mock_calls[0]
        assert call_args[0] == request
        # The routing header must mirror the request's name field.
        assert ("x-goog-request-params", "name=name/value",) in kwargs["metadata"]
@pytest.mark.asyncio
async def test_delete_assignment_field_headers_async():
    """Async variant: URI-bound fields become x-goog-request-params metadata."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the URI-bound field a non-empty value so routing is observable.
    request = reservation.DeleteAssignmentRequest()
    request.name = "name/value"
    with mock.patch.object(
        type(client.transport.delete_assignment), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_assignment(request)
        assert call.mock_calls
        _, call_args, kwargs = call.mock_calls[0]
        assert call_args[0] == request
        # The routing header must mirror the request's name field.
        assert ("x-goog-request-params", "name=name/value",) in kwargs["metadata"]
def test_delete_assignment_flattened():
    """Flattened keyword arguments are folded into the request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.delete_assignment), "__call__"
    ) as call:
        call.return_value = None
        # Truthy value for each flattened field, passed as a keyword.
        client.delete_assignment(name="name_value",)
        assert len(call.mock_calls) == 1
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0].name == "name_value"
def test_delete_assignment_flattened_error():
    """Supplying a request object together with flattened fields is rejected."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.delete_assignment(
            reservation.DeleteAssignmentRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_delete_assignment_flattened_async():
    """Async variant: flattened kwargs are folded into the request message."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.delete_assignment), "__call__"
    ) as call:
        # Designate the awaitable fake only (a redundant plain assignment
        # that was immediately overwritten has been removed).
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_assignment(name="name_value",)
        # Establish that the underlying call carried the expected values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_assignment_flattened_error_async():
    """Async variant: request object plus flattened fields is rejected."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.delete_assignment(
            reservation.DeleteAssignmentRequest(), name="name_value",
        )
def test_search_assignments(
    transport: str = "grpc", request_type=reservation.SearchAssignmentsRequest
):
    """search_assignments returns a pager exposing the response's page token."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # An empty request suffices: proto3 fields are optional, API is mocked.
    request = request_type()
    with mock.patch.object(
        type(client.transport.search_assignments), "__call__"
    ) as call:
        call.return_value = reservation.SearchAssignmentsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.search_assignments(request)
        # One RPC with a default request message.
        assert len(call.mock_calls) == 1
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.SearchAssignmentsRequest()
    # The client wraps the raw response in a pager.
    assert isinstance(response, pagers.SearchAssignmentsPager)
    assert response.next_page_token == "next_page_token_value"
def test_search_assignments_from_dict():
    """The request may also be supplied as a plain dict."""
    test_search_assignments(request_type=dict)
def test_search_assignments_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields
    must still issue the RPC with a default request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.search_assignments), "__call__"
    ) as call:
        client.search_assignments()
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.SearchAssignmentsRequest()
@pytest.mark.asyncio
async def test_search_assignments_async(
    transport: str = "grpc_asyncio", request_type=reservation.SearchAssignmentsRequest
):
    """Async search_assignments returns an async pager with the page token."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # An empty request suffices: proto3 fields are optional, API is mocked.
    request = request_type()
    with mock.patch.object(
        type(client.transport.search_assignments), "__call__"
    ) as call:
        # The async surface must be handed an awaitable fake.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.SearchAssignmentsResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.search_assignments(request)
        assert call.mock_calls
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.SearchAssignmentsRequest()
    # The client wraps the raw response in an async pager.
    assert isinstance(response, pagers.SearchAssignmentsAsyncPager)
    assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_search_assignments_async_from_dict():
    """The async request may also be supplied as a plain dict."""
    await test_search_assignments_async(request_type=dict)
def test_search_assignments_field_headers():
    """URI-bound request fields must surface as x-goog-request-params metadata."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the URI-bound field a non-empty value so routing is observable.
    request = reservation.SearchAssignmentsRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.search_assignments), "__call__"
    ) as call:
        call.return_value = reservation.SearchAssignmentsResponse()
        client.search_assignments(request)
        assert len(call.mock_calls) == 1
        _, call_args, kwargs = call.mock_calls[0]
        assert call_args[0] == request
        # The routing header must mirror the request's parent field.
        assert ("x-goog-request-params", "parent=parent/value",) in kwargs["metadata"]
@pytest.mark.asyncio
async def test_search_assignments_field_headers_async():
    """Async variant: URI-bound fields become x-goog-request-params metadata."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the URI-bound field a non-empty value so routing is observable.
    request = reservation.SearchAssignmentsRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.search_assignments), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.SearchAssignmentsResponse()
        )
        await client.search_assignments(request)
        assert call.mock_calls
        _, call_args, kwargs = call.mock_calls[0]
        assert call_args[0] == request
        # The routing header must mirror the request's parent field.
        assert ("x-goog-request-params", "parent=parent/value",) in kwargs["metadata"]
def test_search_assignments_flattened():
    """Flattened keyword arguments are folded into the request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.search_assignments), "__call__"
    ) as call:
        call.return_value = reservation.SearchAssignmentsResponse()
        # Truthy value for each flattened field, passed as keywords.
        client.search_assignments(
            parent="parent_value", query="query_value",
        )
        assert len(call.mock_calls) == 1
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0].parent == "parent_value"
        assert call_args[0].query == "query_value"
def test_search_assignments_flattened_error():
    """Supplying a request object together with flattened fields is rejected."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.search_assignments(
            reservation.SearchAssignmentsRequest(),
            parent="parent_value",
            query="query_value",
        )
@pytest.mark.asyncio
async def test_search_assignments_flattened_async():
    """Async variant: flattened kwargs are folded into the request message."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.search_assignments), "__call__"
    ) as call:
        # Designate the awaitable fake only (a redundant plain assignment
        # that was immediately overwritten has been removed).
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.SearchAssignmentsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.search_assignments(
            parent="parent_value", query="query_value",
        )
        # Establish that the underlying call carried the expected values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].query == "query_value"
@pytest.mark.asyncio
async def test_search_assignments_flattened_error_async():
    """Async variant: request object plus flattened fields is rejected."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.search_assignments(
            reservation.SearchAssignmentsRequest(),
            parent="parent_value",
            query="query_value",
        )
def test_search_assignments_pager():
    """The sync pager flattens all mocked pages and records routing metadata."""
    # Fixes: instantiate AnonymousCredentials() (the class itself was passed),
    # and drop the dead ``metadata = ()`` assignment that was immediately
    # overwritten.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_assignments), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.SearchAssignmentsResponse(
                assignments=[
                    reservation.Assignment(),
                    reservation.Assignment(),
                    reservation.Assignment(),
                ],
                next_page_token="abc",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[], next_page_token="def",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[reservation.Assignment(),], next_page_token="ghi",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[reservation.Assignment(), reservation.Assignment(),],
            ),
            RuntimeError,
        )
        # Routing metadata derived from the (empty) ``parent`` field.
        expected_metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.search_assignments(request={})
        assert pager._metadata == expected_metadata
        results = [i for i in pager]
        # 3 + 0 + 1 + 2 assignments across the four pages.
        assert len(results) == 6
        assert all(isinstance(i, reservation.Assignment) for i in results)
def test_search_assignments_pages():
    """Iterating ``pages`` of search_assignments yields one page per response."""
    # Fix: instantiate AnonymousCredentials() instead of passing the class.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_assignments), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.SearchAssignmentsResponse(
                assignments=[
                    reservation.Assignment(),
                    reservation.Assignment(),
                    reservation.Assignment(),
                ],
                next_page_token="abc",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[], next_page_token="def",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[reservation.Assignment(),], next_page_token="ghi",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[reservation.Assignment(), reservation.Assignment(),],
            ),
            RuntimeError,
        )
        pages = list(client.search_assignments(request={}).pages)
        # The final page carries the proto3 default (empty) token.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_search_assignments_async_pager():
    """The async pager for search_assignments flattens every mocked page."""
    # Fix: instantiate AnonymousCredentials() instead of passing the class.
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_assignments),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.SearchAssignmentsResponse(
                assignments=[
                    reservation.Assignment(),
                    reservation.Assignment(),
                    reservation.Assignment(),
                ],
                next_page_token="abc",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[], next_page_token="def",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[reservation.Assignment(),], next_page_token="ghi",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[reservation.Assignment(), reservation.Assignment(),],
            ),
            RuntimeError,
        )
        async_pager = await client.search_assignments(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)
        # 3 + 0 + 1 + 2 assignments across the four pages.
        assert len(responses) == 6
        assert all(isinstance(i, reservation.Assignment) for i in responses)
@pytest.mark.asyncio
async def test_search_assignments_async_pages():
    """Iterating ``.pages`` of the async pager yields one page per response."""
    # Fix: instantiate AnonymousCredentials() instead of passing the class.
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_assignments),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            reservation.SearchAssignmentsResponse(
                assignments=[
                    reservation.Assignment(),
                    reservation.Assignment(),
                    reservation.Assignment(),
                ],
                next_page_token="abc",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[], next_page_token="def",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[reservation.Assignment(),], next_page_token="ghi",
            ),
            reservation.SearchAssignmentsResponse(
                assignments=[reservation.Assignment(), reservation.Assignment(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.search_assignments(request={})).pages:
            pages.append(page_)
        # The final page carries the proto3 default (empty) token.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_move_assignment(
    transport: str = "grpc", request_type=reservation.MoveAssignmentRequest
):
    """move_assignment returns the Assignment produced by the transport."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # An empty request suffices: proto3 fields are optional, API is mocked.
    request = request_type()
    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
        call.return_value = reservation.Assignment(
            name="name_value",
            assignee="assignee_value",
            job_type=reservation.Assignment.JobType.PIPELINE,
            state=reservation.Assignment.State.PENDING,
        )
        response = client.move_assignment(request)
        # One RPC with a default request message.
        assert len(call.mock_calls) == 1
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.MoveAssignmentRequest()
    # The mocked payload must round-trip through the client unchanged.
    assert isinstance(response, reservation.Assignment)
    assert response.name == "name_value"
    assert response.assignee == "assignee_value"
    assert response.job_type == reservation.Assignment.JobType.PIPELINE
    assert response.state == reservation.Assignment.State.PENDING
def test_move_assignment_from_dict():
    """The request may also be supplied as a plain dict."""
    test_move_assignment(request_type=dict)
def test_move_assignment_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields
    must still issue the RPC with a default request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
        client.move_assignment()
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.MoveAssignmentRequest()
@pytest.mark.asyncio
async def test_move_assignment_async(
    transport: str = "grpc_asyncio", request_type=reservation.MoveAssignmentRequest
):
    """Async move_assignment resolves to the Assignment from the transport."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # An empty request suffices: proto3 fields are optional, API is mocked.
    request = request_type()
    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
        # The async surface must be handed an awaitable fake.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Assignment(
                name="name_value",
                assignee="assignee_value",
                job_type=reservation.Assignment.JobType.PIPELINE,
                state=reservation.Assignment.State.PENDING,
            )
        )
        response = await client.move_assignment(request)
        assert call.mock_calls
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.MoveAssignmentRequest()
    # The mocked payload must round-trip through the client unchanged.
    assert isinstance(response, reservation.Assignment)
    assert response.name == "name_value"
    assert response.assignee == "assignee_value"
    assert response.job_type == reservation.Assignment.JobType.PIPELINE
    assert response.state == reservation.Assignment.State.PENDING
@pytest.mark.asyncio
async def test_move_assignment_async_from_dict():
    """The async request may also be supplied as a plain dict."""
    await test_move_assignment_async(request_type=dict)
def test_move_assignment_field_headers():
    """URI-bound request fields must surface as x-goog-request-params metadata."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the URI-bound field a non-empty value so routing is observable.
    request = reservation.MoveAssignmentRequest()
    request.name = "name/value"
    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
        call.return_value = reservation.Assignment()
        client.move_assignment(request)
        assert len(call.mock_calls) == 1
        _, call_args, kwargs = call.mock_calls[0]
        assert call_args[0] == request
        # The routing header must mirror the request's name field.
        assert ("x-goog-request-params", "name=name/value",) in kwargs["metadata"]
@pytest.mark.asyncio
async def test_move_assignment_field_headers_async():
    """Async variant: URI-bound fields become x-goog-request-params metadata."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the URI-bound field a non-empty value so routing is observable.
    request = reservation.MoveAssignmentRequest()
    request.name = "name/value"
    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Assignment()
        )
        await client.move_assignment(request)
        assert call.mock_calls
        _, call_args, kwargs = call.mock_calls[0]
        assert call_args[0] == request
        # The routing header must mirror the request's name field.
        assert ("x-goog-request-params", "name=name/value",) in kwargs["metadata"]
def test_move_assignment_flattened():
    """Flattened keyword arguments are folded into the request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
        call.return_value = reservation.Assignment()
        # Truthy value for each flattened field, passed as keywords.
        client.move_assignment(
            name="name_value", destination_id="destination_id_value",
        )
        assert len(call.mock_calls) == 1
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0].name == "name_value"
        assert call_args[0].destination_id == "destination_id_value"
def test_move_assignment_flattened_error():
    """Supplying a request object together with flattened fields is rejected."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client.move_assignment(
            reservation.MoveAssignmentRequest(),
            name="name_value",
            destination_id="destination_id_value",
        )
@pytest.mark.asyncio
async def test_move_assignment_flattened_async():
    """Async variant: flattened kwargs are folded into the request message."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
        # Designate the awaitable fake only (a redundant plain assignment
        # that was immediately overwritten has been removed).
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.Assignment()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.move_assignment(
            name="name_value", destination_id="destination_id_value",
        )
        # Establish that the underlying call carried the expected values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].destination_id == "destination_id_value"
@pytest.mark.asyncio
async def test_move_assignment_flattened_error_async():
    """Async variant: request object plus flattened fields is rejected."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        await client.move_assignment(
            reservation.MoveAssignmentRequest(),
            name="name_value",
            destination_id="destination_id_value",
        )
def test_get_bi_reservation(
    transport: str = "grpc", request_type=reservation.GetBiReservationRequest
):
    """get_bi_reservation returns the BiReservation from the transport."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # An empty request suffices: proto3 fields are optional, API is mocked.
    request = request_type()
    with mock.patch.object(
        type(client.transport.get_bi_reservation), "__call__"
    ) as call:
        call.return_value = reservation.BiReservation(name="name_value", size=443,)
        response = client.get_bi_reservation(request)
        # One RPC with a default request message.
        assert len(call.mock_calls) == 1
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.GetBiReservationRequest()
    # The mocked payload must round-trip through the client unchanged.
    assert isinstance(response, reservation.BiReservation)
    assert response.name == "name_value"
    assert response.size == 443
def test_get_bi_reservation_from_dict():
    """The request may also be supplied as a plain dict."""
    test_get_bi_reservation(request_type=dict)
def test_get_bi_reservation_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields
    must still issue the RPC with a default request message."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.get_bi_reservation), "__call__"
    ) as call:
        client.get_bi_reservation()
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.GetBiReservationRequest()
@pytest.mark.asyncio
async def test_get_bi_reservation_async(
    transport: str = "grpc_asyncio", request_type=reservation.GetBiReservationRequest
):
    """Async get_bi_reservation resolves to the BiReservation from the transport."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # An empty request suffices: proto3 fields are optional, API is mocked.
    request = request_type()
    with mock.patch.object(
        type(client.transport.get_bi_reservation), "__call__"
    ) as call:
        # The async surface must be handed an awaitable fake.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.BiReservation(name="name_value", size=443,)
        )
        response = await client.get_bi_reservation(request)
        assert call.mock_calls
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == reservation.GetBiReservationRequest()
    # The mocked payload must round-trip through the client unchanged.
    assert isinstance(response, reservation.BiReservation)
    assert response.name == "name_value"
    assert response.size == 443
@pytest.mark.asyncio
async def test_get_bi_reservation_async_from_dict():
    """The async request may also be supplied as a plain dict."""
    await test_get_bi_reservation_async(request_type=dict)
def test_get_bi_reservation_field_headers():
    """URI-bound request fields must surface as x-goog-request-params metadata."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Give the URI-bound field a non-empty value so routing is observable.
    request = reservation.GetBiReservationRequest()
    request.name = "name/value"
    with mock.patch.object(
        type(client.transport.get_bi_reservation), "__call__"
    ) as call:
        call.return_value = reservation.BiReservation()
        client.get_bi_reservation(request)
        assert len(call.mock_calls) == 1
        _, call_args, kwargs = call.mock_calls[0]
        assert call_args[0] == request
        # The routing header must mirror the request's name field.
        assert ("x-goog-request-params", "name=name/value",) in kwargs["metadata"]
@pytest.mark.asyncio
async def test_get_bi_reservation_field_headers_async():
    """Async GetBiReservation must send the resource name as a routing header."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.GetBiReservationRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_bi_reservation), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.BiReservation()
        )
        await client.get_bi_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_bi_reservation_flattened():
    """The flattened get_bi_reservation(name=...) call populates the request field."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_bi_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.BiReservation()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_bi_reservation(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
def test_get_bi_reservation_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_bi_reservation(
            reservation.GetBiReservationRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_get_bi_reservation_flattened_async():
    """The flattened async get_bi_reservation(name=...) call populates the request field."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_bi_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  The awaitable
        # wrapper is what the async client expects; the redundant bare
        # BiReservation assignment that preceded it was a dead store and
        # has been removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.BiReservation()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_bi_reservation(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_bi_reservation_flattened_error_async():
    """Async: request object plus flattened fields must raise ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_bi_reservation(
            reservation.GetBiReservationRequest(), name="name_value",
        )
def test_update_bi_reservation(
    transport: str = "grpc", request_type=reservation.UpdateBiReservationRequest
):
    """UpdateBiReservation sends the request and unpacks the response fields."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_bi_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.BiReservation(name="name_value", size=443,)
        response = client.update_bi_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.UpdateBiReservationRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, reservation.BiReservation)
    assert response.name == "name_value"
    assert response.size == 443
def test_update_bi_reservation_from_dict():
    """Exercise UpdateBiReservation with a dict-typed request."""
    test_update_bi_reservation(request_type=dict)
def test_update_bi_reservation_empty_call():
    """Calling update_bi_reservation() with no arguments sends an empty request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_bi_reservation), "__call__"
    ) as call:
        client.update_bi_reservation()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.UpdateBiReservationRequest()
@pytest.mark.asyncio
async def test_update_bi_reservation_async(
    transport: str = "grpc_asyncio", request_type=reservation.UpdateBiReservationRequest
):
    """Async UpdateBiReservation sends the request and unpacks the response fields."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_bi_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.BiReservation(name="name_value", size=443,)
        )
        response = await client.update_bi_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == reservation.UpdateBiReservationRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, reservation.BiReservation)
    assert response.name == "name_value"
    assert response.size == 443
@pytest.mark.asyncio
async def test_update_bi_reservation_async_from_dict():
    """Exercise the async UpdateBiReservation code path with a dict-typed request."""
    await test_update_bi_reservation_async(request_type=dict)
def test_update_bi_reservation_field_headers():
    """UpdateBiReservation must send bi_reservation.name as a routing header."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.UpdateBiReservationRequest()
    request.bi_reservation.name = "bi_reservation.name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_bi_reservation), "__call__"
    ) as call:
        call.return_value = reservation.BiReservation()
        client.update_bi_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "bi_reservation.name=bi_reservation.name/value",
    ) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_bi_reservation_field_headers_async():
    """Async UpdateBiReservation must send bi_reservation.name as a routing header."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = reservation.UpdateBiReservationRequest()
    request.bi_reservation.name = "bi_reservation.name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_bi_reservation), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.BiReservation()
        )
        await client.update_bi_reservation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "bi_reservation.name=bi_reservation.name/value",
    ) in kw["metadata"]
def test_update_bi_reservation_flattened():
    """The flattened update_bi_reservation(...) call populates the request fields."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_bi_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = reservation.BiReservation()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_bi_reservation(
            bi_reservation=reservation.BiReservation(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].bi_reservation == reservation.BiReservation(name="name_value")
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
def test_update_bi_reservation_flattened_error():
    """Request object plus flattened fields must raise ValueError."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_bi_reservation(
            reservation.UpdateBiReservationRequest(),
            bi_reservation=reservation.BiReservation(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
@pytest.mark.asyncio
async def test_update_bi_reservation_flattened_async():
    """The flattened async update_bi_reservation(...) call populates the request fields."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_bi_reservation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  The awaitable
        # wrapper is what the async client expects; the redundant bare
        # BiReservation assignment that preceded it was a dead store and
        # has been removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            reservation.BiReservation()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_bi_reservation(
            bi_reservation=reservation.BiReservation(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].bi_reservation == reservation.BiReservation(name="name_value")
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_bi_reservation_flattened_error_async():
    """Async: request object plus flattened fields must raise ValueError."""
    client = ReservationServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_bi_reservation(
            reservation.UpdateBiReservationRequest(),
            bi_reservation=reservation.BiReservation(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
def test_credentials_transport_error():
    """Client options that conflict with an explicit transport instance raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.ReservationServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ReservationServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.ReservationServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ReservationServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.ReservationServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ReservationServiceClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A client built with a pre-constructed transport exposes that exact object."""
    custom_transport = transports.ReservationServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = ReservationServiceClient(transport=custom_transport)
    assert client.transport is custom_transport
def test_transport_get_channel():
    """Both gRPC transport flavors expose a usable grpc_channel."""
    for transport_cls in (
        transports.ReservationServiceGrpcTransport,
        transports.ReservationServiceGrpcAsyncIOTransport,
    ):
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ReservationServiceGrpcTransport,
        transports.ReservationServiceGrpcAsyncIOTransport,
    ],
)
def test_transport_adc(transport_class):
    """Transports fall back to Application Default Credentials when none are given."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
def test_transport_grpc_default():
    """With no transport argument, the client defaults to the gRPC transport."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    assert isinstance(client.transport, transports.ReservationServiceGrpcTransport)
def test_reservation_service_base_transport_error():
    """Supplying both credentials and credentials_file raises DuplicateCredentialArgs."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.ReservationServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_reservation_service_base_transport():
    """Every RPC method on the abstract base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.ReservationServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "create_reservation",
        "list_reservations",
        "get_reservation",
        "delete_reservation",
        "update_reservation",
        "create_capacity_commitment",
        "list_capacity_commitments",
        "get_capacity_commitment",
        "delete_capacity_commitment",
        "update_capacity_commitment",
        "split_capacity_commitment",
        "merge_capacity_commitments",
        "create_assignment",
        "list_assignments",
        "delete_assignment",
        "search_assignments",
        "move_assignment",
        "get_bi_reservation",
        "update_bi_reservation",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
@requires_google_auth_gte_1_25_0
def test_reservation_service_base_transport_with_credentials_file():
    """credentials_file is loaded with the service default_scopes (google-auth >= 1.25)."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ReservationServiceTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/bigquery",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id="octopus",
        )
@requires_google_auth_lt_1_25_0
def test_reservation_service_base_transport_with_credentials_file_old_google_auth():
    """credentials_file is loaded with scopes= directly (google-auth < 1.25)."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ReservationServiceTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=(
                "https://www.googleapis.com/auth/bigquery",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id="octopus",
        )
def test_reservation_service_base_transport_with_adc():
    """The base transport falls back to ADC when no credentials are supplied."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ReservationServiceTransport()
        adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_reservation_service_auth_adc():
    """The client requests ADC with default_scopes (google-auth >= 1.25)."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        ReservationServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/bigquery",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id=None,
        )
@requires_google_auth_lt_1_25_0
def test_reservation_service_auth_adc_old_google_auth():
    """The client requests ADC with scopes= directly (google-auth < 1.25)."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        ReservationServiceClient()
        adc.assert_called_once_with(
            scopes=(
                "https://www.googleapis.com/auth/bigquery",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ReservationServiceGrpcTransport,
        transports.ReservationServiceGrpcAsyncIOTransport,
    ],
)
@requires_google_auth_gte_1_25_0
def test_reservation_service_transport_auth_adc(transport_class):
    """Transports request ADC with caller scopes plus default_scopes (google-auth >= 1.25)."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=(
                "https://www.googleapis.com/auth/bigquery",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ReservationServiceGrpcTransport,
        transports.ReservationServiceGrpcAsyncIOTransport,
    ],
)
@requires_google_auth_lt_1_25_0
def test_reservation_service_transport_auth_adc_old_google_auth(transport_class):
    """Transports request ADC with the default scopes (google-auth < 1.25)."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus")
        adc.assert_called_once_with(
            scopes=(
                "https://www.googleapis.com/auth/bigquery",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.ReservationServiceGrpcTransport, grpc_helpers),
        (transports.ReservationServiceGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
def test_reservation_service_transport_create_channel(transport_class, grpc_helpers):
    """Transports build their gRPC channel with the expected host, scopes and options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        create_channel.assert_called_with(
            "bigqueryreservation.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                "https://www.googleapis.com/auth/bigquery",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            scopes=["1", "2"],
            default_host="bigqueryreservation.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ReservationServiceGrpcTransport,
        transports.ReservationServiceGrpcAsyncIOTransport,
    ],
)
def test_reservation_service_grpc_transport_client_cert_source_for_mtls(
    transport_class,
):
    """mTLS channel creds come from ssl_channel_credentials or the cert-source callback."""
    cred = ga_credentials.AnonymousCredentials()
    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )
def test_reservation_service_host_no_port():
    """An api_endpoint without a port gets the default :443 appended."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="bigqueryreservation.googleapis.com"
        ),
    )
    assert client.transport._host == "bigqueryreservation.googleapis.com:443"
def test_reservation_service_host_with_port():
    """An explicit port in api_endpoint is preserved as-is."""
    client = ReservationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="bigqueryreservation.googleapis.com:8000"
        ),
    )
    assert client.transport._host == "bigqueryreservation.googleapis.com:8000"
def test_reservation_service_grpc_transport_channel():
    """A caller-provided gRPC channel is adopted verbatim by the transport."""
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.ReservationServiceGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Compare to None with `is`, not `==` (PEP 8 / flake8 E711).
    assert transport._ssl_channel_credentials is None
def test_reservation_service_grpc_asyncio_transport_channel():
    """A caller-provided asyncio gRPC channel is adopted verbatim by the transport."""
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.ReservationServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Compare to None with `is`, not `==` (PEP 8 / flake8 E711).
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ReservationServiceGrpcTransport,
        transports.ReservationServiceGrpcAsyncIOTransport,
    ],
)
def test_reservation_service_transport_channel_mtls_with_client_cert_source(
    transport_class,
):
    """Deprecated api_mtls_endpoint + client_cert_source still build an mTLS channel."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            cred = ga_credentials.AnonymousCredentials()
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ReservationServiceGrpcTransport,
        transports.ReservationServiceGrpcAsyncIOTransport,
    ],
)
def test_reservation_service_transport_channel_mtls_with_adc(transport_class):
    """Deprecated api_mtls_endpoint with ADC-derived SSL creds builds an mTLS channel."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_assignment_path():
    """assignment_path renders the full assignment resource name."""
    parts = {
        "project": "squid",
        "location": "clam",
        "reservation": "whelk",
        "assignment": "octopus",
    }
    expected = (
        "projects/{project}/locations/{location}"
        "/reservations/{reservation}/assignments/{assignment}"
    ).format(**parts)
    actual = ReservationServiceClient.assignment_path(
        parts["project"], parts["location"], parts["reservation"], parts["assignment"]
    )
    assert expected == actual
def test_parse_assignment_path():
    """parse_assignment_path inverts assignment_path (round-trip)."""
    components = {
        "project": "oyster",
        "location": "nudibranch",
        "reservation": "cuttlefish",
        "assignment": "mussel",
    }
    rendered = ReservationServiceClient.assignment_path(**components)
    # Check that the path construction is reversible.
    assert ReservationServiceClient.parse_assignment_path(rendered) == components
def test_bi_reservation_path():
    """bi_reservation_path renders the singleton bireservation resource name."""
    proj, loc = "winkle", "nautilus"
    expected = "projects/{}/locations/{}/bireservation".format(proj, loc)
    assert ReservationServiceClient.bi_reservation_path(proj, loc) == expected
def test_parse_bi_reservation_path():
    """parse_bi_reservation_path inverts bi_reservation_path (round-trip)."""
    components = {
        "project": "scallop",
        "location": "abalone",
    }
    rendered = ReservationServiceClient.bi_reservation_path(**components)
    assert ReservationServiceClient.parse_bi_reservation_path(rendered) == components
def test_capacity_commitment_path():
    """capacity_commitment_path renders the capacity commitment resource name."""
    proj, loc, commitment = "squid", "clam", "whelk"
    expected = "projects/{}/locations/{}/capacityCommitments/{}".format(
        proj, loc, commitment
    )
    actual = ReservationServiceClient.capacity_commitment_path(proj, loc, commitment)
    assert expected == actual
def test_parse_capacity_commitment_path():
    """parse_capacity_commitment_path inverts capacity_commitment_path."""
    components = {
        "project": "octopus",
        "location": "oyster",
        "capacity_commitment": "nudibranch",
    }
    rendered = ReservationServiceClient.capacity_commitment_path(**components)
    assert (
        ReservationServiceClient.parse_capacity_commitment_path(rendered) == components
    )
def test_reservation_path():
    """reservation_path renders the reservation resource name."""
    proj, loc, res = "cuttlefish", "mussel", "winkle"
    expected = "projects/{}/locations/{}/reservations/{}".format(proj, loc, res)
    assert ReservationServiceClient.reservation_path(proj, loc, res) == expected
def test_parse_reservation_path():
    """parse_reservation_path inverts reservation_path (round-trip)."""
    components = {
        "project": "nautilus",
        "location": "scallop",
        "reservation": "abalone",
    }
    rendered = ReservationServiceClient.reservation_path(**components)
    assert ReservationServiceClient.parse_reservation_path(rendered) == components
def test_common_billing_account_path():
    """common_billing_account_path renders billingAccounts/{billing_account}."""
    account = "squid"
    assert (
        ReservationServiceClient.common_billing_account_path(account)
        == "billingAccounts/" + account
    )
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path inverts common_billing_account_path."""
    components = {"billing_account": "clam"}
    rendered = ReservationServiceClient.common_billing_account_path(**components)
    assert (
        ReservationServiceClient.parse_common_billing_account_path(rendered)
        == components
    )
def test_common_folder_path():
    """common_folder_path renders folders/{folder}."""
    name = "whelk"
    assert ReservationServiceClient.common_folder_path(name) == "folders/" + name
def test_parse_common_folder_path():
    """parse_common_folder_path inverts common_folder_path (round-trip)."""
    components = {"folder": "octopus"}
    rendered = ReservationServiceClient.common_folder_path(**components)
    assert ReservationServiceClient.parse_common_folder_path(rendered) == components
def test_common_organization_path():
    """common_organization_path renders organizations/{organization}."""
    name = "oyster"
    assert (
        ReservationServiceClient.common_organization_path(name)
        == "organizations/" + name
    )
def test_parse_common_organization_path():
    """parse_common_organization_path inverts common_organization_path."""
    components = {"organization": "nudibranch"}
    rendered = ReservationServiceClient.common_organization_path(**components)
    assert (
        ReservationServiceClient.parse_common_organization_path(rendered) == components
    )
def test_common_project_path():
    """common_project_path renders projects/{project}."""
    name = "cuttlefish"
    assert ReservationServiceClient.common_project_path(name) == "projects/" + name
def test_parse_common_project_path():
    """parse_common_project_path inverts common_project_path (round-trip)."""
    components = {"project": "mussel"}
    rendered = ReservationServiceClient.common_project_path(**components)
    assert ReservationServiceClient.parse_common_project_path(rendered) == components
def test_common_location_path():
    """common_location_path renders projects/{project}/locations/{location}."""
    proj, loc = "winkle", "nautilus"
    expected = "projects/{}/locations/{}".format(proj, loc)
    assert ReservationServiceClient.common_location_path(proj, loc) == expected
def test_parse_common_location_path():
    """parse_common_location_path inverts common_location_path (round-trip)."""
    components = {
        "project": "scallop",
        "location": "abalone",
    }
    rendered = ReservationServiceClient.common_location_path(**components)
    assert ReservationServiceClient.parse_common_location_path(rendered) == components
def test_client_withDEFAULT_CLIENT_INFO():
    """A custom client_info must reach _prep_wrapped_messages in both ctors."""
    client_info = gapic_v1.client_info.ClientInfo()
    # Constructing the client directly forwards client_info to the transport.
    with mock.patch.object(
        transports.ReservationServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        ReservationServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
    # Constructing the transport class directly forwards it as well.
    with mock.patch.object(
        transports.ReservationServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = ReservationServiceClient.get_transport_class()
        transport_class(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
| [
"noreply@github.com"
] | plamut.noreply@github.com |
c629e0e16b3e985e068c51df8646c388991624fe | 193247e2d09b2ceb003a07630b93b671a88aaddd | /reference-implementations/air-c2-cop/AirC2Weather/Tools/MultidimensionSupplementalTools/Scripts/mds/tools/get_variable_statistics_over_dimension.py | e7a2e4598f3ba4900ee4b06accb210a6b2c51ecd | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Esri/defense-solutions-proofs-of-concept | 5044c062e0bca762b95bb7dbe6aa7b27170923f1 | b0d796e0c36f8ee3a17dfe57b19f8de930b08414 | refs/heads/master | 2023-08-28T10:18:33.364318 | 2022-10-26T15:27:46 | 2022-10-26T15:27:46 | 102,737,392 | 15 | 21 | Apache-2.0 | 2022-09-30T18:11:37 | 2017-09-07T13:00:26 | Java | UTF-8 | Python | false | false | 11,190 | py | # -*- coding: utf-8 -*-
import arcpy
import mds
import mds.messages
import numpy
import netCDF4
import os.path
#
# LIMITATIONS:
# > Attributes:
# Attribute values are copied wholesale from the original variable. Hence,
# if these values describe the the values in the new variable, i.e. as with
# valid_range, actual_range, unpacked_range, they will be incorrect and should
# be manually altered. This affects all statistics types, but is only
# problematic with the RANGE, STD, SUM, and VARIANCE.
#
class GetVariableStatisticsOverDimension(object):
    """ArcGIS geoprocessing tool: reduce one dimension of a variable in a
    multidimensional (netCDF/HDF) dataset to a statistic and write the result
    as a new variable into a new netCDF file.

    Python 2 code (uses the old ``except E, e`` syntax). Attribute values are
    copied verbatim onto the output variable — see the LIMITATIONS note at
    the top of this file.
    """
    def __init__(self):
        """Define the tool (tool name is the name of the class)."""
        self.label = "Get Variable Statistics Over Dimension"
        self.description = "Calculates statistics for a variable in a " + \
            "multidimensional dataset, such as netCDF or HDF, over a specified" + \
            "dimension. "
        self.canRunInBackground = False
        # Statistics choices: display name -> numpy.ma method name.
        statistics_numpy = {'MAXIMUM':'max', \
                            'MEAN':'mean', \
                            'MINIMUM':'min', \
                            'RANGE':'ptp', \
                            'STD':'std', \
                            'SUM':'sum', \
                            'VARIANCE':'var'}
        # List of dictionaries of statistics
        # Sublist elements indices:
        #   0: object
        #   1: dictionary defined by 'displayname':'methodname'
        #      where object.methodname() is valid and displayname is what is
        #      shown to the user
        self.statistics = [[numpy.ma, statistics_numpy]]
        self.default_statistic = "MEAN"
    def getParameterInfo(self):
        """Define parameter definitions"""
        parameters = []
        # Input parameter
        parameters.append(arcpy.Parameter(
            displayName="Input File or URL String",
            name="in_file",
            datatype=["DEFile","GPString"],
            parameterType="Required",
            direction="Input"))
        # Variable parameter
        parameters.append(arcpy.Parameter(
            displayName="Variable",
            name="variable",
            datatype="GPString",
            parameterType="Required",
            direction="Input"))
        parameters[-1].parameterDependencies = [parameters[-2].name]
        # Dimension parameter
        parameters.append(arcpy.Parameter(
            displayName="Dimension",
            name="dimension",
            datatype="GPString",
            parameterType="Required",
            direction="Input"))
        parameters[-1].parameterDependencies = [parameters[-2].name]
        # Output parameter
        parameters.append(arcpy.Parameter(
            displayName="Output netCDF File",
            name="out_netcdf_file",
            datatype="DEFile",
            multiValue=False,
            parameterType="Required",
            direction="Output"))
        # Output variable parameter
        parameters.append(arcpy.Parameter(
            displayName="Output Variable Name",
            name="out_variable",
            datatype="GPString",
            multiValue=False,
            parameterType="Optional",
            direction="Output"))
        # Type parameter
        parameters.append(arcpy.Parameter(
            displayName="Statistic Type",
            name="statistic_type",
            datatype="GPString",
            parameterType="Optional",
            direction="Input"))
        parameters[-1].filter.type = "ValueList"
        # Offer every display name registered in self.statistics, sorted.
        parameters[-1].filter.list = sorted([key for stat in \
            self.statistics for key in stat[1].keys()])
        parameters[-1].value = self.default_statistic
        return parameters
    def isLicensed(self):
        """Set whether tool is licensed to execute."""
        return True
    def updateParameters(self, parameters):
        """Modify the values and properties of parameters before internal
        validation is performed. This method is called whenever a parameter
        has been changed."""
        return
    def updateMessages(self, parameters):
        """Modify the messages created by internal validation for each tool
        parameter. This method is called after internal validation."""
        input_parameter = parameters[0]
        variable_parameter = parameters[1]
        dimension_parameter = parameters[2]
        output_parameter = parameters[3]
        output_var_parameter = parameters[4]
        type_parameter = parameters[5]
        dataset = None
        # Open dataset and populate variable names
        if input_parameter.value is not None:
            try:
                dataset = mds.netcdf.Dataset(input_parameter.valueAsText, '')
            except RuntimeError, exception:
                # Map the opaque netCDF error strings onto friendlier messages.
                if "No such file or directory" in str(exception) or \
                        "Invalid argument" in str(exception):
                    input_parameter.setErrorMessage(
                        mds.messages.INPUT_DATASET_DOES_NOT_RESOLVE_TO_FILENAME.format(
                        input_parameter.valueAsText))
                elif "Malformed or inaccessible DAP DDS" in str(exception):
                    input_parameter.setErrorMessage(
                        mds.messages.INPUT_DATASET_URL_MALFORMED.format(
                        input_parameter.valueAsText))
                else:
                    input_parameter.setErrorMessage(
                        mds.messages.INPUT_DATASET_GENERIC_ERROR.format(
                        input_parameter.valueAsText, str(exception)))
            except Exception, exception:
                input_parameter.setErrorMessage(
                    mds.messages.INPUT_DATASET_GENERIC_ERROR.format(
                    input_parameter.valueAsText, str(exception)))
        if dataset is not None:
            # Fill variable list
            variable_parameter.filter.type = "ValueList"
            variable_parameter.filter.list = list(dataset.variable_names())
        else:
            # Clear variable list if no input specified
            variable_parameter.filter.type = "ValueList"
            variable_parameter.filter.list = []
            variable_parameter.value = ""
            # Clear dimension list if no input specified
            dimension_parameter.filter.type = "ValueList"
            dimension_parameter.filter.list = []
            dimension_parameter.value = ""
        # Update dimension list
        if (variable_parameter.value is not None) and (dataset is not None):
            # Fill dimensions list
            dimension_parameter.filter.type = "ValueList"
            dimension_parameter.filter.list = list(
                dataset.variable_dimension_names(variable_parameter.valueAsText))
        else:
            # Clear dimension list if no input specified
            dimension_parameter.filter.type = "ValueList"
            dimension_parameter.filter.list = []
            dimension_parameter.value = ""
        # Ensure an output variable name is entered
        if (output_var_parameter.altered) and (output_var_parameter.value is None):
            output_var_parameter.setErrorMessage(
                '%s: Must input a variable name.' % output_var_parameter.name)
        # Ensure output variable name is not the same as an existing variable's
        if (output_var_parameter.value is not None) and \
                (dataset is not None) and (output_var_parameter.value in \
                dataset.variable_names()):
            output_var_parameter.setErrorMessage(
                '%s: Name cannot be the same as that of an existing variable.' \
                % output_var_parameter.name)
        # Populate a default output variable name and update it with changes
        # to other parameters as long as the user hasn't modified it themself
        if (variable_parameter.value is not None) and \
                (dimension_parameter.value is not None) and \
                (not output_var_parameter.altered):
            if type_parameter.value is None:
                # No statistic chosen yet: default name assumes MEAN.
                output_var_parameter.value = variable_parameter.value + \
                    "_MEAN" + dimension_parameter.value
            else:
                output_var_parameter.value = variable_parameter.value + \
                    "_" + type_parameter.value + dimension_parameter.value
        # Ensure output file has a .nc extension
        if output_parameter.value is not None:
            output_filename = output_parameter.valueAsText
            if os.path.splitext(output_filename)[1] != ".nc":
                output_parameter.setErrorMessage(
                    mds.messages.OUTPUT_FILE_EXTENSION_MUST_BE_NC)
        return
    # ---------------------------------------------------------
    # Statistics
    def calculate_statistic(self, variable, dimension, statistic):
        """Reduce `variable` along axis `dimension` with the named statistic.

        Falls back to numpy.ma.mean when `statistic` is not a registered
        display name (the for/else only runs the else when no break fired).
        """
        # Apply statistic
        for stat in self.statistics:
            if statistic in stat[1]:
                func = getattr(stat[0], stat[1][statistic])
                break
        else:
            # Default
            func = getattr(numpy.ma, 'mean')
        return func(variable, axis=dimension)
    # ---------------------------------------------------------
    def execute(self, parameters, messages):
        """The source code of the tool: compute the statistic and write it
        as a new variable into a copy of the input dataset."""
        input_parameter = parameters[0]
        variable_parameter = parameters[1]
        dimension_parameter = parameters[2]
        output_parameter = parameters[3]
        output_var_parameter = parameters[4]
        type_parameter = parameters[5]
        dataset_name = input_parameter.valueAsText
        # Open dataset
        try:
            dataset = mds.netcdf.Dataset(dataset_name,'')
        except RuntimeError, exception:
            # Handle errors not detected by updateMessages.
            messages.addErrorMessage(str(exception))
            raise arcpy.ExecuteError
        # Variable of interest
        var1 = dataset.variable(variable_parameter.valueAsText)
        # Dimension of interest
        dim1 = var1.dimensions.index(dimension_parameter.valueAsText)
        # Perform statistic
        result1 = self.calculate_statistic(var1[:], dim1, \
            type_parameter.valueAsText)
        # Collect output dataset information: the reduced dimension is dropped.
        output_dims = list(dataset.variable_dimension_names(
            variable_parameter.valueAsText))
        output_dims.remove(dimension_parameter.valueAsText)
        output_dims = tuple(output_dims)
        output_filename = output_parameter.valueAsText
        output_name = output_var_parameter.valueAsText
        # Create new dataset
        dataset.xcopy(dataset.data_variable_names(), output_filename)
        # Create new variable in dataset
        with netCDF4.Dataset(output_filename, mode="a") as newdataset:
            newvar = newdataset.createVariable(output_name, var1.dtype, \
                output_dims)
            # Attributes copied verbatim (see LIMITATIONS header note).
            for attribute_name in var1.ncattrs():
                newvar.setncattr(attribute_name, var1.getncattr(attribute_name))
            newvar[:] = result1
        # Output new variable name
        arcpy.SetParameter(5, output_name)
        return
"jbayles@esri.com"
] | jbayles@esri.com |
80b3358be81f5d974d6f594abfd81b0f94056eea | 6701eae4550c7cd3d8703565c7be3a5e26248676 | /test/functional/txn_clone.py | 960fbddca9ce3e61f1041d65c503eba4502734fa | [
"MIT"
] | permissive | stochastic-thread/bootstrapping-ellocash | 91de56a330090c004af31d861e6d4cfb8d8b9e36 | 9495f1e3741c7f893457e4f6602d6ef0d84b7b3d | refs/heads/master | 2021-09-05T05:00:19.403780 | 2018-01-24T08:09:44 | 2018-01-24T08:09:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,600 | py | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there are cloned transactions with malleated scriptsigs."""
from test_framework.test_framework import EllocashTestFramework
from test_framework.util import *
class TxnMallTest(EllocashTestFramework):
    """Functional test: clone a transaction with a malleated scriptsig and
    verify wallet account balances/confirmations after the clone confirms
    instead of the original."""
    def set_test_params(self):
        """Run on a 4-node network."""
        self.num_nodes = 4
    def add_options(self, parser):
        """Optionally mine a block so tx1 is 1-confirmed before cloning."""
        parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
                          help="Test double-spend of 1-confirmed transaction")
    def setup_network(self):
        """Start with the network split between nodes {0,1} and {2,3}."""
        # Start with split network:
        super(TxnMallTest, self).setup_network()
        disconnect_nodes(self.nodes[1], 2)
        disconnect_nodes(self.nodes[2], 1)
    def run_test(self):
        """Fund two accounts, send tx1/tx2, confirm a malleated clone of tx1
        on the other network half, reconnect, and check final balances."""
        # All nodes should start with 1,250 LOC:
        starting_balance = 1250
        for i in range(4):
            assert_equal(self.nodes[i].getbalance(), starting_balance)
            self.nodes[i].getnewaddress("")  # bug workaround, coins generated assigned to first getnewaddress!
        # Assign coins to foo and bar accounts:
        self.nodes[0].settxfee(.001)
        node0_address_foo = self.nodes[0].getnewaddress("foo")
        fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 1219)
        fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
        node0_address_bar = self.nodes[0].getnewaddress("bar")
        fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 29)
        fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
        assert_equal(self.nodes[0].getbalance(""),
                     starting_balance - 1219 - 29 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
        # Coins are sent to node1_address
        node1_address = self.nodes[1].getnewaddress("from0")
        # Send tx1, and another transaction tx2 that won't be cloned
        txid1 = self.nodes[0].sendfrom("foo", node1_address, 40, 0)
        txid2 = self.nodes[0].sendfrom("bar", node1_address, 20, 0)
        # Construct a clone of tx1, to be malleated
        rawtx1 = self.nodes[0].getrawtransaction(txid1,1)
        clone_inputs = [{"txid":rawtx1["vin"][0]["txid"],"vout":rawtx1["vin"][0]["vout"]}]
        clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][0]["value"],
                         rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][1]["value"]}
        clone_locktime = rawtx1["locktime"]
        clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs, clone_locktime)
        # createrawtransaction randomizes the order of its outputs, so swap them if necessary.
        # output 0 is at version+#inputs+input+sigstub+sequence+#outputs
        # 40 LOC serialized is 00286bee00000000
        pos0 = 2*(4+1+36+1+4+1)
        hex40 = "00286bee00000000"
        output_len = 16 + 2 + 2 * int("0x" + clone_raw[pos0 + 16 : pos0 + 16 + 2], 0)
        if (rawtx1["vout"][0]["value"] == 40 and clone_raw[pos0 : pos0 + 16] != hex40 or
            rawtx1["vout"][0]["value"] != 40 and clone_raw[pos0 : pos0 + 16] == hex40):
            output0 = clone_raw[pos0 : pos0 + output_len]
            output1 = clone_raw[pos0 + output_len : pos0 + 2 * output_len]
            clone_raw = clone_raw[:pos0] + output1 + output0 + clone_raw[pos0 + 2 * output_len:]
        # Use a different signature hash type to sign. This creates an equivalent but malleated clone.
        # Don't send the clone anywhere yet
        tx1_clone = self.nodes[0].signrawtransaction(clone_raw, None, None, "ALL|ANYONECANPAY")
        assert_equal(tx1_clone["complete"], True)
        # Have node0 mine a block, if requested:
        if (self.options.mine_block):
            self.nodes[0].generate(1)
            sync_blocks(self.nodes[0:2])
        tx1 = self.nodes[0].gettransaction(txid1)
        tx2 = self.nodes[0].gettransaction(txid2)
        # Node0's balance should be starting balance, plus 50LOC for another
        # matured block, minus tx1 and tx2 amounts, and minus transaction fees:
        expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
        if self.options.mine_block: expected += 50
        expected += tx1["amount"] + tx1["fee"]
        expected += tx2["amount"] + tx2["fee"]
        assert_equal(self.nodes[0].getbalance(), expected)
        # foo and bar accounts should be debited:
        assert_equal(self.nodes[0].getbalance("foo", 0), 1219 + tx1["amount"] + tx1["fee"])
        assert_equal(self.nodes[0].getbalance("bar", 0), 29 + tx2["amount"] + tx2["fee"])
        if self.options.mine_block:
            assert_equal(tx1["confirmations"], 1)
            assert_equal(tx2["confirmations"], 1)
            # Node1's "from0" balance should be both transaction amounts:
            assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"] + tx2["amount"]))
        else:
            assert_equal(tx1["confirmations"], 0)
            assert_equal(tx2["confirmations"], 0)
        # Send clone and its parent to miner
        self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
        txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
        # ... mine a block...
        self.nodes[2].generate(1)
        # Reconnect the split network, and sync chain:
        connect_nodes(self.nodes[1], 2)
        self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
        self.nodes[2].sendrawtransaction(tx2["hex"])
        self.nodes[2].generate(1)  # Mine another block to make sure we sync
        sync_blocks(self.nodes)
        # Re-fetch transaction info:
        tx1 = self.nodes[0].gettransaction(txid1)
        tx1_clone = self.nodes[0].gettransaction(txid1_clone)
        tx2 = self.nodes[0].gettransaction(txid2)
        # Verify expected confirmations: the original tx1 is now conflicted (-2).
        assert_equal(tx1["confirmations"], -2)
        assert_equal(tx1_clone["confirmations"], 2)
        assert_equal(tx2["confirmations"], 1)
        # Check node0's total balance; should be same as before the clone, + 100 LOC for 2 matured,
        # less possible orphaned matured subsidy
        expected += 100
        if (self.options.mine_block):
            expected -= 50
        assert_equal(self.nodes[0].getbalance(), expected)
        assert_equal(self.nodes[0].getbalance("*", 0), expected)
        # Check node0's individual account balances.
        # "foo" should have been debited by the equivalent clone of tx1
        assert_equal(self.nodes[0].getbalance("foo"), 1219 + tx1["amount"] + tx1["fee"])
        # "bar" should have been debited by (possibly unconfirmed) tx2
        assert_equal(self.nodes[0].getbalance("bar", 0), 29 + tx2["amount"] + tx2["fee"])
        # "" should have starting balance, less funding txes, plus subsidies
        assert_equal(self.nodes[0].getbalance("", 0), starting_balance
                                                                - 1219
                                                                + fund_foo_tx["fee"]
                                                                - 29
                                                                + fund_bar_tx["fee"]
                                                                + 100)
        # Node1's "from0" account balance
        assert_equal(self.nodes[1].getbalance("from0", 0), -(tx1["amount"] + tx2["amount"]))
if __name__ == '__main__':
    TxnMallTest().main()  # run via the shared test-framework entry point
| [
"arthurcolle@Arthurs-MacBook-Pro.local"
] | arthurcolle@Arthurs-MacBook-Pro.local |
c7f7d61a99c4af08ce8617ff03de87e252df99b8 | fd143272c9e958fa814692454ddb4a2eced2ab92 | /S4.2.py | 8bbe73c5cbcca0a385bda40a62e76e2558d95687 | [] | no_license | taowenyin/HelloSLM | febf3c4b4ce515da27ec307f8184d3b11e43a0dd | adced132a187c0e2d708ca9858535e8cebe664b4 | refs/heads/master | 2022-05-27T09:13:59.122326 | 2020-04-26T14:01:47 | 2020-04-26T14:01:47 | 259,044,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,413 | py | import numpy as np
from sklearn.naive_bayes import GaussianNB, BernoulliNB, MultinomialNB
if __name__ == '__main__':
    # Toy training set: rows are (x1, x2, label); x2 is categorical (S/M/L).
    data = np.array([
        [1, 'S', -1], [1, 'M', -1], [1, 'M', 1], [1, 'S', 1], [1, 'S', -1],
        [2, 'S', -1], [2, 'M', -1], [2, 'M', 1], [2, 'L', 1], [2, 'L', 1],
        [3, 'L', 1], [3, 'M', 1], [3, 'M', 1], [3, 'L', 1], [3, 'L', -1],
    ])
    # Split into features and labels.
    train_x = data[:, [0, 1]]
    train_y = data[:, 2].astype(np.int)
    # Encode the categorical second feature as integers (S->4, M->5, L->6).
    for code, encoded in (('S', 4), ('M', 5), ('L', 6)):
        train_x[train_x == code] = encoded
    train_x = train_x.astype(np.int)
    # Query point (2, 'S'), encoded the same way.
    x = np.array([[2, 'S']])
    x[x == 'S'] = 4
    x = x.astype(np.int)
    # Fit three naive Bayes variants and report prediction + probabilities.
    for label, model in (('Gaussian', GaussianNB()),
                         ('Bernoulli', BernoulliNB()),
                         ('Multinomial', MultinomialNB())):
        model.fit(train_x, train_y)
        print(label + ' Test X = ', model.predict(x))
        print(label + ' Test X = ', model.predict_proba(x))
| [
"wenyin.tao@163.com"
] | wenyin.tao@163.com |
bd374ed841b18e22b1108b9e8b2c12dac786d446 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/artificial/transf_Anscombe/trend_MovingMedian/cycle_30/ar_12/test_artificial_128_Anscombe_MovingMedian_30_12_100.py | ccc8235516ad5f149b1dacaabb8d05d4860cb57f | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 269 | py | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 30, transform = "Anscombe", sigma = 0.0, exog_count = 100, ar_order = 12); | [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
a98dbf4c9d9e1172ec1e655a3091b12b5af3dea1 | 795cdbb2192e069a808e59d9a8c098637e738ccb | /app/upload_file.py | 785974f435d3b9b36c82957c375bf85a7a246e8c | [
"MIT"
] | permissive | terrainthesky-hub/Human_Rights_Asylum_Seekers | ccc09b72caac0e4684b04a7bc10b7699d3e2833e | 5f2715c0fc4d2b48922d37527fa3860a20e9c72d | refs/heads/main | 2023-06-22T20:50:07.487285 | 2021-07-12T22:26:39 | 2021-07-12T22:26:39 | 385,399,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,692 | py | # All Required Imports
from fastapi import APIRouter
from fastapi import FastAPI, File, UploadFile
import pandas
import urllib.request
import logging
import boto3
from botocore.config import Config
import shutil
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Callable
from botocore.exceptions import ClientError
import sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from dotenv import load_dotenv
import os
import aiofile
from starlette.responses import FileResponse
from fnmatch import fnmatch
# loads secret credentials
load_dotenv()
# Connect POST request ot the api routor
router = APIRouter()
# parse case_url and scrape relivent data off of it
def case_urls(str):
    """Scrape case metadata fields out of a hearing-document filename/URL.

    NOTE(review): the parameter shadows the builtin ``str``; consider
    renaming it (e.g. ``url``) once keyword callers are ruled out.

    The index arithmetic below is tuned to one specific filename layout
    (roughly ``...-<refugee>-<case_id>-<department>-<date>.pdf`` — TODO
    confirm); inputs that deviate can silently return garbage or raise.

    Returns:
        Tuple (case_id, case_url, hearing_date, decision_date, department,
        refugee); hearing_date and decision_date are the same substring.
    """
    # Keep the full string so callers can look the document up on S3.
    case_url = str
    # Assumes a 4-char extension ('.pdf'); the 15 chars before it hold the
    # date portion — TODO confirm against real filenames.
    index = str[-19:-4].find('-')
    hearing_date = str[(len(str)-19):-4]
    hearing_date = hearing_date[index+1:]
    # No separate decision date in the filename; reuse the hearing date.
    decision_date = hearing_date
    index = str.find('-')
    indexend = str.find(hearing_date)
    a = str[indexend-8:indexend-1].find('-') +1
    # NOTE(review): the next expression is computed and discarded (no effect).
    str[indexend-8+a:indexend-1]
    department=str[indexend-8+a:indexend-1]
    b = str.find(department)
    c=str[:b].find('-')+1
    urlforloop = str[c:indexend-9+a]
    l = []
    # Find positions followed (two chars later) by a digit and not a dash;
    # the earliest one anchors the case-id boundary.
    for i in range(7,len(urlforloop)):
        if str[i:i+1].find('-') == -1 and str[i+2:i+3].isnumeric():
            l.append(i)
    h= min(l) - 10  # raises ValueError when no position matched — TODO guard
    case_id = urlforloop[h:]
    t = urlforloop.find(case_id)
    refugee = urlforloop[:t+1]
    return case_id, case_url,hearing_date,decision_date,department,refugee
def save_upload_file(upload_file: UploadFile, destination: Path) -> None:
    """Copy an uploaded file's stream into *destination*, always closing the source."""
    source = upload_file.file
    try:
        with destination.open("wb") as sink:
            shutil.copyfileobj(source, sink)
    finally:
        # Close the upload's spool even when the copy fails.
        source.close()
def save_upload_file_tmp(upload_file: UploadFile) -> Path:
    """Spool the upload into a persistent named temp file and return its path."""
    try:
        # Preserve the original extension so downstream tools recognize it.
        extension = Path(upload_file.filename).suffix
        with NamedTemporaryFile(delete=False, suffix=extension) as spool:
            shutil.copyfileobj(upload_file.file, spool)
            result = Path(spool.name)
    finally:
        upload_file.file.close()
    return result
def handle_upload_file(
    upload_file: UploadFile, handler: Callable[[Path], None]
) -> None:
    """Save the upload to a temp file, run *handler* on it, then remove it."""
    temp_copy = save_upload_file_tmp(upload_file)
    try:
        handler(temp_copy)  # process the spooled copy
    finally:
        temp_copy.unlink()  # always remove the temp copy
# file uploaders
@router.post("/upload/pdf")
async def pdf(file: UploadFile = File(...)):
    """Accept a PDF upload: parse metadata from its filename, stage it under
    app/, push it to the S3 bucket, delete local *.pdf copies, and echo the
    parsed fields back to the caller.

    NOTE(review): ``case_url`` etc. are bound only inside the ``if`` branch,
    so an empty filename would raise NameError at the return — confirm
    FastAPI guarantees a non-empty filename here.
    """
    filename = file.filename
    if len(filename) >= 1:
        # Scrape all the metadata we need from the uploaded file's name.
        case_id, case_url,hearing_date, decision_date, department, refugee = case_urls(file.filename)
        # helper functions to handle file correctly
        #save_upload_file_tmp(file)
        #handle_upload_file(file, tmp_path)
        # Stage the upload on local disk before shipping it to S3.
        path = 'app/'+file.filename
        key = os.getenv('access_key')
        secret_access_key = os.getenv('secret_access_key')
        with open(path, 'wb') as file_object:
            shutil.copyfileobj(file.file, file_object)
        s3 = boto3.resource(
            's3',
            aws_access_key_id = key,
            aws_secret_access_key = secret_access_key,
            config = Config(signature_version ='s3v4')
        )
        # NOTE(review): this handle is never closed explicitly.
        data = open(path, 'rb')
        s3.Bucket('hrf-asylum-dsa-documents').put_object(Key='pdf/'+file.filename, Body=data)
        # Scripts to scrape the pdf into free text and get the judge name
        # would run here; afterwards delete every local pdf copy.
        # (The loop variable shadows the ``file`` parameter — it is no
        # longer needed at this point.)
        for dirpath, dirnames, filenames in os.walk(os.curdir):
            for file in filenames:
                if fnmatch(file, '*.pdf'):
                    os.remove(os.path.join(dirpath, file))
    return {"filename": case_url,
            "case_id" : case_id,
            "case_url" : case_url,
            "hearing_date" : hearing_date,
            "decision_date" : decision_date,
            "department": department,
            "refugee": refugee,
            "s3": "Viewable"}
# deals with data from csv
def csv_data(df):
    """Placeholder for CSV ingestion; currently always yields an empty string."""
    result = ""
    return result
# This route is not working yet, so don't include it
# @router.post("/upload/file")
async def not_pdf(file: UploadFile = File(...)):
    """Stub route for non-PDF uploads: echoes the filename back.

    CSV parsing (pd.read_csv + csv_data) is still to be wired in.
    """
    response = {"filename": file.filename}
    return response
# This route is not working yet, so don't include it
# @router.post("/connect/db")
async def get_db() -> sqlalchemy.engine.base.Connection:
    """Get a SQLAlchemy database connection (FastAPI dependency generator).

    Reads the connection string from the environment variable
    DATABASE_URL=dialect://user:password@host/dbname, yields an open
    connection, and always closes it when the request scope ends.

    NOTE(review): ``session_local`` is created but never used or yielded —
    presumably leftover scaffolding; confirm before removing.
    """
    database_url = os.getenv('DATABASE_URL')
    engine = sqlalchemy.create_engine(database_url)
    connection = engine.connect()
    session_local = sessionmaker(autocommit=False, autoflush=False, bind=engine)()
    try:
        yield connection
    finally:
        connection.close()
# the postgres database has never work yet
# load_dotenv()
# database_url = os.getenv('DATABASE_URL')
# engine = sqlalchemy.create_engine(database_url, pool_pre_ping=True)
# connection = engine.connect()
# session_local = sessionmaker(autocommit=False, autoflush=False, bind=engine)()
Base = declarative_base() | [
"lesley.t.rich@gmail.com"
] | lesley.t.rich@gmail.com |
b4fbc50150b419753f6d6e59d82190c93aac1e3f | ef31e4986b524f73a1cc7300327f28d0e7fd1960 | /venv/Scripts/pyi-makespec-script.py | b5f8e8f7834bff66fba68d49892f0987cf07502d | [] | no_license | aurvandel/pdfcombine | a5348150ef8f5649c09c689f5f5966a045cd49b5 | 7900faa2c823005adc46ee1f434cebf575632d24 | refs/heads/master | 2023-04-15T08:13:26.170775 | 2019-06-05T16:22:51 | 2019-06-05T16:22:51 | 362,185,595 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 450 | py | #!C:\Users\pwatkin1\PycharmProjects\pdfcombine\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'PyInstaller==3.4','console_scripts','pyi-makespec'
__requires__ = 'PyInstaller==3.4'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the "-script.py"/".exe" suffix so argv[0] matches the console-script name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Delegate to the pyi-makespec entry point declared by PyInstaller 3.4.
    sys.exit(
        load_entry_point('PyInstaller==3.4', 'console_scripts', 'pyi-makespec')()
    )
| [
"parkergw@gmail.com"
] | parkergw@gmail.com |
f0c8e4a7e7eedd40041bc507e96e9ebd1d7c55c0 | 3e713a67f370d1cc1ba0882159a03b673bd22f9a | /DataStructure and Alogorithms/[HACKERRANK]-cats and a mouse .py | d1edad7dc3eaf287f6fbb70ca5520a5f5a091571 | [] | no_license | s-abhishek2399/competitive-progamming--PYTHON | 739797ffea0b92cc2781559e7d4eed1d274678a6 | 29f9e63cfc05c01fa605c14fb8a3a55920296d43 | refs/heads/master | 2023-03-08T02:40:00.962109 | 2021-02-16T15:07:52 | 2021-02-16T15:07:52 | 328,732,345 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | n = int(input())
for _ in range(n):
    # Each query line: cat A position, cat B position, mouse position.
    positions = [int(token) for token in input().split()]
    dist_a = abs(positions[0] - positions[2])
    dist_b = abs(positions[1] - positions[2])
    if dist_a < dist_b:
        print("Cat A")
    elif dist_b < dist_a:
        print("Cat B")
    else:
        # Equidistant cats: the mouse escapes.
        print("Mouse C")
| [
"s.abhishek2399@gmail.com"
] | s.abhishek2399@gmail.com |
ed3c4924a869f8d4d77bcbdfcb1d637c1d9150bf | 40c94870c9bbc5d3ca5fccccbbe648f17167be3e | /additional script/Dodelson3.4.py | cf7e2638b65c9f8b9d65438aabcfea16f5d0a53b | [] | no_license | zsw6666/ASTRO_script | 62b9cfef756723a7a5ba3d16fb0205eb7c5eb725 | 89de215edb86b516781192b349eddf15a55114b1 | refs/heads/version1.0 | 2020-04-16T12:31:09.935061 | 2019-10-16T11:41:39 | 2019-10-16T11:41:39 | 165,577,794 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,150 | py | import numpy as np
import astropy.constants as const
import astropy.units as u
from matplotlib import pyplot as plt
def lamda(x):
    """Conversion rate lambda(x) = (255 / (886.7 x^5)) * (12 + 6x + x^2)."""
    polynomial = 12. + 6. * x + x ** 2
    return (255 / (886.7 * (x ** 5))) * polynomial
def H(x):
    """Expansion rate H(x) with g* = 10.75 (presumably x = Q/T — confirm)."""
    Q_joule = ((1.293 * u.MeV).to(u.J)).value  # n-p mass splitting in joules
    g_star = 10.75  # relativistic degrees of freedom
    prefactor = (2 * np.pi / 3) * np.sqrt((g_star * np.pi * const.G.value) / (5.))
    return prefactor * ((Q_joule ** 2) / (x ** 2))
def Mu(x):
    """Exponent mu(x) used in the freeze-out integrating factor."""
    lifetime = 886.7  # neutron lifetime tau in seconds
    Q_joule = ((1.293 * u.MeV).to(u.J)).value
    amplitude = (-255. / (lifetime * Q_joule)) * (4 * (np.pi ** 3) * const.G.value * (Q_joule ** 2) * 10.75 / 45.) ** (-0.5)
    shape = (4 / x ** 3) + (3 / x ** 2) + (1 / x) + np.exp(-x) * ((4 / x ** 3) + (3 / x ** 2))
    return amplitude * shape
def Intfunc(x, x0):
    """Integrand for the freeze-out integral: rate kernel times damping factor."""
    kernel = (lamda(x) * np.exp(-x)) / (x * H(x))
    damping = np.exp(Mu(x) - Mu(x0))
    return kernel * damping
def Integrator(func, interval, x0):
    """Rectangle-rule integral of func(x, x0) over [interval[0], interval[1]].

    Samples 1000 equally spaced points and weights every sample by the
    uniform spacing.

    Args:
        func: callable of (x, x0). Bug fix: previously this parameter was
            ignored and Intfunc was called unconditionally; the integrator
            now honors the supplied integrand (the only call site passes
            Intfunc, so existing behavior is unchanged).
        interval: (lower, upper) integration bounds.
        x0: extra argument forwarded to func.

    Returns:
        Float approximation of the integral.
    """
    x = np.linspace(interval[0], interval[1], 1000)
    step = x[1] - x[0]
    y = func(x, x0)  # was: Intfunc(x, x0), silently ignoring `func`
    return np.sum(y * step)
def Kai_n(x):
    """Neutron fraction X_n at x, integrating the kernel from x' = 0.1 to x."""
    return Integrator(Intfunc, [0.1, x], x)
def Run():
    """Evaluate X_n on x in [1, 1000] and show it on a log-scaled y axis."""
    grid = np.linspace(1, 1000, 1000)
    values = np.array([Kai_n(point) for point in grid])
    plt.plot(grid, values)
    plt.yscale('log')
    plt.xlabel(r'$x$')
    plt.ylabel(r'$X_{n}$')
    plt.show()
    return None
Run()  # module-level side effect: computes and plots X_n when the script runs
| [
"zsw18@mails.tsinghua.edu.cn"
] | zsw18@mails.tsinghua.edu.cn |
00e199a0bfedc87514bce6f71c530ade6ab7313b | 299355248bb243427667e8192e93f378907150b9 | /envs/robots/minitaur.py | 81915b6897aa89b6de918f27ad285fe02c415e07 | [
"MIT"
] | permissive | FrankTianTT/pytorch_sac | d35f60579a31e0063ffbdba44e13db16de2e38ae | 770a632b25dc00419faef3ba672eb4982f950d2a | refs/heads/master | 2022-11-24T12:48:23.181504 | 2020-07-31T15:30:11 | 2020-07-31T15:30:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52,723 | py | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file implements the functionalities of a minitaur using pybullet."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import copy
import math
import re
import numpy as np
from robots import minitaur_constants
from robots import minitaur_motor
from robots import robot_config
from robots import action_filter
from robots import kinematics
# Default base poses used when resetting on the ground vs. hanging on a rack.
INIT_POSITION = [0, 0, .2]
INIT_RACK_POSITION = [0, 0, 1]
INIT_ORIENTATION = [0, 0, 0, 1]  # identity quaternion (pybullet x, y, z, w order)
# Anchor points for the knee closing constraints (left/right variants).
KNEE_CONSTRAINT_POINT_RIGHT = [0, 0.005, 0.2]
KNEE_CONSTRAINT_POINT_LEFT = [0, 0.01, 0.2]
# Overheat-protection thresholds — presumably: torque above this value for
# longer than the time limit disables the motor; confirm in the apply-action
# code (not visible in this chunk).
OVERHEAT_SHUTDOWN_TORQUE = 2.45
OVERHEAT_SHUTDOWN_TIME = 1.0
LEG_POSITION = ["front_left", "back_left", "front_right", "back_right"]
MOTOR_NAMES = [
    "motor_front_leftL_joint", "motor_front_leftR_joint",
    "motor_back_leftL_joint", "motor_back_leftR_joint",
    "motor_front_rightL_joint", "motor_front_rightR_joint",
    "motor_back_rightL_joint", "motor_back_rightR_joint"
]
# Regexes used to classify URDF joint/link names into robot parts.
_CHASSIS_NAME_PATTERN = re.compile(r"chassis\D*center")
_MOTOR_NAME_PATTERN = re.compile(r"motor\D*joint")
_KNEE_NAME_PATTERN = re.compile(r"knee\D*")
_BRACKET_NAME_PATTERN = re.compile(r"motor\D*_bracket_joint")
_LEG_NAME_PATTERN1 = re.compile(r"hip\D*joint")
_LEG_NAME_PATTERN2 = re.compile(r"hip\D*link")
_LEG_NAME_PATTERN3 = re.compile(r"motor\D*link")
SENSOR_NOISE_STDDEV = (0.0, 0.0, 0.0, 0.0, 0.0)
MINITAUR_DEFAULT_MOTOR_DIRECTIONS = (-1, -1, -1, -1, 1, 1, 1, 1)
MINITAUR_DEFAULT_MOTOR_OFFSETS = (0, 0, 0, 0, 0, 0, 0, 0)
MINITAUR_NUM_MOTORS = 8
TWO_PI = 2 * math.pi
MINITAUR_DOFS_PER_LEG = 2
def MapToMinusPiToPi(angles):
  """Wraps every angle into the interval [-pi, pi).

  Args:
    angles: A list of angles in rad.

  Returns:
    A list of the same angles wrapped into [-pi, pi).
  """
  wrapped = copy.deepcopy(angles)
  for index, angle in enumerate(angles):
    remainder = math.fmod(angle, TWO_PI)
    if remainder >= math.pi:
      remainder -= TWO_PI
    elif remainder < -math.pi:
      remainder += TWO_PI
    wrapped[index] = remainder
  return wrapped
class Minitaur(object):
"""The minitaur class that simulates a quadruped robot from Ghost Robotics."""
def __init__(self,
pybullet_client,
num_motors=MINITAUR_NUM_MOTORS,
dofs_per_leg=MINITAUR_DOFS_PER_LEG,
time_step=0.01,
action_repeat=1,
self_collision_enabled=False,
motor_control_mode=robot_config.MotorControlMode.POSITION,
motor_model_class=minitaur_motor.MotorModel,
motor_kp=1.0,
motor_kd=0.02,
motor_torque_limits=None,
pd_latency=0.0,
control_latency=0.0,
observation_noise_stdev=SENSOR_NOISE_STDDEV,
motor_overheat_protection=False,
motor_direction=MINITAUR_DEFAULT_MOTOR_DIRECTIONS,
motor_offset=MINITAUR_DEFAULT_MOTOR_OFFSETS,
on_rack=False,
reset_at_current_position=False,
sensors=None,
enable_action_interpolation=False,
enable_action_filter=False):
"""Constructs a minitaur and reset it to the initial states.
Args:
pybullet_client: The instance of BulletClient to manage different
simulations.
num_motors: The number of the motors on the robot.
dofs_per_leg: The number of degrees of freedom for each leg.
time_step: The time step of the simulation.
action_repeat: The number of ApplyAction() for each control step.
self_collision_enabled: Whether to enable self collision.
motor_control_mode: Enum. Can either be POSITION, TORQUE, or HYBRID.
motor_model_class: We can choose from simple pd model to more accureate DC
motor models.
motor_kp: proportional gain for the motors.
motor_kd: derivative gain for the motors.
motor_torque_limits: Torque limits for the motors. Can be a single float
or a list of floats specifying different limits for different robots. If
not provided, the default limit of the robot is used.
pd_latency: The latency of the observations (in seconds) used to calculate
PD control. On the real hardware, it is the latency between the
microcontroller and the motor controller.
control_latency: The latency of the observations (in second) used to
calculate action. On the real hardware, it is the latency from the motor
controller, the microcontroller to the host (Nvidia TX2).
observation_noise_stdev: The standard deviation of a Gaussian noise model
for the sensor. It should be an array for separate sensors in the
following order [motor_angle, motor_velocity, motor_torque,
base_roll_pitch_yaw, base_angular_velocity]
motor_overheat_protection: Whether to shutdown the motor that has exerted
large torque (OVERHEAT_SHUTDOWN_TORQUE) for an extended amount of time
(OVERHEAT_SHUTDOWN_TIME). See ApplyAction() in minitaur.py for more
details.
motor_direction: A list of direction values, either 1 or -1, to compensate
the axis difference of motors between the simulation and the real robot.
motor_offset: A list of offset value for the motor angles. This is used to
compensate the angle difference between the simulation and the real
robot.
on_rack: Whether to place the minitaur on rack. This is only used to debug
the walking gait. In this mode, the minitaur's base is hanged midair so
that its walking gait is clearer to visualize.
reset_at_current_position: Whether to reset the minitaur at the current
position and orientation. This is for simulating the reset behavior in
the real world.
sensors: a list of sensors that are attached to the robot.
enable_action_interpolation: Whether to interpolate the current action
with the previous action in order to produce smoother motions
enable_action_filter: Boolean specifying if a lowpass filter should be
used to smooth actions.
"""
self.num_motors = num_motors
self.num_legs = self.num_motors // dofs_per_leg
self._pybullet_client = pybullet_client
self._action_repeat = action_repeat
self._self_collision_enabled = self_collision_enabled
self._motor_direction = motor_direction
self._motor_offset = motor_offset
self._observed_motor_torques = np.zeros(self.num_motors)
self._applied_motor_torques = np.zeros(self.num_motors)
self._max_force = 3.5
self._pd_latency = pd_latency
self._control_latency = control_latency
self._observation_noise_stdev = observation_noise_stdev
self._observation_history = collections.deque(maxlen=100)
self._control_observation = []
self._chassis_link_ids = [-1]
self._leg_link_ids = []
self._motor_link_ids = []
self._foot_link_ids = []
self._motor_overheat_protection = motor_overheat_protection
self._on_rack = on_rack
self._reset_at_current_position = reset_at_current_position
self.SetAllSensors(sensors if sensors is not None else list())
self._is_safe = True
self._enable_action_interpolation = enable_action_interpolation
self._enable_action_filter = enable_action_filter
self._last_action = None
if not motor_model_class:
raise ValueError("Must provide a motor model class!")
if self._on_rack and self._reset_at_current_position:
raise ValueError("on_rack and reset_at_current_position "
"cannot be enabled together")
if isinstance(motor_kp, (collections.Sequence, np.ndarray)):
self._motor_kps = np.asarray(motor_kp)
else:
self._motor_kps = np.full(num_motors, motor_kp)
if isinstance(motor_kd, (collections.Sequence, np.ndarray)):
self._motor_kds = np.asarray(motor_kd)
else:
self._motor_kds = np.full(num_motors, motor_kd)
if isinstance(motor_torque_limits, (collections.Sequence, np.ndarray)):
self._motor_torque_limits = np.asarray(motor_torque_limits)
elif motor_torque_limits is None:
self._motor_torque_limits = None
else:
self._motor_torque_limits = motor_torque_limits
self._motor_control_mode = motor_control_mode
self._motor_model = motor_model_class(
kp=motor_kp,
kd=motor_kd,
torque_limits=self._motor_torque_limits,
motor_control_mode=motor_control_mode)
self.time_step = time_step
self._step_counter = 0
# This also includes the time spent during the Reset motion.
self._state_action_counter = 0
_, self._init_orientation_inv = self._pybullet_client.invertTransform(
position=[0, 0, 0], orientation=self._GetDefaultInitOrientation())
if self._enable_action_filter:
self._action_filter = self._BuildActionFilter()
# reset_time=-1.0 means skipping the reset motion.
# See Reset for more details.
self.Reset(reset_time=-1.0)
self.ReceiveObservation()
return
def GetTimeSinceReset(self):
return self._step_counter * self.time_step
def _StepInternal(self, action, motor_control_mode=None):
self.ApplyAction(action, motor_control_mode)
self._pybullet_client.stepSimulation()
self.ReceiveObservation()
self._state_action_counter += 1
return
def Step(self, action):
"""Steps simulation."""
if self._enable_action_filter:
action = self._FilterAction(action)
for i in range(self._action_repeat):
proc_action = self.ProcessAction(action, i)
self._StepInternal(proc_action)
self._step_counter += 1
self._last_action = action
return
def Terminate(self):
pass
def GetFootLinkIDs(self):
"""Get list of IDs for all foot links."""
return self._foot_link_ids
def _RecordMassInfoFromURDF(self):
"""Records the mass information from the URDF file."""
self._base_mass_urdf = []
for chassis_id in self._chassis_link_ids:
self._base_mass_urdf.append(
self._pybullet_client.getDynamicsInfo(self.quadruped, chassis_id)[0])
self._leg_masses_urdf = []
for leg_id in self._leg_link_ids:
self._leg_masses_urdf.append(
self._pybullet_client.getDynamicsInfo(self.quadruped, leg_id)[0])
for motor_id in self._motor_link_ids:
self._leg_masses_urdf.append(
self._pybullet_client.getDynamicsInfo(self.quadruped, motor_id)[0])
def _RecordInertiaInfoFromURDF(self):
"""Record the inertia of each body from URDF file."""
self._link_urdf = []
num_bodies = self._pybullet_client.getNumJoints(self.quadruped)
for body_id in range(-1, num_bodies): # -1 is for the base link.
inertia = self._pybullet_client.getDynamicsInfo(self.quadruped,
body_id)[2]
self._link_urdf.append(inertia)
# We need to use id+1 to index self._link_urdf because it has the base
# (index = -1) at the first element.
self._base_inertia_urdf = [
self._link_urdf[chassis_id + 1] for chassis_id in self._chassis_link_ids
]
self._leg_inertia_urdf = [
self._link_urdf[leg_id + 1] for leg_id in self._leg_link_ids
]
self._leg_inertia_urdf.extend(
[self._link_urdf[motor_id + 1] for motor_id in self._motor_link_ids])
def _BuildJointNameToIdDict(self):
num_joints = self._pybullet_client.getNumJoints(self.quadruped)
self._joint_name_to_id = {}
for i in range(num_joints):
joint_info = self._pybullet_client.getJointInfo(self.quadruped, i)
self._joint_name_to_id[joint_info[1].decode("UTF-8")] = joint_info[0]
  def _BuildUrdfIds(self):
    """Build the link Ids from its name in the URDF file.

    Raises:
      ValueError: Unknown category of the joint name.
    """
    num_joints = self._pybullet_client.getNumJoints(self.quadruped)
    self._chassis_link_ids = [-1]
    # The self._leg_link_ids include both the upper and lower links of the leg.
    self._leg_link_ids = []
    self._motor_link_ids = []
    self._foot_link_ids = []
    self._bracket_link_ids = []
    for i in range(num_joints):
      joint_info = self._pybullet_client.getJointInfo(self.quadruped, i)
      joint_name = joint_info[1].decode("UTF-8")
      joint_id = self._joint_name_to_id[joint_name]
      # Order matters: bracket names ("motor..._bracket_joint") also match the
      # generic motor pattern ("motor...joint"), so they must be tested first.
      if _CHASSIS_NAME_PATTERN.match(joint_name):
        self._chassis_link_ids.append(joint_id)
      elif _BRACKET_NAME_PATTERN.match(joint_name):
        self._bracket_link_ids.append(joint_id)
      elif _MOTOR_NAME_PATTERN.match(joint_name):
        self._motor_link_ids.append(joint_id)
      elif _KNEE_NAME_PATTERN.match(joint_name):
        self._foot_link_ids.append(joint_id)
      elif (_LEG_NAME_PATTERN1.match(joint_name) or
            _LEG_NAME_PATTERN2.match(joint_name) or
            _LEG_NAME_PATTERN3.match(joint_name)):
        self._leg_link_ids.append(joint_id)
      else:
        raise ValueError("Unknown category of joint %s" % joint_name)
    # The knee links count both as feet and as part of the legs.
    self._leg_link_ids.extend(self._foot_link_ids)
    self._chassis_link_ids.sort()
    self._motor_link_ids.sort()
    self._foot_link_ids.sort()
    self._leg_link_ids.sort()
    self._bracket_link_ids.sort()
def _RemoveDefaultJointDamping(self):
num_joints = self._pybullet_client.getNumJoints(self.quadruped)
for i in range(num_joints):
joint_info = self._pybullet_client.getJointInfo(self.quadruped, i)
self._pybullet_client.changeDynamics(
joint_info[0], -1, linearDamping=0, angularDamping=0)
def _BuildMotorIdList(self):
self._motor_id_list = [
self._joint_name_to_id[motor_name]
for motor_name in self._GetMotorNames()
]
def _CreateRackConstraint(self, init_position, init_orientation):
"""Create a constraint that keeps the chassis at a fixed frame.
This frame is defined by init_position and init_orientation.
Args:
init_position: initial position of the fixed frame.
init_orientation: initial orientation of the fixed frame in quaternion
format [x,y,z,w].
Returns:
Return the constraint id.
"""
fixed_constraint = self._pybullet_client.createConstraint(
parentBodyUniqueId=self.quadruped,
parentLinkIndex=-1,
childBodyUniqueId=-1,
childLinkIndex=-1,
jointType=self._pybullet_client.JOINT_FIXED,
jointAxis=[0, 0, 0],
parentFramePosition=[0, 0, 0],
childFramePosition=init_position,
childFrameOrientation=init_orientation)
return fixed_constraint
def IsObservationValid(self):
"""Whether the observation is valid for the current time step.
In simulation, observations are always valid. In real hardware, it may not
be valid from time to time when communication error happens between the
Nvidia TX2 and the microcontroller.
Returns:
Whether the observation is valid for the current time step.
"""
return True
  def Reset(self, reload_urdf=True, default_motor_angles=None, reset_time=3.0):
    """Reset the minitaur to its initial states.

    Args:
      reload_urdf: Whether to reload the urdf file. If not, Reset() just place
        the minitaur back to its starting position.
      default_motor_angles: The default motor angles. If it is None, minitaur
        will hold a default pose (motor angle math.pi / 2) for 100 steps. In
        torque control mode, the phase of holding the default pose is skipped.
      reset_time: The duration (in seconds) to hold the default motor angles. If
        reset_time <= 0 or in torque control mode, the phase of holding the
        default pose is skipped.
    """
    if reload_urdf:
      self._LoadRobotURDF()
      if self._on_rack:
        self.rack_constraint = (
            self._CreateRackConstraint(self._GetDefaultInitPosition(),
                                       self._GetDefaultInitOrientation()))
      # Rebuild all cached ids/masses/inertias for the freshly-loaded body.
      self._BuildJointNameToIdDict()
      self._BuildUrdfIds()
      self._RemoveDefaultJointDamping()
      self._BuildMotorIdList()
      self._RecordMassInfoFromURDF()
      self._RecordInertiaInfoFromURDF()
      self.ResetPose(add_constraint=True)
    else:
      # Reuse the existing body: teleport it back and zero its velocity.
      self._pybullet_client.resetBasePositionAndOrientation(
          self.quadruped, self._GetDefaultInitPosition(),
          self._GetDefaultInitOrientation())
      self._pybullet_client.resetBaseVelocity(self.quadruped, [0, 0, 0],
                                              [0, 0, 0])
      self.ResetPose(add_constraint=False)
    # Clear per-episode bookkeeping before settling into the default pose.
    self._overheat_counter = np.zeros(self.num_motors)
    self._motor_enabled_list = [True] * self.num_motors
    self._observation_history.clear()
    self._step_counter = 0
    self._state_action_counter = 0
    self._is_safe = True
    self._last_action = None
    self._SettleDownForReset(default_motor_angles, reset_time)
    if self._enable_action_filter:
      self._ResetActionFilter()
    return
def _LoadRobotURDF(self):
"""Loads the URDF file for the robot."""
urdf_file = self.GetURDFFile()
if self._self_collision_enabled:
self.quadruped = self._pybullet_client.loadURDF(
urdf_file,
self._GetDefaultInitPosition(),
self._GetDefaultInitOrientation(),
flags=self._pybullet_client.URDF_USE_SELF_COLLISION)
else:
self.quadruped = self._pybullet_client.loadURDF(
urdf_file, self._GetDefaultInitPosition(),
self._GetDefaultInitOrientation())
  def _SettleDownForReset(self, default_motor_angles, reset_time):
    """Sets the default motor angles and waits for the robot to settle down.

    The reset is skipped if reset_time is less than or equal to zero.

    Args:
      default_motor_angles: A list of motor angles that the robot will achieve
        at the end of the reset phase.
      reset_time: The time duration for the reset phase.
    """
    if reset_time <= 0:
      return
    # Important to fill the observation buffer.
    self.ReceiveObservation()
    # Phase 1: hold the neutral pose (all motors at pi/2) for 100 steps.
    for _ in range(100):
      self._StepInternal(
          [math.pi / 2] * self.num_motors,
          motor_control_mode=robot_config.MotorControlMode.POSITION)
      # Don't continue to reset if a safety error has occurred.
      if not self._is_safe:
        return
    if default_motor_angles is None:
      return
    # Phase 2: hold the caller-provided pose for reset_time seconds.
    num_steps_to_reset = int(reset_time / self.time_step)
    for _ in range(num_steps_to_reset):
      self._StepInternal(
          default_motor_angles,
          motor_control_mode=robot_config.MotorControlMode.POSITION)
      # Don't continue to reset if a safety error has occurred.
      if not self._is_safe:
        return
def _SetMotorTorqueById(self, motor_id, torque):
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=motor_id,
controlMode=self._pybullet_client.TORQUE_CONTROL,
force=torque)
def _SetMotorTorqueByIds(self, motor_ids, torques):
self._pybullet_client.setJointMotorControlArray(
bodyIndex=self.quadruped,
jointIndices=motor_ids,
controlMode=self._pybullet_client.TORQUE_CONTROL,
forces=torques)
  def _SetDesiredMotorAngleByName(self, motor_name, desired_angle):
    # Translate the URDF joint name to its pybullet id, then delegate.
    # NOTE(review): _SetDesiredMotorAngleById is defined elsewhere in this
    # class (not visible in this chunk) — confirm it exists before relying
    # on this helper.
    self._SetDesiredMotorAngleById(self._joint_name_to_id[motor_name],
                                   desired_angle)
def GetURDFFile(self):
return None
def ResetPose(self, add_constraint):
"""Reset the pose of the minitaur.
Args:
add_constraint: Whether to add a constraint at the joints of two feet.
"""
for i in range(self.num_legs):
self._ResetPoseForLeg(i, add_constraint)
  def _ResetPoseForLeg(self, leg_id, add_constraint):
    """Reset the initial pose for the leg.

    Args:
      leg_id: It should be 0, 1, 2, or 3, which represents the leg at
        front_left, back_left, front_right and back_right.
      add_constraint: Whether to add a constraint at the joints of two feet.
    """
    knee_friction_force = 0
    half_pi = math.pi / 2.0
    knee_angle = -2.1834
    leg_position = LEG_POSITION[leg_id]
    # Snap the two motor joints and the two knee joints of this leg to the
    # default pose, applying the per-motor direction sign.
    self._pybullet_client.resetJointState(
        self.quadruped,
        self._joint_name_to_id["motor_" + leg_position + "L_joint"],
        self._motor_direction[2 * leg_id] * half_pi,
        targetVelocity=0)
    self._pybullet_client.resetJointState(
        self.quadruped,
        self._joint_name_to_id["knee_" + leg_position + "L_link"],
        self._motor_direction[2 * leg_id] * knee_angle,
        targetVelocity=0)
    self._pybullet_client.resetJointState(
        self.quadruped,
        self._joint_name_to_id["motor_" + leg_position + "R_joint"],
        self._motor_direction[2 * leg_id + 1] * half_pi,
        targetVelocity=0)
    self._pybullet_client.resetJointState(
        self.quadruped,
        self._joint_name_to_id["knee_" + leg_position + "R_link"],
        self._motor_direction[2 * leg_id + 1] * knee_angle,
        targetVelocity=0)
    if add_constraint:
      # Close the leg's linkage by pinning the two knee links together.
      self._pybullet_client.createConstraint(
          self.quadruped,
          self._joint_name_to_id["knee_" + leg_position + "R_link"],
          self.quadruped,
          self._joint_name_to_id["knee_" + leg_position + "L_link"],
          self._pybullet_client.JOINT_POINT2POINT, [0, 0, 0],
          KNEE_CONSTRAINT_POINT_RIGHT, KNEE_CONSTRAINT_POINT_LEFT)
    # Disable the default motor in pybullet.
    # A tiny velocity-control force (knee_friction_force, here 0) stands in
    # for passive joint friction; real actuation comes from TORQUE_CONTROL
    # in ApplyAction.
    self._pybullet_client.setJointMotorControl2(
        bodyIndex=self.quadruped,
        jointIndex=(self._joint_name_to_id["motor_" + leg_position +
                                           "L_joint"]),
        controlMode=self._pybullet_client.VELOCITY_CONTROL,
        targetVelocity=0,
        force=knee_friction_force)
    self._pybullet_client.setJointMotorControl2(
        bodyIndex=self.quadruped,
        jointIndex=(self._joint_name_to_id["motor_" + leg_position +
                                           "R_joint"]),
        controlMode=self._pybullet_client.VELOCITY_CONTROL,
        targetVelocity=0,
        force=knee_friction_force)
    self._pybullet_client.setJointMotorControl2(
        bodyIndex=self.quadruped,
        jointIndex=(self._joint_name_to_id["knee_" + leg_position + "L_link"]),
        controlMode=self._pybullet_client.VELOCITY_CONTROL,
        targetVelocity=0,
        force=knee_friction_force)
    self._pybullet_client.setJointMotorControl2(
        bodyIndex=self.quadruped,
        jointIndex=(self._joint_name_to_id["knee_" + leg_position + "R_link"]),
        controlMode=self._pybullet_client.VELOCITY_CONTROL,
        targetVelocity=0,
        force=knee_friction_force)
  def GetBasePosition(self):
    """Get the position of minitaur's base.

    Returns:
      The position of minitaur's base.
    """
    # NOTE(review): returns a value cached elsewhere (presumably by
    # ReceiveObservation); it is not queried live from pybullet here.
    return self._base_position
def GetBaseVelocity(self):
"""Get the linear velocity of minitaur's base.
Returns:
The velocity of minitaur's base.
"""
velocity, _ = self._pybullet_client.getBaseVelocity(self.quadruped)
return velocity
def GetTrueBaseRollPitchYaw(self):
"""Get minitaur's base orientation in euler angle in the world frame.
Returns:
A tuple (roll, pitch, yaw) of the base in world frame.
"""
orientation = self.GetTrueBaseOrientation()
roll_pitch_yaw = self._pybullet_client.getEulerFromQuaternion(orientation)
return np.asarray(roll_pitch_yaw)
  def GetBaseRollPitchYaw(self):
    """Get minitaur's base orientation in euler angle in the world frame.

    This function mimics the noisy sensor reading and adds latency.

    Returns:
      A tuple (roll, pitch, yaw) of the base in world frame polluted by noise
      and latency.
    """
    # _control_observation layout (n = num_motors): [0:n) angles,
    # [n:2n) velocities, [2n:3n) torques, [3n:3n+4) base quaternion,
    # [3n+4:3n+7) base angular rate.
    delayed_orientation = np.array(
        self._control_observation[3 * self.num_motors:3 * self.num_motors + 4])
    delayed_roll_pitch_yaw = self._pybullet_client.getEulerFromQuaternion(
        delayed_orientation)
    roll_pitch_yaw = self._AddSensorNoise(
        np.array(delayed_roll_pitch_yaw), self._observation_noise_stdev[3])
    return roll_pitch_yaw
  def GetHipPositionsInBaseFrame(self):
    """Get the hip joint positions of the robot within its base frame."""
    # Abstract in the Minitaur base class; concrete subclasses override this.
    raise NotImplementedError("Not implemented for Minitaur.")
  def ComputeMotorAnglesFromFootLocalPosition(self, leg_id,
                                              foot_local_position):
    """Use IK to compute the motor angles, given the foot link's local position.

    Args:
      leg_id: The leg index.
      foot_local_position: The foot link's position in the base frame.

    Returns:
      A tuple. The position indices and the angles for all joints along the
      leg. The position indices is consistent with the joint orders as returned
      by GetMotorAngles API.
    """
    assert len(self._foot_link_ids) == self.num_legs
    toe_id = self._foot_link_ids[leg_id]
    motors_per_leg = self.num_motors // self.num_legs
    joint_position_idxs = [
        i for i in range(leg_id * motors_per_leg, leg_id * motors_per_leg +
                         motors_per_leg)
    ]
    joint_angles = kinematics.joint_angles_from_link_position(
        robot=self,
        link_position=foot_local_position,
        link_id=toe_id,
        joint_ids=joint_position_idxs,
    )
    # Joint offset is necessary for Laikago.
    # NOTE(review): self._motor_direction[joint_position_idxs] fancy-indexes
    # with a list, which requires _motor_direction to be an np.ndarray; with
    # the default tuple MINITAUR_DEFAULT_MOTOR_DIRECTIONS this would raise
    # TypeError — confirm callers/subclasses always supply an array.
    joint_angles = np.multiply(
        np.asarray(joint_angles) -
        np.asarray(self._motor_offset)[joint_position_idxs],
        self._motor_direction[joint_position_idxs])
    # Return the joing index (the same as when calling GetMotorAngles) as well
    # as the angles.
    return joint_position_idxs, joint_angles.tolist()
def ComputeJacobian(self, leg_id):
"""Compute the Jacobian for a given leg."""
# Does not work for Minitaur which has the four bar mechanism for now.
assert len(self._foot_link_ids) == self.num_legs
return kinematics.compute_jacobian(
robot=self,
link_id=self._foot_link_ids[leg_id],
)
  def MapContactForceToJointTorques(self, leg_id, contact_force):
    """Maps the foot contact force to the leg joint torques."""
    jv = self.ComputeJacobian(leg_id)
    # tau = J^T * F, computed over the full generalized-coordinate vector.
    all_motor_torques = np.matmul(contact_force, jv)
    motor_torques = {}
    motors_per_leg = self.num_motors // self.num_legs
    # The first com_dof entries of the full-space torque vector belong to the
    # floating base, so joint torques start at offset com_dof.
    com_dof = 6
    for joint_id in range(leg_id * motors_per_leg,
                          (leg_id + 1) * motors_per_leg):
      motor_torques[joint_id] = all_motor_torques[
          com_dof + joint_id] * self._motor_direction[joint_id]
    return motor_torques
  def GetFootContacts(self):
    """Get minitaur's foot contact situation with the ground.

    Returns:
      A list of 4 booleans. The ith boolean is True if leg i is in contact with
      ground.
    """
    contacts = []
    # Each leg has two knee links; the leg is "in contact" if either touches.
    for leg_idx in range(MINITAUR_NUM_MOTORS // 2):
      link_id_1 = self._foot_link_ids[leg_idx * 2]
      link_id_2 = self._foot_link_ids[leg_idx * 2 + 1]
      # NOTE(review): bodyA=0 assumes the ground plane is pybullet body id 0 —
      # confirm against the environment's loading order.
      contact_1 = bool(
          self._pybullet_client.getContactPoints(
              bodyA=0,
              bodyB=self.quadruped,
              linkIndexA=-1,
              linkIndexB=link_id_1))
      contact_2 = bool(
          self._pybullet_client.getContactPoints(
              bodyA=0,
              bodyB=self.quadruped,
              linkIndexA=-1,
              linkIndexB=link_id_2))
      contacts.append(contact_1 or contact_2)
    return contacts
def GetFootPositionsInBaseFrame(self):
"""Get the robot's foot position in the base frame."""
assert len(self._foot_link_ids) == self.num_legs
foot_positions = []
for foot_id in self.GetFootLinkIDs():
foot_positions.append(
kinematics.link_position_in_base_frame(
robot=self,
link_id=foot_id,
))
return np.array(foot_positions)
  def GetTrueMotorAngles(self):
    """Gets the eight motor angles at the current moment.

    Offsets and motor directions are compensated, but — unlike the docstring
    of GetMotorAngles suggests — the result is NOT wrapped to [-pi, pi];
    wrapping only happens in GetMotorAngles.

    Returns:
      Motor angles with offset subtracted and direction sign applied.
    """
    motor_angles = [state[0] for state in self._joint_states]
    motor_angles = np.multiply(
        np.asarray(motor_angles) - np.asarray(self._motor_offset),
        self._motor_direction)
    return motor_angles
  def GetMotorAngles(self):
    """Gets the eight motor angles.

    This function mimics the noisy sensor reading and adds latency. The motor
    angles that are delayed, noise polluted, and mapped to [-pi, pi].

    Returns:
      Motor angles polluted by noise and latency, mapped to [-pi, pi].
    """
    # First num_motors entries of _control_observation are the delayed angles.
    motor_angles = self._AddSensorNoise(
        np.array(self._control_observation[0:self.num_motors]),
        self._observation_noise_stdev[0])
    return MapToMinusPiToPi(motor_angles)
def GetTrueMotorVelocities(self):
"""Get the velocity of all eight motors.
Returns:
Velocities of all eight motors.
"""
motor_velocities = [state[1] for state in self._joint_states]
motor_velocities = np.multiply(motor_velocities, self._motor_direction)
return motor_velocities
  def GetMotorVelocities(self):
    """Get the velocity of all eight motors.

    This function mimics the noisy sensor reading and adds latency.

    Returns:
      Velocities of all eight motors polluted by noise and latency.
    """
    # _control_observation[n:2n) holds the delayed motor velocities.
    return self._AddSensorNoise(
        np.array(self._control_observation[self.num_motors:2 *
                                           self.num_motors]),
        self._observation_noise_stdev[1])
def GetTrueMotorTorques(self):
"""Get the amount of torque the motors are exerting.
Returns:
Motor torques of all eight motors.
"""
return self._observed_motor_torques
  def GetMotorTorques(self):
    """Get the amount of torque the motors are exerting.

    This function mimics the noisy sensor reading and adds latency.

    Returns:
      Motor torques of all eight motors polluted by noise and latency.
    """
    # _control_observation[2n:3n) holds the delayed motor torques.
    return self._AddSensorNoise(
        np.array(self._control_observation[2 * self.num_motors:3 *
                                           self.num_motors]),
        self._observation_noise_stdev[2])
def GetEnergyConsumptionPerControlStep(self):
"""Get the amount of energy used in last one time step.
Returns:
Energy Consumption based on motor velocities and torques (Nm^2/s).
"""
return np.abs(np.dot(
self.GetMotorTorques(),
self.GetMotorVelocities())) * self.time_step * self._action_repeat
def GetTrueBaseOrientation(self):
"""Get the orientation of minitaur's base, represented as quaternion.
Returns:
The orientation of minitaur's base.
"""
return self._base_orientation
def GetBaseOrientation(self):
"""Get the orientation of minitaur's base, represented as quaternion.
This function mimicks the noisy sensor reading and adds latency.
Returns:
The orientation of minitaur's base polluted by noise and latency.
"""
return self._pybullet_client.getQuaternionFromEuler(
self.GetBaseRollPitchYaw())
def GetTrueBaseRollPitchYawRate(self):
"""Get the rate of orientation change of the minitaur's base in euler angle.
Returns:
rate of (roll, pitch, yaw) change of the minitaur's base.
"""
angular_velocity = self._pybullet_client.getBaseVelocity(self.quadruped)[1]
orientation = self.GetTrueBaseOrientation()
return self.TransformAngularVelocityToLocalFrame(angular_velocity,
orientation)
  def TransformAngularVelocityToLocalFrame(self, angular_velocity, orientation):
    """Transform the angular velocity from world frame to robot's frame.

    Args:
      angular_velocity: Angular velocity of the robot in world frame.
      orientation: Orientation of the robot represented as a quaternion.

    Returns:
      angular velocity of based on the given orientation.
    """
    # Treat angular velocity as a position vector, then transform based on the
    # orientation given by dividing (or multiplying with inverse).
    # Get inverse quaternion assuming the vector is at 0,0,0 origin.
    _, orientation_inversed = self._pybullet_client.invertTransform([0, 0, 0],
                                                                    orientation)
    # Transform the angular_velocity at neutral orientation using a neutral
    # translation and reverse of the given orientation.
    relative_velocity, _ = self._pybullet_client.multiplyTransforms(
        [0, 0, 0], orientation_inversed, angular_velocity,
        self._pybullet_client.getQuaternionFromEuler([0, 0, 0]))
    return np.asarray(relative_velocity)
  def GetBaseRollPitchYawRate(self):
    """Get the rate of orientation change of the minitaur's base in euler angle.

    This function mimics the noisy sensor reading and adds latency.

    Returns:
      rate of (roll, pitch, yaw) change of the minitaur's base polluted by noise
      and latency.
    """
    # _control_observation[3n+4:3n+7) holds the delayed base angular rate
    # (right after the 4-element base quaternion).
    return self._AddSensorNoise(
        np.array(self._control_observation[3 * self.num_motors +
                                           4:3 * self.num_motors + 7]),
        self._observation_noise_stdev[4])
def GetActionDimension(self):
"""Get the length of the action list.
Returns:
The length of the action list.
"""
return self.num_motors
  def _ApplyOverheatProtection(self, actual_torque):
    # Disable any motor whose |torque| stays above OVERHEAT_SHUTDOWN_TORQUE
    # for longer than OVERHEAT_SHUTDOWN_TIME. The counter is in simulation
    # steps and resets to zero as soon as the torque drops below the limit.
    # Once disabled, a motor stays disabled until Reset().
    if self._motor_overheat_protection:
      for i in range(self.num_motors):
        if abs(actual_torque[i]) > OVERHEAT_SHUTDOWN_TORQUE:
          self._overheat_counter[i] += 1
        else:
          self._overheat_counter[i] = 0
        if (self._overheat_counter[i] >
            OVERHEAT_SHUTDOWN_TIME / self.time_step):
          self._motor_enabled_list[i] = False
  def ApplyAction(self, motor_commands, motor_control_mode=None):
    """Apply the motor commands using the motor model.

    Args:
      motor_commands: np.array. Can be motor angles, torques, hybrid commands,
        or motor pwms (for Minitaur only).
      motor_control_mode: A MotorControlMode enum. Falls back to the mode set
        at construction time when None.
    """
    self.last_action_time = self._state_action_counter * self.time_step
    control_mode = motor_control_mode
    if control_mode is None:
      control_mode = self._motor_control_mode
    motor_commands = np.asarray(motor_commands)
    # q/qdot are the (pd-latency) delayed readings used for PD control.
    q, qdot = self._GetPDObservation()
    qdot_true = self.GetTrueMotorVelocities()
    actual_torque, observed_torque = self._motor_model.convert_to_torque(
        motor_commands, q, qdot, qdot_true, control_mode)
    # May turn off the motor
    self._ApplyOverheatProtection(actual_torque)
    # The torque is already in the observation space because we use
    # GetMotorAngles and GetMotorVelocities.
    self._observed_motor_torques = observed_torque
    # Transform into the motor space when applying the torque.
    self._applied_motor_torque = np.multiply(actual_torque,
                                             self._motor_direction)
    motor_ids = []
    motor_torques = []
    # Motors disabled by overheat protection are commanded zero torque.
    for motor_id, motor_torque, motor_enabled in zip(self._motor_id_list,
                                                     self._applied_motor_torque,
                                                     self._motor_enabled_list):
      if motor_enabled:
        motor_ids.append(motor_id)
        motor_torques.append(motor_torque)
      else:
        motor_ids.append(motor_id)
        motor_torques.append(0)
    self._SetMotorTorqueByIds(motor_ids, motor_torques)
  def ConvertFromLegModel(self, actions):
    """Convert the actions that use leg model to the real motor actions.

    Args:
      actions: The theta, phi of the leg model. As used below, the first
        num_motors // 2 entries are the per-leg extension components and the
        second half are the per-leg swing (forward/backward) components.

    Returns:
      The eight desired motor angles that can be used in ApplyActions().
    """
    motor_angle = copy.deepcopy(actions)
    scale_for_singularity = 1
    offset_for_singularity = 1.5
    half_num_motors = self.num_motors // 2
    quater_pi = math.pi / 4
    for i in range(self.num_motors):
      # Motors i and i+1 belong to the same leg (action_idx selects the leg).
      action_idx = i // 2
      forward_backward_component = (
          -scale_for_singularity * quater_pi *
          (actions[action_idx + half_num_motors] + offset_for_singularity))
      # Alternate the extension sign between the L and R motors of a leg,
      # and flip it again for the right-side legs.
      extension_component = (-1)**i * quater_pi * actions[action_idx]
      if i >= half_num_motors:
        extension_component = -extension_component
      motor_angle[i] = (
          math.pi + forward_backward_component + extension_component)
    return motor_angle
def GetBaseMassesFromURDF(self):
"""Get the mass of the base from the URDF file."""
return self._base_mass_urdf
def GetBaseInertiasFromURDF(self):
"""Get the inertia of the base from the URDF file."""
return self._base_inertia_urdf
def GetLegMassesFromURDF(self):
"""Get the mass of the legs from the URDF file."""
return self._leg_masses_urdf
def GetLegInertiasFromURDF(self):
"""Get the inertia of the legs from the URDF file."""
return self._leg_inertia_urdf
  def SetBaseMasses(self, base_mass):
    """Set the mass of minitaur's base.

    Args:
      base_mass: A list of masses of each body link in CHASIS_LINK_IDS. The
        length of this list should be the same as the length of CHASIS_LINK_IDS.

    Raises:
      ValueError: It is raised when the length of base_mass is not the same as
        the length of self._chassis_link_ids.
    """
    if len(base_mass) != len(self._chassis_link_ids):
      raise ValueError(
          "The length of base_mass {} and self._chassis_link_ids {} are not "
          "the same.".format(len(base_mass), len(self._chassis_link_ids)))
    for chassis_id, chassis_mass in zip(self._chassis_link_ids, base_mass):
      self._pybullet_client.changeDynamics(
          self.quadruped, chassis_id, mass=chassis_mass)
  def SetLegMasses(self, leg_masses):
    """Set the mass of the legs.

    A leg includes leg_link and motor. 4 legs contain 16 links (4 links each)
    and 8 motors. First 16 numbers correspond to link masses, last 8 correspond
    to motor masses (24 total).

    Args:
      leg_masses: The leg and motor masses for all the leg links and motors.

    Raises:
      ValueError: It is raised when the length of masses is not equal to number
        of links + motors.
    """
    if len(leg_masses) != len(self._leg_link_ids) + len(self._motor_link_ids):
      raise ValueError("The number of values passed to SetLegMasses are "
                       "different than number of leg links and motors.")
    for leg_id, leg_mass in zip(self._leg_link_ids, leg_masses):
      self._pybullet_client.changeDynamics(
          self.quadruped, leg_id, mass=leg_mass)
    # The remaining entries (after the leg links) are the motor masses.
    motor_masses = leg_masses[len(self._leg_link_ids):]
    for link_id, motor_mass in zip(self._motor_link_ids, motor_masses):
      self._pybullet_client.changeDynamics(
          self.quadruped, link_id, mass=motor_mass)
  def SetBaseInertias(self, base_inertias):
    """Set the inertias of minitaur's base.

    Args:
      base_inertias: A list of inertias of each body link in CHASIS_LINK_IDS.
        The length of this list should be the same as the length of
        CHASIS_LINK_IDS.

    Raises:
      ValueError: It is raised when the length of base_inertias is not the same
        as the length of self._chassis_link_ids and base_inertias contains
        negative values.
    """
    if len(base_inertias) != len(self._chassis_link_ids):
      raise ValueError(
          "The length of base_inertias {} and self._chassis_link_ids {} are "
          "not the same.".format(
              len(base_inertias), len(self._chassis_link_ids)))
    for chassis_id, chassis_inertia in zip(self._chassis_link_ids,
                                           base_inertias):
      # Validate every component of this link's inertia before applying it.
      for inertia_value in chassis_inertia:
        if (np.asarray(inertia_value) < 0).any():
          raise ValueError("Values in inertia matrix should be non-negative.")
      self._pybullet_client.changeDynamics(
          self.quadruped, chassis_id, localInertiaDiagonal=chassis_inertia)
def SetLegInertias(self, leg_inertias):
"""Set the inertias of the legs.
A leg includes leg_link and motor. 4 legs contain 16 links (4 links each)
and 8 motors. First 16 numbers correspond to link inertia, last 8 correspond
to motor inertia (24 total).
Args:
leg_inertias: The leg and motor inertias for all the leg links and motors.
Raises:
ValueError: It is raised when the length of inertias is not equal to
the number of links + motors or leg_inertias contains negative values.
"""
if len(leg_inertias) != len(self._leg_link_ids) + len(self._motor_link_ids):
raise ValueError("The number of values passed to SetLegMasses are "
"different than number of leg links and motors.")
for leg_id, leg_inertia in zip(self._leg_link_ids, leg_inertias):
for inertia_value in leg_inertias:
if (np.asarray(inertia_value) < 0).any():
raise ValueError("Values in inertia matrix should be non-negative.")
self._pybullet_client.changeDynamics(
self.quadruped, leg_id, localInertiaDiagonal=leg_inertia)
motor_inertias = leg_inertias[len(self._leg_link_ids):]
for link_id, motor_inertia in zip(self._motor_link_ids, motor_inertias):
for inertia_value in motor_inertias:
if (np.asarray(inertia_value) < 0).any():
raise ValueError("Values in inertia matrix should be non-negative.")
self._pybullet_client.changeDynamics(
self.quadruped, link_id, localInertiaDiagonal=motor_inertia)
  def SetFootFriction(self, foot_friction):
    """Set the lateral friction of the feet.
    Args:
      foot_friction: The lateral friction coefficient of the foot. This value is
        shared by all four feet.
    """
    # The same coefficient is applied to every foot link.
    for link_id in self._foot_link_ids:
      self._pybullet_client.changeDynamics(
          self.quadruped, link_id, lateralFriction=foot_friction)
  def SetFootRestitution(self, foot_restitution):
    """Set the coefficient of restitution at the feet.
    Args:
      foot_restitution: The coefficient of restitution (bounciness) of the feet.
        This value is shared by all four feet.
    """
    for link_id in self._foot_link_ids:
      self._pybullet_client.changeDynamics(
          self.quadruped, link_id, restitution=foot_restitution)
  def SetJointFriction(self, joint_frictions):
    """Emulate joint friction on each knee joint by commanding zero velocity
    with a friction-sized force limit.
    Args:
      joint_frictions: One friction value per knee (foot) joint.
    """
    for knee_joint_id, friction in zip(self._foot_link_ids, joint_frictions):
      self._pybullet_client.setJointMotorControl2(
          bodyIndex=self.quadruped,
          jointIndex=knee_joint_id,
          controlMode=self._pybullet_client.VELOCITY_CONTROL,
          targetVelocity=0,
          force=friction)
  def GetNumKneeJoints(self):
    """Return the number of knee joints (one per foot link)."""
    return len(self._foot_link_ids)
  def SetBatteryVoltage(self, voltage):
    """Forward the battery voltage to the motor model."""
    self._motor_model.set_voltage(voltage)
  def SetMotorViscousDamping(self, viscous_damping):
    """Forward the viscous damping coefficient to the motor model."""
    self._motor_model.set_viscous_damping(viscous_damping)
def GetTrueObservation(self):
observation = []
observation.extend(self.GetTrueMotorAngles())
observation.extend(self.GetTrueMotorVelocities())
observation.extend(self.GetTrueMotorTorques())
observation.extend(self.GetTrueBaseOrientation())
observation.extend(self.GetTrueBaseRollPitchYawRate())
return observation
  def ReceiveObservation(self):
    """Receive the observation from sensors.
    This function is called once per step. The observations are only updated
    when this function is called.
    """
    # Snapshot the joint states of all motor joints from the simulator.
    self._joint_states = self._pybullet_client.getJointStates(
        self.quadruped, self._motor_id_list)
    self._base_position, orientation = (
        self._pybullet_client.getBasePositionAndOrientation(self.quadruped))
    # Computes the relative orientation relative to the robot's
    # initial_orientation.
    _, self._base_orientation = self._pybullet_client.multiplyTransforms(
        positionA=[0, 0, 0],
        orientationA=orientation,
        positionB=[0, 0, 0],
        orientationB=self._init_orientation_inv)
    # Newest sample goes to the front of the history; _GetDelayedObservation
    # indexes into this deque to simulate sensor latency.
    self._observation_history.appendleft(self.GetTrueObservation())
    self._control_observation = self._GetControlObservation()
    self.last_state_time = self._state_action_counter * self.time_step
  def _GetDelayedObservation(self, latency):
    """Get observation that is delayed by the amount specified in latency.
    Args:
      latency: The latency (in seconds) of the delayed observation.
    Returns:
      observation: The observation which was actually latency seconds ago.
    """
    if latency <= 0 or len(self._observation_history) == 1:
      # No delay requested (or only one sample recorded): newest observation.
      observation = self._observation_history[0]
    else:
      n_steps_ago = int(latency / self.time_step)
      if n_steps_ago + 1 >= len(self._observation_history):
        # Latency reaches past the recorded history; fall back to the oldest.
        return self._observation_history[-1]
      remaining_latency = latency - n_steps_ago * self.time_step
      blend_alpha = remaining_latency / self.time_step
      # Linearly interpolate between the two samples bracketing the requested
      # latency (index 0 of the history is the most recent observation).
      observation = (
          (1.0 - blend_alpha) * np.array(self._observation_history[n_steps_ago])
          + blend_alpha * np.array(self._observation_history[n_steps_ago + 1]))
    return observation
  def _GetPDObservation(self):
    """Return (angles, velocities) delayed by the PD-loop latency."""
    pd_delayed_observation = self._GetDelayedObservation(self._pd_latency)
    # The flat observation starts with num_motors angles followed by
    # num_motors velocities (see GetTrueObservation's layout).
    q = pd_delayed_observation[0:self.num_motors]
    qdot = pd_delayed_observation[self.num_motors:2 * self.num_motors]
    return (np.array(q), np.array(qdot))
  def _GetControlObservation(self):
    """Return the full observation delayed by the control-loop latency."""
    control_delayed_observation = self._GetDelayedObservation(
        self._control_latency)
    return control_delayed_observation
def _AddSensorNoise(self, sensor_values, noise_stdev):
if noise_stdev <= 0:
return sensor_values
observation = sensor_values + np.random.normal(
scale=noise_stdev, size=sensor_values.shape)
return observation
  def SetControlLatency(self, latency: float) -> None:
    """Set the latency of the control loop.
    It measures the duration between sending an action from Nvidia TX2 and
    receiving the observation from microcontroller.
    Args:
      latency: The latency (in seconds) of the control loop.
    """
    self._control_latency = latency
  def GetControlLatency(self) -> float:
    """Get the control latency.
    Returns:
      The latency (in seconds) between when the motor command is sent and when
      the sensor measurements are reported back to the controller.
    """
    return self._control_latency
def SetMotorGains(self, kp, kd):
"""Set the gains of all motors.
These gains are PD gains for motor positional control. kp is the
proportional gain and kd is the derivative gain.
Args:
kp: proportional gain(s) of the motors.
kd: derivative gain(s) of the motors.
"""
if isinstance(kp, (collections.Sequence, np.ndarray)):
self._motor_kps = np.asarray(kp)
else:
self._motor_kps = np.full(self.num_motors, kp)
if isinstance(kd, (collections.Sequence, np.ndarray)):
self._motor_kds = np.asarray(kd)
else:
self._motor_kds = np.full(self.num_motors, kd)
self._motor_model.set_motor_gains(kp, kd)
  def GetMotorGains(self):
    """Get the gains of the motor.
    Returns:
      The proportional gain.
      The derivative gain.
    """
    # Both are per-motor numpy arrays populated by SetMotorGains.
    return self._motor_kps, self._motor_kds
  def GetMotorPositionGains(self):
    """Get the position gains of the motor.
    Returns:
      The proportional gain.
    """
    return self._motor_kps
  def GetMotorVelocityGains(self):
    """Get the velocity gains of the motor.
    Returns:
      The derivative gain.
    """
    return self._motor_kds
  def SetMotorStrengthRatio(self, ratio):
    """Set the strength of all motors relative to the default value.
    Args:
      ratio: The relative strength. A scalar range from 0.0 to 1.0.
    """
    # Broadcast the single scalar to every motor.
    self._motor_model.set_strength_ratios([ratio] * self.num_motors)
  def SetMotorStrengthRatios(self, ratios):
    """Set the strength of each motor relative to the default value.
    Args:
      ratios: The relative strength. A numpy array ranging from 0.0 to 1.0.
    """
    self._motor_model.set_strength_ratios(ratios)
  def SetTimeSteps(self, action_repeat, simulation_step):
    """Set the time steps of the control and simulation.
    Args:
      action_repeat: The number of simulation steps that the same action is
        repeated.
      simulation_step: The simulation time step.
    """
    self.time_step = simulation_step
    self._action_repeat = action_repeat
  def _GetMotorNames(self):
    """Return the module-level list of motor joint names."""
    return MOTOR_NAMES
def _GetDefaultInitPosition(self):
"""Returns the init position of the robot.
It can be either 1) origin (INIT_POSITION), 2) origin with a rack
(INIT_RACK_POSITION), or 3) the previous position.
"""
# If we want continuous resetting and is not the first episode.
if self._reset_at_current_position and self._observation_history:
x, y, _ = self.GetBasePosition()
_, _, z = INIT_POSITION
return [x, y, z]
if self._on_rack:
return INIT_RACK_POSITION
else:
return INIT_POSITION
  def _GetDefaultInitOrientation(self):
    """Returns the init orientation of the robot.
    It can be either 1) INIT_ORIENTATION or 2) the previous rotation in yaw.
    """
    # If we want continuous resetting and is not the first episode.
    if self._reset_at_current_position and self._observation_history:
      # Preserve only the yaw; roll and pitch are reset to level.
      _, _, yaw = self.GetBaseRollPitchYaw()
      return self._pybullet_client.getQuaternionFromEuler([0.0, 0.0, yaw])
    return INIT_ORIENTATION
  @property
  def chassis_link_ids(self):
    # Link ids that make up the robot's chassis (read-only view).
    return self._chassis_link_ids
  def SetAllSensors(self, sensors):
    """set all sensors to this robot and move the ownership to this robot.
    Args:
      sensors: a list of sensors to this robot.
    """
    for s in sensors:
      # Each sensor keeps a back-reference to the robot it measures.
      s.set_robot(self)
    self._sensors = sensors
  def GetAllSensors(self):
    """get all sensors associated with this robot.
    Returns:
      sensors: a list of all sensors.
    """
    return self._sensors
def GetSensor(self, name):
"""get the first sensor with the given name.
This function return None if a sensor with the given name does not exist.
Args:
name: the name of the sensor we are looking
Returns:
sensor: a sensor with the given name. None if not exists.
"""
for s in self._sensors:
if s.get_name() == name:
return s
return None
  @property
  def is_safe(self):
    # Whether the robot is still considered in a safe state (flag is
    # maintained elsewhere in this class).
    return self._is_safe
  @property
  def last_action(self):
    # The most recently applied action; presumably None before the first
    # step — TODO confirm against the initializer.
    return self._last_action
def ProcessAction(self, action, substep_count):
"""If enabled, interpolates between the current and previous actions.
Args:
action: current action.
substep_count: the step count should be between [0, self.__action_repeat).
Returns:
If interpolation is enabled, returns interpolated action depending on
the current action repeat substep.
"""
if self._enable_action_interpolation:
if self._last_action is not None:
prev_action = self._last_action
else:
prev_action = self.GetMotorAngles()
lerp = float(substep_count + 1) / self._action_repeat
proc_action = prev_action + lerp * (action - prev_action)
else:
proc_action = action
return proc_action
  def _BuildActionFilter(self):
    """Construct a Butterworth action filter sized for the control rate.
    The sampling rate is the effective control frequency: one filtered
    action per (time_step * action_repeat) seconds.
    """
    sampling_rate = 1 / (self.time_step * self._action_repeat)
    num_joints = self.GetActionDimension()
    a_filter = action_filter.ActionFilterButter(
        sampling_rate=sampling_rate, num_joints=num_joints)
    return a_filter
  def _ResetActionFilter(self):
    """Clear the action filter history (called on episode reset)."""
    self._action_filter.reset()
    return
  def _FilterAction(self, action):
    """Low-pass filter the action, seeding the filter history on first use."""
    # initialize the filter history, since resetting the filter will fill
    # the history with zeros and this can cause sudden movements at the start
    # of each episode
    if self._step_counter == 0:
      default_action = self.GetMotorAngles()
      self._action_filter.init_history(default_action)
    filtered_action = self._action_filter.filter(action)
    return filtered_action
  @property
  def pybullet_client(self):
    # The pybullet client instance all simulation calls go through.
    return self._pybullet_client
  @property
  def joint_states(self):
    # Raw joint states cached by the last ReceiveObservation() call.
    return self._joint_states
  @classmethod
  def GetConstants(cls):
    """Return the module holding this robot's shared constants."""
    del cls  # unused; deleted deliberately
    return minitaur_constants
"franktian424@qq.com"
] | franktian424@qq.com |
c785fc4d347223457e4644dfcffd8c08364a742c | 7700f9014b8f34a2ab9795ce51572dc0c311aae4 | /source/nc_data_tools/data_tools/reformat_raster.py | f9b5e02b9b55b38cb47f49377410edccbf10ef9c | [] | no_license | geoneric/nc_data_tools | 59c057476dfaaccc6a30348d7aecfc22d026c3dc | 553828836a616e307ca5d60d7f845e5e90ee2429 | refs/heads/master | 2021-01-11T22:46:47.139938 | 2017-06-02T08:25:35 | 2017-06-02T08:25:35 | 79,031,535 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 622 | py | import rasterio
from .driver import driver_by_pathname
def reformat_raster(
        source_raster_pathname,
        target_raster_pathname,
        override_crs=None):
    """Copy a raster into a new file in the format implied by the target
    pathname's extension.

    Args:
        source_raster_pathname: Raster to read.
        target_raster_pathname: Raster to write; its extension selects the
            output driver via driver_by_pathname.
        override_crs: When given, written into the output profile in place
            of the source CRS.
    """
    output_driver = driver_by_pathname(target_raster_pathname)
    with rasterio.open(source_raster_pathname) as source_raster:
        # Reuse the source profile, swapping in the output driver (and,
        # optionally, the overriding CRS).
        profile = source_raster.profile
        profile["driver"] = output_driver
        if override_crs is not None:
            profile["crs"] = override_crs
        with rasterio.open(
                target_raster_pathname, "w", **profile) as target_raster:
            target_raster.write(source_raster.read())
| [
"kor@jemig.eu"
] | kor@jemig.eu |
a6ca45275323f2440e95e9be09e07f653e6250ef | f9e4c2e9cd4a95dc228b384e2e8abadc9f1b0bda | /fratevents/settings.py | 22d7c2df522fd15d68bce7043a05c6b6fa4c9fe0 | [] | no_license | sanchitbareja/fratevents | 227adddd77c9a0055ccd74d5e0bf6f771790f8d3 | f50c8ccb40b8c9124b40e70d90c9190ef27a2fb7 | refs/heads/master | 2016-09-06T15:36:45.443412 | 2013-02-16T21:13:36 | 2013-02-16T21:13:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,645 | py | # Django settings for fratevents project.
import os, os.path, social_auth
# NOTE(review): both branches set DEBUG = True, so this environment check is
# a no-op; the deployed environment (DATABASE_URL present) runs with DEBUG
# enabled, which exposes stack traces and settings.  The else-branch was
# presumably intended for local development only — confirm and fix.
if os.environ.has_key('DATABASE_URL'):
    DEBUG = True
else:
    DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Sanchit Bareja', 'sanchitbareja@gmail.com'),
)
MANAGERS = ADMINS
if os.environ.has_key('DATABASE_URL'):
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'fratevents', # Or path to database file if using sqlite3.
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': 'root', # Not used with sqlite3.
'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/static/'
SEND_BROKEN_LINK_EMAILS = True
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = '/static/'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = os.path.join(os.path.dirname(__file__), 'static/').replace('\\','/')
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__), 'static/').replace('\\','/'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# SECURITY(review): this key is committed to version control, so it is no
# longer secret; regenerate it and load it from an environment variable.
SECRET_KEY = 'rsx9)l1^_bsmeyipfk9u#t#gdt%@po-i-hr+#8ensmg012!kpn'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'social_auth.middleware.SocialAuthExceptionMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'fratevents.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'fratevents.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__), 'views').replace('\\','/'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'south',
'gunicorn',
'events',
'clubs',
'rage',
'userprofile',
'social_auth',
'storages',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# EMAIL SETTINGS
# SECURITY(review): live SMTP credentials are hard-coded below; move them to
# environment variables and rotate this password.
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = 'caleventsinfo@gmail.com'
EMAIL_HOST_PASSWORD = 'qwaszx12,'
EMAIL_PORT = 587
# Addresses notified about event administration.
EVENT_MASTERS = ['sanchitbareja@gmail.com','hahardikagrawal@gmail.com','caleventsinfo@gmail.com']
# Facebook Integration Settings
AUTHENTICATION_BACKENDS = (
'social_auth.backends.facebook.FacebookBackend',
'django.contrib.auth.backends.ModelBackend',
)
# userprofile creation
AUTH_PROFILE_MODULE = 'userprofile.UserProfile'
FACEBOOK_APP_ID = '343708889077375'
FACEBOOK_API_SECRET = '0bd34d3dbb482579fb990805860267bd'
FACEBOOK_EXTENDED_PERMISSIONS = ['email', 'user_birthday', 'user_interests', 'user_events', 'manage_pages']
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.contrib.messages.context_processors.messages',
'social_auth.context_processors.social_auth_by_type_backends',
)
LOGIN_REDIRECT_URL = '/'
SOCIAL_AUTH_PIPELINE = (
'social_auth.backends.pipeline.social.social_auth_user',
#'social_auth.backends.pipeline.associate.associate_by_email',
'social_auth.backends.pipeline.user.get_username',
'social_auth.backends.pipeline.user.create_user',
'social_auth.backends.pipeline.social.associate_user',
'social_auth.backends.pipeline.social.load_extra_data',
'social_auth.backends.pipeline.user.update_user_details',
'fratevents.pipeline.create_user_profile',
'fratevents.pipeline.get_user_profile_pic',
'fratevents.pipeline.get_user_events',
'fratevents.pipeline.get_user_network',
'fratevents.pipeline.get_user_pages',
)
SOCIAL_AUTH_CREATE_USERS = True
SOCIAL_AUTH_FORCE_RANDOM_USERNAME = False
SOCIAL_AUTH_DEFAULT_USERNAME = 'socialauth_user'
SOCIAL_AUTH_COMPLETE_URL_NAME = 'socialauth_complete'
LOGIN_ERROR_URL = '/login/error/'
SOCIAL_AUTH_ERROR_KEY = 'socialauth_error'
SOCIAL_AUTH_FORCE_POST_DISCONNECT = True
#AWS S3 Credentials - django-storages
# SECURITY(review): AWS access keys are committed here; revoke/rotate this
# key pair and load credentials from the environment instead.
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = 'AKIAISDEISAIY3LRYY3Q'
AWS_SECRET_ACCESS_KEY = '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'
AWS_STORAGE_BUCKET_NAME = 'calevents'
BUCKET_NAME = 'calevents'
# Subdomain-style bucket URLs (bucket.s3.amazonaws.com).
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
AWS_UPLOAD_DESTINATION = "http://s3.amazonaws.com/"+str(BUCKET_NAME)+"/"
| [
"sanchitbareja@gmail.com"
] | sanchitbareja@gmail.com |
0a9382e84f0c116e1aed9e3976dd20cc21d50e62 | 47e447b0b8e422656d95e6f8707d8e24a4c6fda3 | /mainplot.py | 108f8dfbc08b9b6edc480e0db5e92ec1f43328bb | [] | no_license | HareshKarnan/TD3 | 92b2911bbdccda027da670d2854048907ccc2fee | 0bf5949f59d4865c685ec6113208a49c63ed455a | refs/heads/master | 2023-04-10T20:51:02.648579 | 2021-04-02T14:45:52 | 2021-04-02T14:45:52 | 212,244,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,073 | py | import glob
import csv
import os
import random

import matplotlib.pyplot as plt
import numpy as np
# files = glob.glob('logs/lambda_expts/*')
# files = glob.glob('logs/model_free/*')
# files = glob.glob('logs/fwd_inv_model/*')
files = glob.glob('logs/fwd_model/*')
def extract_expt_data(files):
    """Parse per-run episode returns from TD3 log directories.

    Each directory name encodes the run configuration as underscore-separated
    fields: index 9 is the number of model iterations, 12 the model gradient
    steps and 13 the seed.  Inside each directory, ``log.csv`` rows are
    (reward, done, episode_num, episode_reward, episode_timesteps,
    total_timesteps); a row with done == 'True' closes an episode.

    Args:
        files: list of run-directory paths.

    Returns:
        Nested dict: data[model_iters][model_grads][seed] ->
        {'episode': [...], 'episode_rewards': [...]}.
    """
    data = {}
    for dir_path in files:
        # os.path.basename is portable; the original split on '/' only.
        fields = os.path.basename(dir_path).split('_')
        model_iters, model_grads, seed = fields[9], fields[12], fields[13]
        # Build the nested dict branches in one pass.
        run = (data.setdefault(model_iters, {})
                   .setdefault(model_grads, {})
                   .setdefault(seed, {}))
        episode, episode_rewards = [], []
        with open(os.path.join(dir_path, 'log.csv'), 'r') as csvfile:
            for row in csv.reader(csvfile):
                (reward, done, episode_num, episode_reward,
                 episode_timesteps, total_timesteps) = row
                if done == 'True':
                    episode.append(int(episode_num))
                    episode_rewards.append(float(episode_reward))
        run['episode_rewards'] = episode_rewards
        run['episode'] = episode
    return data
# Load episode returns for each experiment family.
fwd_data = extract_expt_data(glob.glob('logs/fwd_model/*'))
fwd_inv_data = extract_expt_data(glob.glob('logs/dual_final/*'))
model_free_data = extract_expt_data(glob.glob('logs/model_free/*'))
# find the mean and std across expts
ax = plt.subplot()
for i, data in enumerate([fwd_data, fwd_inv_data, model_free_data]):
    for modelIters in data.keys():
        for modelGrads in data[modelIters].keys():
            # select the right experiment here
            if i == 0 and (modelIters != '1' or modelGrads != '3'): continue
            if i == 1 and (modelIters != '1' or modelGrads != '2'): continue
            print(i, modelIters, modelGrads, type(modelIters), type(modelGrads))
            # NOTE(review): this random colour is always replaced by the fixed
            # per-family colour chosen below; dead assignment.
            color = (random.random(), random.random(), random.random())
            # print('found seeds :: ', data[modelIters][modelGrads].keys())
            dataX, dataY = [], []
            for seed in data[modelIters][modelGrads].keys():
                episode = data[modelIters][modelGrads][seed]['episode']
                episodeRew = data[modelIters][modelGrads][seed]['episode_rewards']
                dataY.append(episodeRew)
                dataX.append(episode)
            # Truncate every seed's curve to the shortest run so the rows of
            # dataY align for mean/std.
            minX = min([val[-1] for val in dataX]) + 1
            dataX = [datX[:minX] for datX in dataX]
            dataY = [datY[:minX] for datY in dataY]
            dataY = np.asarray(dataY)
            if i==0:
                label = 'Forward Model'
                color = (0, 0, 1)
                exptnums = 10
                # continue
            elif i==1:
                label = 'Forward + Inverse Model'
                color = (0, 1, 0)
                exptnums = 5
                # NOTE(review): this `continue` skips plotting the
                # Forward + Inverse family entirely.
                continue
            elif i==2:
                label = 'Model Free'
                color = (1, 0, 0)
                exptnums = 10
            # Mean curve plus a shaded +/- standard-error band
            # (std / sqrt(number of experiments)).
            ax.plot(dataX[0], np.mean(dataY, axis=0), color=color, label=label)
            ax.fill_between( dataX[0],
                np.mean(dataY, axis=0) - np.std(dataY, axis=0)/np.sqrt(exptnums),
                np.mean(dataY, axis=0) + np.std(dataY, axis=0)/np.sqrt(exptnums),
                alpha=0.25,
                color=color)
plt.legend()
plt.ylim([0, 900])
plt.xlim([0, 250])
plt.xlabel('Episodes')
plt.ylabel('Episode Returns')
plt.savefig('fwd_model.png')
plt.show()
| [
"haresh.miriyala@gmail.com"
] | haresh.miriyala@gmail.com |
bfe99fb243985b17460e1aa571a5d05a72641616 | 91049acdda426be9c8f22de19500a40e366fef81 | /Client/modules/code_execution/code_execution.py | 0f9142463a8563cdc1280343d2605234888597f3 | [] | no_license | VictorAlonsoCM/Project_RAT | f3e55e9525e05175fa8502fa23f5bfd31ec4957f | 01d3079586ab08a92c8e7c9d2d1053710cac7505 | refs/heads/master | 2020-09-30T02:00:23.395275 | 2019-12-10T17:01:45 | 2019-12-10T17:01:45 | 227,172,602 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,603 | py | import os
import re
import subprocess
from modules.sockets.socket import Network
class CodeExecution:
    """Executes operating-system commands requested by the remote peer.

    SECURITY(review): by design this class runs peer-chosen shell commands on
    the local machine over the network socket (remote-administration
    behaviour).  It must never be exposed to an untrusted peer.
    """
    def __init__(self, nt):
        # NOTE(review): `self = self` is a no-op and can be removed.
        self = self
        # Network wrapper used for all send/receive calls.
        self.nt = nt
    def dir(self):
        """Run `dir` and return its captured output as one string."""
        cmd = "dir"
        stream = os.popen(cmd)
        output = stream.readlines()
        data = ''
        # NOTE(review): duplicated initialisation; this line is redundant.
        data = ''
        for item in output:
            data += str(item)
        print(data)
        return data
    def systeminfo(self):
        """Run `systeminfo` and return its captured output."""
        cmd = "systeminfo"
        stream = os.popen(cmd)
        output = stream.readlines()
        data = ''
        for item in output:
            data += str(item)
        print(data)
        return data
    def whoami(self):
        """Run `whoami` and return its captured output."""
        cmd = "whoami"
        stream = os.popen(cmd)
        output = stream.readlines()
        data = ''
        for item in output:
            data += str(item)
        print(data)
        return data
    def net_users(self):
        """Run `net users` and return its captured output."""
        cmd = "net users"
        stream = os.popen(cmd)
        output = stream.readlines()
        data = ''
        for item in output:
            data += str(item)
        print(data)
        return data
    def net_localgroups(self):
        """Run `net localgroups` and return its captured output.
        NOTE(review): the actual Windows command is `net localgroup`
        (singular); as written this presumably returns an error message —
        verify on a Windows host.
        """
        cmd = "net localgroups"
        stream = os.popen(cmd)
        output = stream.readlines()
        data = ''
        for item in output:
            data += str(item)
        print(data)
        return data
    def custom(self):
        """Ask the peer for an arbitrary command and run it verbatim."""
        self.nt.connectionSend("Insert your command: ".encode("utf-8"))
        cmd = str(self.nt.connectionRecv())
        stream = os.popen(cmd)
        output = stream.readlines()
        data = ''
        for item in output:
            data += str(item)
        if not data:
            # Empty output is treated as a failed command.
            return "\r\nSyntax error\r\n"
        return data
    def get_shell(self):
        # Placeholder; no interactive shell is implemented.
        return "Shell goes here....."
    def code_execution(self):
        """Send the option menu to the peer and dispatch the chosen option.
        NOTE(review): option 8 (get a shell) is advertised but missing from
        the dispatch table; an unknown option makes `callback` the string
        "Invalid option", so the final call raises TypeError.  Option 1
        closes the connection but execution still falls through to the
        dispatch below.
        """
        options_list = ["2. dir", "3. systeminfo", "4. whoami", "5. net users", "6. net localgroup", "7. custom", "8. get a shell"]
        data = ""
        for item in options_list:
            print(item)
            data += item+"\r\n"
        self.nt.connectionSend("Getting RCE...".encode("utf-8"))
        self.nt.connectionSend(data.encode("utf-8"))
        option = int(self.nt.connectionRecv())
        if(option == 1):
            self.nt.connectionClose()
        switcher = {
            2: self.dir,
            3: self.systeminfo,
            4: self.whoami,
            5: self.net_users,
            6: self.net_localgroups,
            7: self.custom
        }
        callback = switcher.get(option, "Invalid option")
        return callback()
"vacontrerasmeza@gmail.com"
] | vacontrerasmeza@gmail.com |
31fdb70e09d27fcc19feed1b7e8b826a584a5fe3 | abf33d1b30906c35a700bb24478efd6eb3979692 | /interviewbit/count_number_of_duplicates_in_a_list.py | 11ba129d5f086da11bb6f1c030623dcc3d25d934 | [] | no_license | soniya-mi/python | ac57c7e3be6bc05241e8af4e8b074e5311390f7d | 2a87ceeebb0403468c4fc2522b59573478e10ea4 | refs/heads/master | 2023-06-26T12:51:53.882774 | 2023-06-21T04:45:35 | 2023-06-21T04:45:35 | 137,060,314 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 335 | py | list = [10, 20, 30, 20, 20, 30, 40, 50, -20, 60, 60, -20, -20]
# Count how many times each value occurs, then report every value that
# appears more than once (in order of first appearance).
occur = {}
for item in list:
    # dict.get with a default replaces the original membership-test branch.
    occur[item] = occur.get(item, 0) + 1

new_list = [key for key in occur if occur[key] > 1]
# BUG FIX: the original used the Python-2-only `print new_list` statement;
# the call form works under both Python 2 and 3.
print(new_list)
| [
"noreply@github.com"
] | soniya-mi.noreply@github.com |
090f82f82fdc3d02c3bb17c3ee32ed6c85c8c08e | 0a25ea42bd8aff27c939b7de9d9a8ea036b0c66f | /thrift/thrift-utils/test/ezpz/__init__.py | 6b1fb9fe954c52e463b0d180312d8dccde9dae94 | [
"Apache-2.0"
] | permissive | ezbake/ezbake-common-python | 118a20e2f88aaa29f95459b6bb163d0a828407d0 | fc82fb71852750cc2cfcbd7af0cb6843fad13b89 | refs/heads/master | 2021-01-01T05:38:30.502302 | 2015-03-02T20:08:32 | 2015-03-02T20:08:32 | 31,560,413 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | # Copyright (C) 2013-2015 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| [
"jhastings@42six.com"
] | jhastings@42six.com |
5f93b1e2b6d1ad818e179659e266a4b51598a5bb | 34092feef434547abc852dfa1802c5f2178fb9ca | /python/searchRange.py | ce594aefa0cd98115b600f4ad02ab2d054772517 | [] | no_license | achyudh/leetcode-solutions | c59535650cc42e3b629fa12627aec0768325d2b8 | 3997b8bfa90a27cf8ccda10cdd34e9db3afebd7a | refs/heads/master | 2021-06-26T01:25:38.691813 | 2020-10-26T00:03:48 | 2020-10-26T00:03:48 | 147,906,102 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 860 | py | class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
if len(nums) == 0:
return -1, -1
ptr_u = len(nums) - 1
ptr_l = 0
while ptr_u >= ptr_l:
ptr_m = (ptr_u + ptr_l) // 2
if nums[ptr_m] <= target:
ptr_l = ptr_m + 1
else:
ptr_u = ptr_m - 1
ptr_a = ptr_l - 1
if nums[ptr_a] != target:
return -1, -1
ptr_u = len(nums) - 1
ptr_l = 0
while ptr_u >= ptr_l:
ptr_m = (ptr_u + ptr_l) // 2
if nums[ptr_m] >= target:
ptr_u = ptr_m - 1
else:
ptr_l = ptr_m + 1
return ptr_l, ptr_a
| [
"noreply@github.com"
] | achyudh.noreply@github.com |
a28b2a3cf60f98bd998c8924c10a1f170376436f | 9bc228372e586a1f90bb0685c43e744be9638ecd | /18_정은서/session08/catproject/catproject/asgi.py | ff0fcedfade84d46b53bbf8fe87c19d61c08bf30 | [
"MIT"
] | permissive | LikeLionSCH/9th_ASSIGNMENT | 3e58862a76e3232aed7e19e8939da23330ff2e22 | c211995ad12f404833ffec7fd80e1229b82a3bfa | refs/heads/master | 2023-07-03T10:27:11.843177 | 2021-08-02T14:52:02 | 2021-08-02T14:52:02 | 379,633,279 | 7 | 18 | MIT | 2021-08-02T14:52:03 | 2021-06-23T14:36:59 | Python | UTF-8 | Python | false | false | 397 | py | """
ASGI config for catproject project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Select the settings module before Django initialises; an existing
# DJANGO_SETTINGS_MODULE value in the environment takes precedence.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'catproject.settings')

# Module-level ASGI callable that ASGI servers import.
application = get_asgi_application()
| [
"em4784@gmail.com"
] | em4784@gmail.com |
3aee32793c6ba419d0594d5a9e2bcaa214a87ed3 | 9edfd50576323ab50517d2694efa7d970a5a487e | /bin/shotgun_pickTask.py | 4cd5957d581ff10f1cf674cd3f75846f41f45a3f | [] | no_license | agudmund/util | 136de9961c3f1cc86faf183bb5c576c55f73417c | 457c8471296de944252fa6f474fea3ac07376d51 | refs/heads/master | 2023-08-11T00:21:53.795855 | 2023-07-25T12:00:46 | 2023-07-25T12:00:46 | 79,660,992 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,435 | py | #!/usr/bin/env C:/Python27/python.exe
import os
import sys
import random
import json
import shotgun_api3
# Shotgun connection settings come from the environment, so no credentials
# live in this script.
SERVER_PATH = os.getenv("SHOTGUN_SERVER_PATH")
SCRIPT_USER = os.getenv("SHOTGUN_SCRIPT_USER")
SCRIPT_KEY = os.getenv("SHOTGUN_SCRIPT_KEY")
sg = shotgun_api3.Shotgun(SERVER_PATH, SCRIPT_USER, SCRIPT_KEY)
# NOTE(review): sys.argv[-1] is the script path itself when no argument is
# given; an explicit length check (or argparse) would fail faster.
proj = sys.argv[-1]
def listProjects():
    """Print every Project record (id + name) known to the Shotgun site."""
    for project in sg.find('Project', [], ['name']):
        print(project)
def getProject(sg, project):
    """Look up the project named *project* and print its id/name record."""
    match = sg.find_one('Project', [['name', 'is', project]], ['id', 'name'])
    print(match)
def find_asset_tasks(sg, project, asset):
    """Return every Task attached to *asset* within *project*."""
    print ("searching:", project, "for Asset called:", asset)
    task_filters = [
        ['project.Project.name', 'is', project],
        ['entity.Asset.code', 'is', asset],
    ]
    return sg.find("Task", task_filters, ['content', 'id', 'name'])
def pickOne():
filters = [
["sg_status_list", "is_not", "fin"],
["sg_status_list", "is_not", "hld"],
["sg_status_list", "is_not", "omt"],
{"filter_operator": "any",
"filters": [
['project.Project.name', 'is', proj]
]}]
result = sg.find("Asset", filters,['content', 'id','project','code','type'])
rez = random.choice(result)
print (" ".join(['Still stuff to do on', rez['code'], 'in', rez['type']]))
def uploadAssetThumbnails():
filters = [
["sg_status_list", "is_not", "fin"],
["sg_status_list", "is_not", "hld"],
["sg_status_list", "is_not", "omt"],
{"filter_operator": "any",
"filters": [
['project.Project.name', 'is', proj]
]}]
result = sg.find("Asset", filters,['content', 'id','project','code','type'])
rez = random.choice(result)
thumbpath = r'C:\Users\normal\Projects\Match Dot Com\Documents'
thumbs = [ os.path.join(thumbpath,n ) for n in os.listdir(thumbpath) if n.endswith(".JPG")]
for asset in result:
for thumb in thumbs:
if asset['code'] == thumb.split('\\')[-1].split('.')[0]:
print ('x',asset)
sg.upload_thumbnail('Asset',asset['id'] , thumb ) # Needs python 3 apparently
continue
def createShots():
root = r'C:\Users\normal\Projects\Darth Kindergarten\Maya\images\shots'
for shot in os.listdir(root):
filters = {
'project': {"type":"Project","id": "insert project id"},
'code': shot.split(".")[0],
'sg_status_list': 'ip'
}
result = sg.create('Shot', data)
filters = [
{"filter_operator": "any",
"filters": [
['project.Project.name', 'is', "insert project name"]
]}]
result = sg.find('Shot', filters, ['id','code'])
for r in result:
if r['code'] == shot.split('.')[0]:
sg.upload_thumbnail('Shot',r['id'] , os.path.join(root,shot) )
if __name__ == '__main__':
pickOne()
# for asset in result:
# print find_asset_tasks(sg, "Match Dot Com", asset['code'])
# print find_asset_tasks(sg, "Match Dot Com", 1344)
# getProject(sg,"Match Dot Com")
# sg.summarize(entity_type='Task',
# filters = [
# ['entity.Asset.sg_sequence', 'is', {'type': 'Sequence', 'id': 2}],
# ['sg_status_list', 'is_not', 'na']],
# summary_fields=[{'field': 'id', 'type': 'count'}, {'field': 'due_date', 'type': 'latest'}],
# grouping=[{'field': 'entity', 'type': 'exact', 'direction': 'asc'}]) | [
"aevar.gudmundsson@gmail.com"
] | aevar.gudmundsson@gmail.com |
3fa07e5008b46020f7867d26769152465c99df3f | 07ffe8db66fbd50f87315df34074e20b3ce67f0e | /about/models.py | 80a8e89e5bba77662e330b6c74d3a6e0a8d8a48a | [] | no_license | jakiiii/jtro-ecommerce | 9acc6d37797e409a79921358958e50d66f20a0b4 | e6e5ae04c7756e99f862634ad21f1d3877b501ab | refs/heads/master | 2023-01-22T09:44:47.891286 | 2020-12-01T23:32:19 | 2020-12-01T23:32:19 | 316,202,084 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | from django.db import models
from ckeditor_uploader.fields import RichTextUploadingField
from jtro_ecommerce.utils import upload_image_path
class About(models.Model):
title = models.CharField(max_length=150)
image = models.ImageField(upload_to=upload_image_path, null=True, blank=True)
description = RichTextUploadingField()
timestamp = models.DateField(auto_now_add=True)
update = models.DateField(auto_now=True)
def __str__(self):
return "ABOUT US"
| [
"me.jaki@outlook.com"
] | me.jaki@outlook.com |
039e3c3ed7b402181e322dec3cbb5fe416969706 | 486e486ffea2feb8601cad443d86854a77e1f390 | /todoapp/todos/feeds.py | aefdded24c2f6b902f1669d3f127f7e7a54d7e8d | [] | no_license | saikirananumalla/todoapp | ddfbea6b983d1d22e17f6ab2772e18ce26aa387a | 71aab46c277bdd1f72e0ec5c115f6656ff3a6551 | refs/heads/master | 2020-04-09T17:21:05.972821 | 2018-12-05T07:55:25 | 2018-12-05T07:55:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from .models import Users
from . import views
class LatestEntryField(Feed):
title = " Updates "
link = "/news-sites/"
description = "Updates on Users"
def items(self):
return Users.objects.order_by ('num')
def item_title(self, item):
return item.user
def item_description(self, item):
return item.nickname
#item link is only needed if NewsItem has no get_absolute_url method
def item_link(self, item):
return reverse(views.index)
| [
"noreply@github.com"
] | saikirananumalla.noreply@github.com |
c66fb03d7a1a952c8183b3246618837ac0c1a7f5 | 9e0cec9c4a4d080dc41a3e1b2bdc92b45be56e15 | /legacy/autoMNIfilesAnalysis.py | bf308600d33f26f2cf111634a7db9c2426beb8e0 | [] | no_license | FabianRei/neuro_detect | 373e42cbcd0a6b53d06c23edd14ef51cc4d9ee7d | 2ea3558617d742d6f22f00256daf9220221589d2 | refs/heads/master | 2020-04-04T11:41:53.427101 | 2018-12-21T17:51:47 | 2018-12-21T17:51:47 | 155,901,128 | 0 | 1 | null | 2018-12-21T00:55:47 | 2018-11-02T17:38:34 | Python | UTF-8 | Python | false | false | 4,090 | py | import numpy as np
import pickle
import torch
from src.data.preprocess import getTensorList_general
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from skimage.measure import block_reduce
from scipy.ndimage import zoom
from dipy.reconst.dti import fractional_anisotropy
class simpleNet(nn.Module):
def __init__(self):
super(simpleNet, self).__init__()
self.fc1 = nn.Linear(12*12*12*6, 200)
self.fc2 = nn.Linear(200, 4)
def forward(self, x):
x = F.relu(self.fc1(x))
x = self.fc2(x)
return F.log_softmax(x, dim=1)
def six2mat(voxel6):
return voxel6[[0, 1, 3, 1, 2, 4, 3, 4, 5]].reshape(3,3)
def getFaArr(arr):
matArr = np.apply_along_axis(six2mat, -1, arr)
evalArr, evecArr = np.linalg.eig(matArr)
faArr = fractional_anisotropy(evalArr)
return faArr
def resizeTensors(tensors, wantedShape):
resizedTensors = []
count = 0
oldShapes = []
newShapes = []
print("Resizing..")
for t in tensors:
if t.shape != wantedShape:
count += 1
oldShapes.append(t.shape)
zoomFactor = []
for i, s in enumerate(t.shape):
zoomFactor.append(wantedShape[i]/s)
t = zoom(t, zoomFactor)
newShapes.append(t.shape)
resizedTensors.append(t)
for i in range(count):
print(f"{oldShapes[i]} -> {newShapes[i]}")
print(f"Resized {count} out of {len(tensors)} tensors. (This takes quite long)")
return resizedTensors
def flipAxes(arr, axis):
# axes is len 3 array, [1,0,0], is flip x, [1,0,1] means flip x and z and so on
if axis == 1:
arr[:, :, :, [1, 3]] = -arr[:, :, :, [1, 3]]
if axis == 2:
arr[:, :, :, [1, 4]] = -arr[:, :, :, [1, 4]]
if axis == 3:
arr[:, :, :, [3, 4]] = -arr[:, :, :, [3, 4]]
return arr
def cropBlockResize(tensors, resizeFactor, crop):
result = []
rf = resizeFactor
for t in tensors:
t = t[crop]
t = block_reduce(t, block_size=(rf, rf, rf, 1), func=np.mean)
result.append(t)
return result
def normalizeByMean(tensors):
result = []
for t in tensors:
t = t/np.mean(t)
result.append(t)
print(f"Normalized {len(tensors)} tensors")
return result
tensorDir = '/black/localhome/reith/Desktop/projects/Tensors/test/'
networkWeights = 'trained_simplenet.torch'
wantedShape = (81, 106, 76, 6)
crop = (slice(7, 55), slice(40, 88), slice(14, 62))
resizeFactor = 4
net = simpleNet()
net.load_state_dict(torch.load(networkWeights))
tensors, names = getTensorList_general(tensorDir, giveNames=True)
# tensors = resizeTensors(tensors, wantedShape)
# for t, n in zip(tensors,names):
# if n[-11:] == 'oMNI.nii.gz':
# print(n)
# print(t[35,35,66])
# print(getFaArr(t[35,35,66]))
t = tensors[3] # YflipAutoMNI
# One of the highest Fas in the area (0.48)
# for t, n in zip(tensors,names):
# if n[-11:] == 'oMNI.nii.gz':
# print(n)
# print(t[40,50, 42])
# print(getFaArr(t[40,50, 42]))
# One of the highest Fas in the area (0.44)
rightTensors = []
rightNames = []
for t, n in zip(tensors,names):
if n[-11:] == 'oMNI.nii.gz':
rightTensors.append(t)
rightNames.append(n)
tensors = rightTensors
names = rightNames
xflip = tensors[2]
yflip = tensors[0]
xflipx = np.copy(xflip)
xflipx = flipAxes(xflipx, 1)
xflipy = np.copy(xflip)
xflipy = np.copy(flipAxes(xflipy, 2))
xflipz = np.copy(xflip)
xflipz = np.copy(flipAxes(xflipz, 3))
print('xflip\n', np.linalg.eig(six2mat(xflip[45,55,47])))
print('yflip\n', np.linalg.eig(six2mat(yflip[45,55,47])))
print('xflipx\n', np.linalg.eig(six2mat(xflipx[45,55,47])))
print('xflipy\n', np.linalg.eig(six2mat(xflipy[45,55,47])))
print('xflipz\n', np.linalg.eig(six2mat(xflipz[45,55,47])))
for t, n in zip(tensors,names):
if n[-11:] == 'oMNI.nii.gz':
print(n)
print(t[45,55, 47])
print(np.linalg.eig(six2mat(t[45,55,47]))[1])
print(getFaArr(t[45,55, 47]))
print("done!")
| [
"f.abi.an@gmx.de"
] | f.abi.an@gmx.de |
bd4c1df790a65e3af952ca5f291241677dd3a7c6 | e02cdc2908ea54bfe1232ccb4c515bb355372320 | /eventex/urls.py | 451b393b79f77411e24635476b72ed952943c818 | [] | no_license | iveinbox/wttd | 3924a2f73ccee7f43edc44bcd3920d27c27404d2 | 0ad64a5814fa59ed9877123592a6b5fd65c64c69 | refs/heads/master | 2019-07-16T13:05:56.778667 | 2017-11-05T04:06:55 | 2017-11-05T04:06:55 | 93,279,371 | 0 | 1 | null | 2017-06-05T15:12:35 | 2017-06-03T23:35:15 | CSS | UTF-8 | Python | false | false | 847 | py | """eventex URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from eventex.core.views import home
from eventex.subscriptions.views import subscribe
urlpatterns = [
url(r'^$', home),
url(r'^inscricao/$', subscribe),
url(r'^admin/', include(admin.site.urls)),
]
| [
"iveelaureane@gmail.com"
] | iveelaureane@gmail.com |
d165d12587bb3716d7a0ae23d765ebc98ee1ba39 | 7bcba33e06f1fff4b2639aed5c556a79b7a51269 | /plot_graph2.py | c98f32a462f19b57135d4b42d60cac094112522b | [] | no_license | guroosh/CS7IS2-AI-project | 3798d32c60754b27ab044d1a1096f6527660ec56 | aae851695a022d374c263bd666fa86a20c478887 | refs/heads/master | 2020-12-31T10:47:06.545624 | 2020-04-13T08:16:30 | 2020-04-13T08:16:30 | 239,007,884 | 1 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,764 | py | import matplotlib.pyplot as plt
from scipy.ndimage.filters import gaussian_filter1d
# x = [3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35]
# y = [0.0, 0.0, 0.0, 0.0, 0.0, 0.10294117647058823, 0.6617647058823529, 0.59375, 2.6029411764705883, 4.349206349206349, 5.4714285714285715, 7.806451612903226, 10.897058823529411, 16.857142857142858, 11.338461538461539, 17.434782608695652, 16.5, 28.06896551724138, 25.236363636363638, 27.557692307692307, 43.90196078431372, 47.80701754385965, 47.0, 33.0, 34.75, 39.0, 51.0, 42.5, 31.0, 51.5, 128.25, 106.5, 76.66666666666667]
x = []
y1 = []
y2 = []
y3 = []
y4 = []
y5 = []
y6 = []
# y_astar = []
with open('mutation_change_daata.txt', 'r') as inp:
for i in inp:
i = i[:-1]
x.append(float(i.split(', ')[0]))
y1.append(float(i.split(', ')[1]))
y2.append(float(i.split(', ')[2]))
y3.append(float(i.split(', ')[3]))
y4.append(float(i.split(', ')[4]))
y5.append(float(i.split(', ')[5]))
y6.append(float(i.split(', ')[6]))
# ysmoothed = gaussian_filter1d(y30, sigma=2)
def smooth(l1):
return gaussian_filter1d(l1, sigma=2)
y1 = smooth(y1)
y2 = smooth(y2)
y3 = smooth(y3)
y4 = smooth(y4)
y5 = smooth(y5)
y6 = smooth(y6)
plt.plot(x, y6, 'k', label='Global minima (based on A*)')
plt.plot(x, y1, 'r', label='No mutation')
plt.plot(x, y2, 'g', label='30% mutation')
plt.plot(x, y3, 'b', label='50% mutation')
plt.plot(x, y4, 'c', label='70% mutation')
plt.plot(x, y5, 'm', label='100% mutation')
# plt.plot(x40, y40, 'b')
# plt.plot(x30, ysmoothed)
plt.legend(loc="upper left")
plt.xlabel('Grid Size (M) : MxM')
plt.ylabel('Length of best path (after 400 iterations)')
plt.show()
| [
"csingh@tcd.ie"
] | csingh@tcd.ie |
925f9662216aa18534707e210800d7309b084bd3 | dd1b9eaf9e996444f2995220df93f7213133490d | /account/migrations/0001_initial.py | de37dde6d2e51cd2b1f37b2e175d8169816a843f | [] | no_license | 4k45hv3rm4/login_system | 69961f0b81c1e315b2db69588af31eff9d7f6a43 | a07bd08a3cccd6231a253c69e920cbea528081af | refs/heads/master | 2023-01-10T02:26:50.021694 | 2020-11-16T13:34:27 | 2020-11-16T13:34:27 | 313,309,582 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,226 | py | # Generated by Django 3.0.8 on 2020-07-15 11:36
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Account',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('email', models.EmailField(max_length=60, unique=True, verbose_name='email')),
('username', models.CharField(max_length=30, unique=True)),
('date_joined', models.DateTimeField(auto_now_add=True, verbose_name='date joined')),
('last_login', models.DateTimeField(auto_now=True, verbose_name='last login')),
('is_admin', models.BooleanField(default=False)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('is_superuser', models.BooleanField(default=False)),
],
options={
'abstract': False,
},
),
]
| [
"4k45hr0ck5007@gmail.com"
] | 4k45hr0ck5007@gmail.com |
351c87c5d812181e22737f15be126ece632a8f45 | 682b3381751b6178ea7217c723a46bf74b8ce07e | /experiments/fmeasure.py | 32298f834b052829b4760cf36c14c435f43a7686 | [] | no_license | jminyu/BackgroundSubtraction_by_GBRBM | 7170bb2c04328274a6e671592a7856e363438f7c | 6fada0925d674b66933a4440f157832b97ba3bab | refs/heads/master | 2021-01-10T20:59:06.938073 | 2014-07-26T03:00:23 | 2014-07-26T03:00:23 | 22,276,707 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,297 | py | from numpy import *
import os.path
import Image
import matplotlib.pylab as plt
def f_measure(f, fg,gt):
fg_pixgt = where(gt == 1)
bg_pixgt = where(gt == 0)
fg_pixfg = where(fg == 1)
bg_pixfg = where(fg == 0)
tp = (where(gt[fg_pixgt] == fg[fg_pixgt]))[0]
fp = (where(gt[fg_pixgt] != fg[fg_pixgt]))[0]
tn = (where(gt[bg_pixgt] == fg[bg_pixgt]))[0]
fn = (where(gt[bg_pixgt] != fg[bg_pixgt]))[0]
DR = len(tp) * 1.0 / ( len(tp) + len(fn) )
percision = len(tp) * 1.0 / (len(tp) + len(fp))
F = 2 * DR * percision / (DR + percision)
print f, " fmeasure:", F, " tp:fp:tn:fn =", len(tp),":",len(fp),":",len(tn),":",len(fn), "Recall:", DR," Percision:", percision
return F
def print_it(x, dir_name, files):
global dataset
print "here"
#dataset = []
#dataset = np.zeros(256*320)
for f in files:
imf = Image.open(dir_name + '/' + f)
#d = np.array(imf)
d = ((array(imf.getdata())/255) > 0.5 ) * 1
#print f
#print d.shape
dataset.append(d)
return dataset
def print_fmeasure(x, dir_name, files):
global dataset
print "here"
i = 0
s = 0
for f in files:
imf = Image.open(dir_name + '/' + f)
#d = np.array(imf)
groundtruth1 = (((asarray(imf)[:,:,0])/255.0 > 0.5) * 1).flatten()
#print "shape:", groundtruth1.shape, "shape2:", dataset[i].shape
#print i
# print f
s = s + f_measure(f, groundtruth1, dataset[i])
i = i + 1
print "avg: ", s/i
# img = Image.open("data/changedetection\\baseline\highway/gt001367.png")
# img1 = Image.open("data/changedetection\\baseline\highway/bin001367.png")
# print (asarray(img1)).shape
# plt.imshow(((asarray(img)[:,:])/255.0 > 0.5)* 1)
# plt.figure(2)
# plt.imshow(((asarray(img1)[:,:])/255.0 > 0.5) * 1)
# plt.show()
# print "Doie : " , (asarray(img)[:,:]).shape
# groundtruth = (((asarray(img)[:,:])/255.0 > 0.5) * 1).flatten()
# groundtruth1 = (((asarray(img1)[:,:,0])/255.0 > 0.5) * 1).flatten()
# f_measure(groundtruth1,groundtruth)
dataset = []
os.path.walk('C:\work\\backgdSubt\GRBM\deepbelief\code\data\cdresults\GT', print_it, 0)
os.path.walk('C:\work\\backgdSubt\GRBM\deepbelief\code\data\cdresults\DPGMM', print_fmeasure, 0) | [
"jmyu@gist.ac.kr"
] | jmyu@gist.ac.kr |
2953bbb2ca4922a469834aebbec2753d820c24e7 | 5aeaa94117ad5f3ac86f83795e05e9444dfce586 | /scratch.py | c2b0809c86ea77328bd0bb6ea65ee63610c849d8 | [] | no_license | rikkhill/sfcrime | debef1b232db35ad735c8badec360bc5e9f1ad95 | f21fe834da07f701f666198089425e59ed57e70d | refs/heads/master | 2021-01-21T03:31:05.410424 | 2016-08-23T13:38:49 | 2016-08-23T13:38:49 | 49,992,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 839 | py | # Rikk's exploratory gubbins
import pandas as pd
import helper.data as hd
import helper.plot as plot
import helper.constants as C
df = hd.get_training_data()
total = len(df)
# Filter for the top ~80%% of crimes
df = df[df.Category.isin([
#'LARCENY/THEFT', # 19.92%
#'OTHER OFFENSES', # 14.37%
#'NON-CRIMINAL', # 10.51%
#'ASSAULT', # 8.76%
'DRUG/NARCOTIC', # 6.15%
#'VEHICLE THEFT', # 6.13%
#'VANDALISM', # 5.09
#'WARRANTS', # 4.81%
#'BURGLARY' # 4.19%
])]
print(len(df)/total)
plot.eventmap(df, 'Category') | [
"rikk@unashamedly.co.uk"
] | rikk@unashamedly.co.uk |
bab376dab409e28cbb1e3490bf25c95372117cd9 | 7a724badef6a881d63d7692de6a7b94daaf820be | /user.py | 133f826c760765de1c14e75acb3678cb4361f23d | [] | no_license | mirshahzad/python-basic | aa19641140e10e6be88944c81c927410ffc23759 | 1552be46b5890c9976fad43dba007410396ad92a | refs/heads/master | 2022-10-24T18:13:58.938338 | 2020-06-13T09:14:12 | 2020-06-13T09:14:12 | 255,885,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | user_0 = {
'username': 'efermi',
'first': 'enrico',
'last': 'fermi',
}
for key, value in user_0.items():
print(f"\nKey: {key}")
print(f"value: {value}") | [
"noreply@github.com"
] | mirshahzad.noreply@github.com |
021379ed8268f90ad89ad129bff6fa13ecc6d24f | 7c131f19c3a3ae1ee8832ff68a8dd88e68e4d3d8 | /filter_mobilisations.py | 3827c8a12f9092a369e6a67bf2e36dffa856e3d6 | [] | no_license | isildirik/London_Fire_Station_Closures | 42abbaa94462144ac1ccbeba398d038bf51e1322 | bcce10265923fdfeca7fd293712c822e59a95f83 | refs/heads/master | 2021-07-12T19:25:38.210866 | 2020-06-24T07:17:35 | 2020-06-24T07:17:35 | 168,384,656 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 709 | py | #!/usr/bin/env python3
# This script reads london fire brigade mobilisations data from original data spreadsheet.
# Filters out the unnecessary columns and subsets the dataset to 2013 and 2014 years and outputs a csv file for further use.
import pandas as pd
import time
xls_path = './original-dataset/LFB Mobilisation data from Jan 2013.xlsx'
xls_cols = 'A,B,D,E,I,P,Q,S,V'
output_csv_name = './mobilisation_data_2013_2014.csv'
start_time = time.time()
mobdf = pd.read_excel(xls_path, sheet_name='Sheet1', usecols=xls_cols)
# Subset data up to 2015 January
mobdf = mobdf[0:314249]
mobdf.to_csv(output_csv_name, index=False)
print("--- Script finished in %.4f seconds ---" % (time.time() - start_time))
| [
"isildirik@gmail.com"
] | isildirik@gmail.com |
03668cd8657241fcab646595058f80c9f4125756 | c3aad901e32f735625f938b4c26cdfa307254a6b | /biothings_explorer/api_preprocess/reasoner.py | b89e427492a7a016d9355ed1ccfbe18fd59cd9d8 | [
"Apache-2.0"
] | permissive | andrewgcodes/biothings_explorer | 73c598fae2171e8b61687325fa1c1ee1a625fbe1 | b54aa195bbed19ff5be09ed24dee869b24bb3c16 | refs/heads/master | 2022-12-23T18:06:34.061346 | 2022-08-18T20:23:17 | 2022-08-18T20:23:17 | 279,000,723 | 0 | 0 | Apache-2.0 | 2020-07-12T05:49:16 | 2020-07-12T05:49:15 | null | UTF-8 | Python | false | false | 939 | py | from itertools import groupby
def restructure_reasoner_response(json_doc):
"""Restructure the API output from reasoner API.
:param: json_doc: json output from reasoner API
"""
edges = json_doc['knowledge_graph']['edges']
if not edges:
return {}
res = {}
edges = sorted(edges, key=lambda x: x['type'])
for k, g in groupby(edges, lambda x: x['type']):
res[k] = []
for _item in g:
if _item['target_id'].startswith("PANTHER.FAMILY"):
_item['panther'] = _item['target_id'][15:]
if _item['target_id'].startswith("CHEBI"):
_item['chebi'] = _item['target_id']
if _item['target_id'].startswith("CHEMBL:"):
_item['chembl'] = _item['target_id'][7:]
if _item['target_id'].startswith("MONDO:"):
_item['mondo'] = _item['target_id'][6:]
res[k].append(_item)
return res | [
"kevinxin@scripps.edu"
] | kevinxin@scripps.edu |
47c78acbdccd77b171655ae99a43265f89f41011 | 30898ff2de7b05412caf60a23c1f5b53ff35ffd9 | /Script.py | b5f7417f84d0251ff6028eb21361ea507a4a02fb | [] | no_license | niravshah2705/DynamoDB_to_Athena | f106b773dd92008e5e5288cdb49fe16cec97ed1a | 902d3d92b3a3c0c30b2c1630d024f98424977d4b | refs/heads/master | 2020-04-08T19:26:12.273525 | 2018-12-02T03:59:12 | 2018-12-02T03:59:12 | 159,655,315 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,925 | py | import sys
import ast
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
## @params: [JOB_NAME]
args = getResolvedOptions(sys.argv, ['JOB_NAME','sourcedb','destinationdb','sourcetable','destinationtable','mapping','fields'])
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
## @type: DataSource
## @args: [database = "aws-blogs-glue-database098234ytb2", table_name = "auto_billplatformcredentials", transformation_ctx = "datasource0"]
## @return: datasource0
## @inputs: []
datasource0 = glueContext.create_dynamic_frame.from_catalog(database = args['sourcedb'], table_name = args['sourcetable'], transformation_ctx = "datasource0")
## @type: ApplyMapping
## @args: [mapping = [("cgimageurl", "string", "cgimageurl", "string"), ("id", "string", "id", "string"), ("billname", "string", "billname", "string"), ("billserverurl", "string", "billserverurl", "string"), ("billapiusername", "string", "billapiusername", "string"), ("billcurrency", "string", "billcurrency", "string"), ("billappname", "string", "billappname", "string"), ("billapipassword", "string", "billapipassword", "string"), ("billdescription", "string", "billdescription", "string"), ("billverifykey", "string", "billverifykey", "string"), ("billchannel", "string", "billchannel", "string"), ("billmerchantid", "string", "billmerchantid", "string"), ("billcountry", "string", "billcountry", "string"), ("billsubsamounttype", "string", "billsubsamounttype", "string"), ("billsubsenableretry", "string", "billsubsenableretry", "string"), ("billsubsppionly", "string", "billsubsppionly", "string"), ("billwebsiteweb", "string", "billwebsiteweb", "string"), ("billchannelweb", "string", "billchannelweb", "string"), ("billgeneratechecksumurl", "string", "billgeneratechecksumurl", "string"), ("billverifychecksumurl", "string", "billverifychecksumurl", "string"), ("billmerchantkey", "string", "billmerchantkey", "string"), ("buildtype", "string", "buildtype", "string"), ("billrequesttype", "string", "billrequesttype", "string"), ("billsubsfrequencyunit", "string", "billsubsfrequencyunit", "string"), ("billtheme", "string", "billtheme", "string"), ("billsubsfrequency", "string", "billsubsfrequency", "string"), ("billchannelwap", "string", "billchannelwap", "string"), ("billwebsitewap", "string", "billwebsitewap", "string"), ("billindustrytypeid", "string", "billindustrytypeid", "string"), ("billserverurlsandbox", "string", "billserverurlsandbox", "string")], transformation_ctx = "applymapping1"]
## @return: applymapping1
## @inputs: [frame = datasource0]
applymapping1 = ApplyMapping.apply(frame = datasource0, mappings = ast.literal_eval(args['mapping']), transformation_ctx = "applymapping1")
## @type: SelectFields
## @args: [paths = ["cgimageurl", "id", "billname", "billserverurl", "billapiusername", "billcurrency", "billappname", "billapipassword", "billdescription", "billverifykey", "billchannel", "billmerchantid", "billcountry", "billsubsamounttype", "billsubsenableretry", "billsubsppionly", "billwebsiteweb", "billchannelweb", "billgeneratechecksumurl", "billverifychecksumurl", "billmerchantkey", "buildtype", "billrequesttype", "billsubsfrequencyunit", "billtheme", "billsubsfrequency", "billchannelwap", "billwebsitewap", "billindustrytypeid", "billserverurlsandbox"], transformation_ctx = "selectfields2"]
## @return: selectfields2
## @inputs: [frame = applymapping1]
selectfields2 = SelectFields.apply(frame = applymapping1, paths = ast.literal_eval(args['fields']), transformation_ctx = "selectfields2")
## @type: ResolveChoice
## @args: [choice = "MATCH_CATALOG", database = "aws-blogs-glue-database098234ytb2", table_name = "ddb-target-s3-table-auto-billplatformcredentials", transformation_ctx = "resolvechoice3"]
## @return: resolvechoice3
## @inputs: [frame = selectfields2]
resolvechoice3 = ResolveChoice.apply(frame = selectfields2, choice = "MATCH_CATALOG", database = args['destinationdb'], table_name = args['destinationtable'], transformation_ctx = "resolvechoice3")
## @type: ResolveChoice
## @args: [choice = "make_struct", transformation_ctx = "resolvechoice4"]
## @return: resolvechoice4
## @inputs: [frame = resolvechoice3]
resolvechoice4 = ResolveChoice.apply(frame = resolvechoice3, choice = "make_struct", transformation_ctx = "resolvechoice4")
## @type: DataSink
## @args: [database = "aws-blogs-glue-database098234ytb2", table_name = "ddb-target-s3-table-auto-billplatformcredentials", transformation_ctx = "datasink5"]
## @return: datasink5
## @inputs: [frame = resolvechoice4]
datasink5 = glueContext.write_dynamic_frame.from_catalog(frame = resolvechoice4, database = args['destinationdb'], table_name = args['destinationtable'] , transformation_ctx = "datasink5")
job.commit()
| [
"noreply@github.com"
] | niravshah2705.noreply@github.com |
6c350abf42358535fd248b6ad5fdb1c201305ee1 | 1dcde4f75d6b5abb115924a8f3b5989b6fbe2dee | /app/game.py | 5b47ab94c64ce1b84557b462d71012b6190ae918 | [
"MIT"
] | permissive | skrolikowski/PyBox | 0561a0f9bdae70d7f110a92f5b928dbd14a8f779 | d79c5229df69f21767a4db15ebe05b91bba3dc8d | refs/heads/master | 2020-03-19T09:17:15.810828 | 2018-09-08T18:07:47 | 2018-09-08T18:07:47 | 136,274,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,979 | py | from .registry import Registry
from .window import GameWindow
class Game:
registry = Registry()
@classmethod
def load(cls, func):
cls.registry.add_command("load", func)
return func
@classmethod
def update(cls, func):
cls.registry.add_command("update", func)
return func
@classmethod
def draw(cls, func):
cls.registry.add_command("draw", func)
return func
@classmethod
def key_press(cls, func):
cls.registry.add_command("key_press", func)
return func
@classmethod
def key_release(cls, func):
cls.registry.add_command("key_release", func)
return func
@classmethod
def key_down(cls, func):
cls.registry.add_command("key_down", func)
return func
@classmethod
def text(cls, func):
cls.registry.add_command("key_text", func)
return func
@classmethod
def mouse_drag(cls, func):
cls.registry.add_command("mouse_drag", func)
return func
@classmethod
def mouse_motion(cls, func):
cls.registry.add_command("mouse_motion", func)
return func
@classmethod
def mouse_press(cls, func):
cls.registry.add_command("mouse_press", func)
return func
@classmethod
def mouse_release(cls, func):
cls.registry.add_command("mouse_release", func)
return func
@classmethod
def mouse_scroll(cls, func):
cls.registry.add_command("mouse_scroll", func)
return func
@classmethod
def focus(cls, func):
cls.registry.add_command("window_focus", func)
return func
@classmethod
def blur(cls, func):
cls.registry.add_command("window_blur", func)
return func
@classmethod
def hide(cls, func):
cls.registry.add_command("window_hide", func)
return func
@classmethod
def show(cls, func):
cls.registry.add_command("window_show", func)
return func
@classmethod
def move(cls, func):
cls.registry.add_command("window_move", func)
return func
@classmethod
def enter(cls, func):
cls.registry.add_command("state_enter", func)
return func
@classmethod
def leave(cls, func):
cls.registry.add_command("state_leave", func)
return func
@classmethod
def resume(cls, func):
cls.registry.add_command("state_resume", func)
return func
@classmethod
def switch(cls, state, *args, **kwargs):
cls.registry.switch(state, *args, **kwargs)
@classmethod
def push(cls, state, *args, **kwargs):
cls.registry.push(state, *args, **kwargs)
@classmethod
def pop(cls, *args, **kwargs):
cls.registry.pop(*args, **kwargs)
@classmethod
def run(cls, width=640, height=480, caption="Game"):
cls.registry.window = GameWindow(cls.registry, width=width, height=height, caption=caption)
| [
"skrolikowski@gmail.com"
] | skrolikowski@gmail.com |
09b2477e4532e1781c1726117b833d2dd74a2098 | c91a9f70e19c25bb8839172ba0733ae90a64c504 | /apps/core/urls.py | e8b4cf1ad85f022d8cdb6681eb2fbc03154d7733 | [] | no_license | barbarakap19/gestao_rh | 464ffb0fb031e91ffef9a40df448ff44998a5fb2 | e2ed295cba055759bd0a9325adf6da9106c58c06 | refs/heads/master | 2020-06-14T20:47:46.471310 | 2019-07-23T03:32:42 | 2019-07-23T03:32:42 | 195,121,768 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | from django.urls import path
from .views import home
urlpatterns = [
path('', home, name='home'),
] | [
"barbara.andrade1901@gmail.com"
] | barbara.andrade1901@gmail.com |
046ae5d3523c40583815218388fe2c2f3b49b05b | df0903d95faa22bd37ea661ba706776d384ab0bd | /scripts/propagate/merger.py | 4892333b046c16ea3904661db9531c0ea8556cce | [] | no_license | tschmorleiz/simman | 50ce0cbf7f6695a09fa45a6dcd8c655ba9febb63 | c589b1a9fc429ee33e46e730253a42bd10fa95b3 | refs/heads/master | 2016-09-06T12:05:59.870641 | 2015-02-16T11:48:38 | 2015-02-16T11:48:38 | 30,798,863 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,625 | py |
#!/usr/bin/python -O
################################################################################
################################################################################
#
# State-Based Text Merging Algorithm
# For 6.033 Design Project 2
# TA: Katherine Fang
# 9 May 2012
#
# Stephan Boyer
# Ami Patel
# Vo Thanh Minh Tue
#
# Description:
#
# Attempts to automatically perform a three-way merge.
# Prints the result to standard output.
#
# This is a proof of concept. In a real system, we would
# write at least the diff algorithm in a faster language.
# This implementation quickly slows down for large files,
# unless the fast diff approximation is used (see below).
#
# For more information, see:
# http://www.stephanboyer.com/post/26/3-way-text-merging-algorithm
#
# Usage:
#
# merger.py ancestor_file alice_file bob_file
#
################################################################################
################################################################################
################################################################################
################################################################################
# String Diffing
################################################################################
################################################################################
# Change objects describe one edit step that turns string a into string b.
class Change:
    """Base class for a single edit in a diff between strings a and b."""


class Insert(Change):
    """Represents adding `text` to string b.

    `pos_a` is the position in a where the insertion takes effect, and
    `range_b` is the half-open (start, end) span the inserted text
    occupies in b.
    """

    def __init__(self, text, pos_a, range_b):
        self.text = text
        self.pos_a = pos_a
        self.range_b = range_b

    def __repr__(self):
        return f'Insert("{self.text}", {self.pos_a}, {self.range_b})'


class Delete(Change):
    """Represents deleting `text` from string b.

    `range_a` is the half-open (start, end) span the removed text
    occupied in a, and `pos_b` is the position in b where the deletion
    point falls.
    """

    def __init__(self, text, range_a, pos_b):
        self.text = text
        self.range_a = range_a
        self.pos_b = pos_b

    def __repr__(self):
        return f'Delete("{self.text}", {self.range_a}, {self.pos_b})'
# takes 2 indexable objects (e.g. strings or lists)
# returns a list of Change objects (Delete or Insert)
# guaranteed to produce an optimal diff
def str_diff(a, b):
    """Compute an optimal diff between two indexable sequences.

    Returns a list of Change objects (Delete or Insert) that, applied in
    order, transform `a` into `b`, using the minimum number of
    single-element insertions and deletions.

    The edit-distance table is filled bottom-up instead of via the
    previous memoized recursion, which recursed to depth len(a) + len(b)
    and raised RecursionError on long, dissimilar inputs.  Tie-breaking
    (deletion preferred over insertion when both are equally cheap)
    matches the recursive version, so the resulting edit script — and
    therefore the emitted Change objects — is identical.
    """
    ls = len(a)
    lf = len(b)

    # cost[i][j] = minimum number of single-element edits needed to turn
    # a[i:] into b[j:].
    cost = [[0] * (lf + 1) for _ in range(ls + 1)]
    for i in range(ls + 1):
        cost[i][lf] = ls - i          # only deletions remain
    for j in range(lf + 1):
        cost[ls][j] = lf - j          # only insertions remain
    for i in range(ls - 1, -1, -1):
        for j in range(lf - 1, -1, -1):
            if a[i] == b[j]:
                cost[i][j] = cost[i + 1][j + 1]
            else:
                cost[i][j] = 1 + min(cost[i + 1][j], cost[i][j + 1])

    # Walk the table from (0, 0) to recover one optimal edit script as
    # (position-in-a, "d") deletions and (position-in-a, "i", element)
    # insertions.  On a tie we delete first, exactly as the old
    # recursive min_diff did via min()'s preference for its first arg.
    script = []
    i = j = 0
    while i < ls or j < lf:
        if i < ls and j < lf and a[i] == b[j]:
            i += 1
            j += 1
        elif j == lf or (i < ls and cost[i + 1][j] <= cost[i][j + 1]):
            script.append((i, "d"))
            i += 1
        else:
            script.append((i, "i", b[j]))
            j += 1

    # The walk already emits positions in nondecreasing order; the
    # (stable) sort is kept as a safeguard, mirroring the original code.
    diff = sorted(script, key=lambda x: x[0])

    # Coalesce runs of raw single-element edits into Insert/Delete
    # objects.  offset_b tracks the net (insertions - deletions) emitted
    # so far, mapping positions in a onto positions in b.
    changes = []
    pos_diff = 0
    offset_b = 0
    while pos_diff < len(diff):
        # A run of insertions that all land at the same position in a.
        length = 0
        pos_a_old = diff[pos_diff][0]
        while pos_diff < len(diff) and diff[pos_diff][1] == "i":
            if diff[pos_diff][0] != pos_a_old:
                break
            length += 1
            pos_diff += 1
        if length > 0:
            pos_a = pos_a_old
            range_b_0 = pos_a_old + offset_b
            range_b_1 = pos_a_old + offset_b + length
            changes.append(Insert(b[range_b_0:range_b_1], pos_a, (range_b_0, range_b_1)))
            offset_b += length
        if pos_diff >= len(diff):
            break
        # A run of deletions covering consecutive positions in a.
        length = 0
        pos_a_old = diff[pos_diff][0]
        while pos_diff < len(diff) and diff[pos_diff][1] == "d":
            if diff[pos_diff][0] != pos_a_old + length:
                break
            length += 1
            pos_diff += 1
        if length > 0:
            range_a_0 = pos_a_old
            range_a_1 = pos_a_old + length
            pos_b = pos_a_old + offset_b
            changes.append(Delete(a[range_a_0:range_a_1], (range_a_0, range_a_1), pos_b))
            offset_b -= length
    return changes
"""
# Here is an alternative version of the str_diff(a, b) function.
# Unlike the version above, it is NOT guaranteed to produce optimal
# diffs. Diffs that are not optimal can sometimes produce unexpected
# results. However, this version is much faster.
import difflib
# takes 2 indexable objects (e.g. strings or lists)
# returns a list of Change objects (Delete or Insert)
# not guaranteed to produce an optimal diff
def str_diff(a, b):
d = difflib.Differ()
diff = list(d.compare(a, b))
changes = []
pos_a = 0
pos_b = 0
pos_diff = 0
while pos_diff < len(diff):
while pos_diff < len(diff) and diff[pos_diff][0] == " ":
pos_diff += 1
pos_a += 1
pos_b += 1
while pos_diff < len(diff) and diff[pos_diff][0] == "?":
pos_diff += 1
length = 0
range_b_0 = pos_b
while pos_diff < len(diff) and diff[pos_diff][0] == "+":
length += 1
pos_diff += 1
pos_b += 1
if length > 0:
changes.append(Insert(b[range_b_0:pos_b], pos_a, (range_b_0, pos_b)))
text = []
range_a_0 = pos_a
while pos_diff < len(diff) and diff[pos_diff][0] == "-":
length += 1
pos_diff += 1
pos_a += 1
if length > 0:
changes.append(Delete(a[range_a_0:pos_a], (range_a_0, pos_a), pos_b))
return changes
"""
################################################################################
################################################################################
# Levenshtein Distance
################################################################################
################################################################################
# compute the Levenshtein distance between two strings
def levenshtein(a, b):
d = {}
for i in range(len(a) + 1):
d[(i, 0)] = i
for j in range(len(b) + 1):
d[(0, j)] = j
for j in range(1, len(b) + 1):
for i in range(1, len(a) + 1):
if a[i - 1] == b[j - 1]:
d[(i, j)] = d[(i - 1, j - 1)]
else:
d[(i, j)] = min([d[(i - 1, j)], d[(i, j - 1)], d[(i - 1, j - 1)]]) + 1
return d[len(a), len(b)]
################################################################################
################################################################################
# Finding Move Actions
################################################################################
################################################################################
# the maximum normalized distance (0-1) between two strings for them to be considered the same
# for the purposes of finding Move actions
MAX_MOVE_DIST = 0.2
# the minimum number of items that can be considered a Move action
MIN_MOVE_LENGTH = 10
# represents moving <text_a> in range <range_a> to <text_b> in range <range_b>
class Move(Change):
def __init__(self, text_a, range_a, pos_a, text_b, range_b, pos_b, first):
self.text_a = text_a
self.range_a = range_a
self.pos_a = pos_a
self.text_b = text_b
self.range_b = range_b
self.pos_b = pos_b
self.first = first
def __repr__(self):
return "Move(\"" + str(self.text_a) + "\", " + str(self.range_a) + ", " + str(self.pos_a) + ", \"" + str(self.text_b) + "\", " + str(self.range_b) + ", " + str(self.pos_b) + ", " + str(self.first) + ")"
# find Move actions in a list of Change objects (mutates the input list).
# a Move action comes from an Insert-Delete pair where the strings differ
# by less than MAX_MOVE_DIST in terms of normalized Levenshtein distance
def find_moves(diff, first):
indices_to_delete = []
for i in range(len(diff)):
if isinstance(diff[i], Delete):
for j in range(len(diff)):
if isinstance(diff[j], Insert):
if not (i in indices_to_delete) and not (j in indices_to_delete):
normalized_dist = float(levenshtein(diff[i].text, diff[j].text)) / max(len(diff[i].text), len(diff[j].text))
if normalized_dist <= MAX_MOVE_DIST and max(len(diff[i].text), len(diff[j].text)) >= MIN_MOVE_LENGTH:
indices_to_delete.append(i)
indices_to_delete.append(j)
diff.append(Move(diff[i].text, diff[i].range_a, diff[j].pos_a, diff[j].text, diff[j].range_b, diff[i].pos_b, first))
indices_to_delete.sort()
indices_to_delete.reverse()
for i in indices_to_delete:
diff.pop(i)
################################################################################
################################################################################
# Text Merging
################################################################################
################################################################################
# represents a list of merge conflicts
class MergeConflictList(Exception):
def __init__(self, conflicts):
self.conflicts = conflicts
def __repr__(self):
return self.conflicts
# takes indexable objects (e.g. strings or lists) a, b and their common ancestor
# returns the merged document
def merge(ancestor, a, b):
# compute the diffs from the common ancestor
diff_a = str_diff(ancestor, a)
diff_b = str_diff(ancestor, b)
# find Move actions
find_moves(diff_a, True)
find_moves(diff_b, False)
# find conflicts and automatically resolve them where possible
conflicts = []
indices_to_delete_a = []
indices_to_delete_b = []
len_diff_a = len(diff_a)
len_diff_b = len(diff_b)
for i in range(len_diff_a):
for j in range(len_diff_b):
if j in indices_to_delete_b:
continue
if isinstance(diff_a[i], Delete) and isinstance(diff_b[j], Delete):
# if two Delete actions overlap, take the union of their ranges
if (diff_b[j].range_a[0] >= diff_a[i].range_a[0] and diff_b[j].range_a[0] < diff_a[i].range_a[1]) or \
(diff_b[j].range_a[1] >= diff_a[i].range_a[0] and diff_b[j].range_a[1] < diff_a[i].range_a[1]) or \
(diff_b[j].range_a[0] < diff_a[i].range_a[0] and diff_b[j].range_a[1] > diff_a[i].range_a[1]):
diff_a[i].range_a = (min(diff_a[i].range_a[0], diff_b[j].range_a[0]), max(diff_a[i].range_a[1], diff_b[j].range_a[1]))
indices_to_delete_b.append(j)
if isinstance(diff_a[i], Delete) and isinstance(diff_b[j], Insert):
# Insert actions inside the range of Delete actions collide
if diff_b[j].pos_a > diff_a[i].range_a[0] and diff_b[j].pos_a < diff_a[i].range_a[1]:
conflicts.append("A is deleting text that B is inserting into.")
if isinstance(diff_a[i], Delete) and isinstance(diff_b[j], Move):
# Delete actions that overlap with but are not fully contained within PsuedoMove sources collide
if diff_a[i].range_a[0] >= diff_b[j].range_a[0] and diff_a[i].range_a[1] <= diff_b[j].range_a[1]:
pass
elif diff_a[i].range_a[0] >= diff_b[j].range_a[0] and diff_a[i].range_a[0] < diff_b[j].range_a[1]:
conflicts.append("B is moving only part of some text that A is deleting.")
elif diff_a[i].range_a[1] >= diff_b[j].range_a[0] and diff_a[i].range_a[1] < diff_b[j].range_a[1]:
conflicts.append("B is moving only part of some text that A is deleting.")
elif diff_a[i].range_a[0] < diff_b[j].range_a[0] and diff_a[i].range_a[1] > diff_b[j].range_a[1]:
conflicts.append("A is deleting text that B is moving.")
# Move destinations inside the range of Delete actions collide
if diff_b[j].pos_a > diff_a[i].range_a[0] and diff_b[j].pos_a < diff_a[i].range_a[1]:
conflicts.append("A is deleting text that B is moving text into.")
if isinstance(diff_a[i], Insert) and isinstance(diff_b[j], Delete):
# Insert actions inside the range of Delete actions collide
if diff_a[i].pos_a > diff_b[j].range_a[0] and diff_a[i].pos_a < diff_b[j].range_a[1]:
conflicts.append("B is deleting text that A is inserting into.")
if isinstance(diff_a[i], Insert) and isinstance(diff_b[j], Insert):
# Insert actions at the same position collide unless the inserted text is the same
if diff_a[i].pos_a == diff_b[j].pos_a:
if diff_a[i].text == diff_b[j].text:
indices_to_delete_b.append(j)
else:
conflicts.append("A and B are inserting text at the same location.")
if isinstance(diff_a[i], Insert) and isinstance(diff_b[j], Move):
# Insert actions at the same location as Move destinations collide unless the text is the same
if diff_a[i].pos_a == diff_b[j].pos_a:
if diff_a[i].text == diff_b[j].text_b:
indices_to_delete_a.append(i)
else:
conflicts.append("A is inserting text at the same location that B is moving text to.")
if isinstance(diff_a[i], Move) and isinstance(diff_b[j], Delete):
# Delete actions that overlap with but are not fully contained within PsuedoMove actions collide
if diff_b[j].range_a[0] >= diff_a[i].range_a[0] and diff_b[j].range_a[1] <= diff_a[i].range_a[1]:
pass
elif diff_b[j].range_a[0] >= diff_a[i].range_a[0] and diff_b[j].range_a[0] < diff_a[i].range_a[1]:
conflicts.append("A is moving only part of some text that B is deleting.")
elif diff_b[j].range_a[1] >= diff_a[i].range_a[0] and diff_b[j].range_a[1] < diff_a[i].range_a[1]:
conflicts.append("A is moving only part of some text that B is deleting.")
elif diff_b[j].range_a[0] < diff_a[i].range_a[0] and diff_b[j].range_a[1] > diff_a[i].range_a[1]:
conflicts.append("B is deleting text that A is moving.")
if isinstance(diff_a[i], Move) and isinstance(diff_b[j], Insert):
# Insert actions at the same location as Move destinations collide unless the text is the same
if diff_b[j].pos_a == diff_a[i].pos_a:
if diff_b[j].text == diff_a[i].text_b:
indices_to_delete_b.append(j)
else:
conflicts.append("B is inserting text at the same location that A is moving text to.")
if isinstance(diff_a[i], Move) and isinstance(diff_b[j], Move):
# PsuedoMove actions collide if their source ranges overlap unless one is fully contained in the other
if diff_b[j].range_a[0] >= diff_a[i].range_a[0] and diff_b[j].range_a[1] <= diff_a[i].range_a[1]:
pass
elif diff_b[j].range_a[0] >= diff_a[i].range_a[0] and diff_b[j].range_a[0] < diff_a[i].range_a[1]:
conflicts.append("A text move by A overlaps with a text move by B.")
elif diff_b[j].range_a[1] >= diff_a[i].range_a[0] and diff_b[j].range_a[1] < diff_a[i].range_a[1]:
conflicts.append("A text move by A overlaps with a text move by B.")
elif diff_b[j].range_a[0] < diff_a[i].range_a[0] and diff_b[j].range_a[1] > diff_a[i].range_a[1]:
pass
# Move actions collide if their destination positions are the same
if diff_a[i].pos_a == diff_b[j].pos_a:
conflicts.append("A and B are moving text to the same location.")
indices_to_delete_a.sort()
indices_to_delete_a.reverse()
for i in indices_to_delete_a:
diff_a.pop(i)
indices_to_delete_b.sort()
indices_to_delete_b.reverse()
for i in indices_to_delete_b:
diff_b.pop(i)
# throw an error if there are conflicts
if len(conflicts) > 0:
return {'merge': None, 'conflicts': conflicts}
# sort the actions by position in the common ancestor
def sort_key(action):
if isinstance(action, Delete):
return action.range_a[0]
if isinstance(action, Insert):
return action.pos_a
actions = sorted(diff_a + diff_b, key=sort_key)
# compute offset lists
offset_changes_ab = []
for i in range(len(actions)):
if isinstance(actions[i], Delete):
offset_changes_ab.append((actions[i].range_a[0], actions[i].range_a[0] - actions[i].range_a[1]))
if isinstance(actions[i], Insert):
offset_changes_ab.append((actions[i].pos_a, len(actions[i].text)))
offset_changes_a = []
for i in range(len(diff_a)):
if isinstance(diff_a[i], Delete):
offset_changes_a.append((diff_a[i].range_a[0], diff_a[i].range_a[0] - diff_a[i].range_a[1]))
if isinstance(diff_a[i], Insert):
offset_changes_a.append((diff_a[i].pos_a, len(diff_a[i].text)))
if isinstance(diff_a[i], Move):
offset_changes_a.append((diff_a[i].range_a[0], diff_a[i].range_a[0] - diff_a[i].range_a[1]))
offset_changes_a.append((diff_a[i].pos_a, len(diff_a[i].text_a)))
offset_changes_b = []
for i in range(len(diff_b)):
if isinstance(diff_b[i], Delete):
offset_changes_b.append((diff_b[i].range_a[0], diff_b[i].range_a[0] - diff_b[i].range_a[1]))
if isinstance(diff_b[i], Insert):
offset_changes_b.append((diff_b[i].pos_a, len(diff_b[i].text)))
if isinstance(diff_b[i], Move):
offset_changes_b.append((diff_b[i].range_a[0], diff_b[i].range_a[0] - diff_b[i].range_a[1]))
offset_changes_b.append((diff_b[i].pos_a, len(diff_b[i].text_a)))
# compute the preliminary merge
preliminary_merge = ancestor[:]
pos_offset = 0
for i in range(len(actions)):
if isinstance(actions[i], Delete):
preliminary_merge = preliminary_merge[:actions[i].range_a[0] + pos_offset] + preliminary_merge[actions[i].range_a[1] + pos_offset:]
pos_offset += actions[i].range_a[0] - actions[i].range_a[1]
offset_changes_ab.append((actions[i].range_a[0], actions[i].range_a[0] - actions[i].range_a[1]))
if isinstance(actions[i], Insert):
preliminary_merge = preliminary_merge[:actions[i].pos_a + pos_offset] + actions[i].text + preliminary_merge[actions[i].pos_a + pos_offset:]
pos_offset += len(actions[i].text)
offset_changes_ab.append((actions[i].pos_a, len(actions[i].text)))
# perform the "delete" part of the moves
for i in range(len(actions)):
if isinstance(actions[i], Move):
range_a0 = actions[i].range_a[0]
range_a1 = actions[i].range_a[1]
for offset_pair in offset_changes_ab:
if offset_pair[0] <= actions[i].range_a[0]:
range_a0 += offset_pair[1]
if offset_pair[0] <= actions[i].range_a[1]:
range_a1 += offset_pair[1]
offset_changes_ab.append((actions[i].range_a[0], actions[i].range_a[0] - actions[i].range_a[1]))
preliminary_merge = preliminary_merge[:range_a0] + preliminary_merge[range_a1:]
# perform the "add" part of the moves
for i in range(len(actions)):
if isinstance(actions[i], Move):
pos_a = actions[i].pos_a
for offset_pair in offset_changes_ab:
if offset_pair[0] <= actions[i].pos_a:
pos_a += offset_pair[1]
text_ancestor = actions[i].text_a
if actions[i].first:
text_a = actions[i].text_b
range_a0 = actions[i].range_a[0]
range_a1 = actions[i].range_a[1]
for offset_pair in offset_changes_b:
if offset_pair[0] <= actions[i].range_a[0]:
range_a0 += offset_pair[1]
if offset_pair[0] <= actions[i].range_a[1]:
range_a1 += offset_pair[1]
text_b = b[range_a0:range_a1]
else:
text_b = actions[i].text_b
range_a0 = actions[i].range_a[0]
range_a1 = actions[i].range_a[1]
for offset_pair in offset_changes_a:
if offset_pair[0] <= actions[i].range_a[0]:
range_a0 += offset_pair[1]
if offset_pair[0] <= actions[i].range_a[1]:
range_a1 += offset_pair[1]
text_a = a[range_a0:range_a1]
text = merge(text_a, text_b, text_ancestor)
offset_changes_ab.append((actions[i].pos_a, len(text)))
preliminary_merge = preliminary_merge[:pos_a] + text + preliminary_merge[pos_a:]
return {'merge': preliminary_merge, 'conflicts': []}
| [
"tschmorleiz@googlemail.com"
] | tschmorleiz@googlemail.com |
b3df07ddba1420150bae25cb281f8f6d17c8c060 | 49b66aad7e8888b8da532d66dac65c5a4e75ceb8 | /pickel/newssort_loader.py | 369b3997ccd4d712af714b4e2fe05bf84a5d977a | [] | no_license | praneetmehta/news_segregator | 7e606057b687dc97a69031e804c5704816b1c4bc | 3be70c007b85559af8c371b815d3122bc2f26ba9 | refs/heads/master | 2021-01-19T23:49:30.571651 | 2017-07-25T23:44:06 | 2017-07-25T23:44:06 | 83,790,066 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 497 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 3 20:07:49 2017
@author: praneet
"""
import pickle
import re
def correct(s):
s = re.sub('\s\W',' ',s)
s = re.sub('\W\s',' ',s)
s = re.sub("[^a-zA-Z']",' ', s)
s = re.sub('\s+',' ',s)
return s
vectorizer,assignment,kmeans = pickle.load(open('../pickel/newssort', 'rb'))
def predict(text):
return assignment[kmeans.predict(vectorizer.transform([correct(text)]))[0]]
| [
"praneet.mehta@gmail.com"
] | praneet.mehta@gmail.com |
4c0ca93db88c706bef26591396d38085e7953adf | 5e95f3cb251b0a4ed750ef37955f05854e1498eb | /test/python/index.py | c6de635a94fe7d39d9a1b1ac751279a719de0584 | [] | no_license | davidenq/prime | 48214b2fc0509c81a3ca026311652c17c95dac16 | d65e7048216fceb4221bcf07b062929ce3e91a72 | refs/heads/master | 2022-12-10T01:43:44.769602 | 2020-07-19T01:37:30 | 2020-07-19T01:37:30 | 280,770,805 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | from cffi import FFI
ffi = FFI()
lib = ffi.dlopn('../../build/libprime.so') | [
"david.nunez.dev@gmail.com"
] | david.nunez.dev@gmail.com |
bd98415fac9fcd5c2df10970fb49b10c06aabbdf | 125cd1609acc13b0f5d953b7ff0faed7c177eb81 | /core/middlewares.py | e44439059857b530dc2179ef7e71ce36ebb1e121 | [] | no_license | anibalvf/DjangoWeb | 6e0a7d356b691fec4a9200f2e645e66cbb57371a | 0aa3c6528e5d2bbc305b76faf0c2fca4b88801aa | refs/heads/main | 2023-02-06T19:25:39.520929 | 2020-12-30T05:16:50 | 2020-12-30T05:16:50 | 325,130,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 537 | py | from django.core.exceptions import PermissionDenied
def simple_middleware(get_response):
# One-time configuration and initialization.
def middleware(request):
# Code to be executed for each request before
# the view (and later middleware) are called
if not request.user.is_superuser:
raise PermissionDenied
response = get_response(request)
# Code to be executed for each request/response after
# the view is called.
return response
return middleware | [
"33007157+anibalvf@users.noreply.github.com"
] | 33007157+anibalvf@users.noreply.github.com |
6f267f19b412e85da602cd830f9cae1e0f449d29 | c0990bc7a4fbc2875e2de8998d274b2e70b91ab3 | /lingvo/jax/layers/stochastics_test.py | 70ffe9917042bf183f1fc0e5792422d75cca1692 | [
"Apache-2.0"
] | permissive | Assimilationstheorie/lingvo | fa5159dd4201b4469398ff2ef004334773ed3642 | dd175517894f7ef541262603e1225341ec3fbb51 | refs/heads/master | 2023-08-27T18:41:31.691814 | 2021-10-24T21:47:29 | 2021-10-24T21:48:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,847 | py | # Lint as: python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for lingvo Jax stochastic layers."""
from absl import logging
from absl.testing import absltest
import jax
from jax import numpy as jnp
from jax import test_util
from lingvo.jax import base_layer
from lingvo.jax import test_utils
from lingvo.jax.layers import stochastics
ToNp = test_utils.ToNp
class StochaticsTest(test_util.JaxTestCase):
def test_dropout_layer01(self):
test_layer_p = stochastics.DropoutLayer.Params().Set(
name='dropout', keep_prob=0.8)
layer = test_layer_p.Instantiate()
prng_key = jax.random.PRNGKey(seed=12346)
prng_key, init_key = jax.random.split(prng_key)
initial_vars = layer.InstantiateVariables(init_key)
logging.info('initial_vars: %s', initial_vars)
inputs = jnp.ones([10, 1000], dtype=jnp.bfloat16)
prng_key, compute_key = jax.random.split(prng_key)
global_step = jnp.array(0, dtype=jnp.uint64)
def Comp(theta, prng_key, global_step, inputs):
with base_layer.JaxContext.NewContext():
per_step_prng_key = jax.random.fold_in(prng_key, global_step)
base_layer.ResetPrngKey(per_step_prng_key, global_step)
output1 = layer.FProp(theta, inputs)
output2 = layer.FProp(theta, inputs)
return output1, output2
output1, output2 = Comp(initial_vars, compute_key, global_step, inputs)
out1_sum = jnp.sum(output1)
out2_sum = jnp.sum(output2)
out1_nonzero = jnp.sum(output1 > 0.0)
out2_nonzero = jnp.sum(output2 > 0.0)
logging.info('out1_sum: %s', out1_sum)
logging.info('out2_sum: %s', out2_sum)
logging.info('out1_nonzero: %s', out1_nonzero)
logging.info('out2_nonzero: %s', out2_nonzero)
self.assertEqual(9920.0, out1_sum)
self.assertEqual(10048.0, out2_sum)
self.assertEqual(7944.0, out1_nonzero)
self.assertEqual(8029.0, out2_nonzero)
def test_dropout_layer_02(self):
test_layer_p = stochastics.DropoutLayer.Params().Set(
name='dropout',
keep_prob=0.8,
noise_shape=[10, 6, 8],
noise_shape_broadcast_dims=[2])
layer = test_layer_p.Instantiate()
prng_key = jax.random.PRNGKey(seed=12346)
prng_key, init_key = jax.random.split(prng_key)
initial_vars = layer.InstantiateVariables(init_key)
logging.info('initial_vars: %s', initial_vars)
inputs = jnp.ones([2, 10, 6, 8], dtype=jnp.bfloat16)
prng_key, compute_key = jax.random.split(prng_key)
global_step = jnp.array(0, dtype=jnp.uint64)
def Comp(theta, prng_key, global_step, inputs):
with base_layer.JaxContext.NewContext():
per_step_prng_key = jax.random.fold_in(prng_key, global_step)
base_layer.ResetPrngKey(per_step_prng_key, global_step)
layer.PrepareFProp()
output1 = layer.FProp(theta, inputs)
return output1
output1 = Comp(initial_vars, compute_key, global_step, inputs)
out1_sum = jnp.sum(output1)
out1_nonzero = jnp.sum(output1 > 0.0)
logging.info('out1_sum: %s', out1_sum)
logging.info('out1_nonzero: %s', out1_nonzero)
self.assertEqual(980, out1_sum)
self.assertEqual(784, out1_nonzero)
if __name__ == '__main__':
absltest.main()
| [
"shafey@google.com"
] | shafey@google.com |
0ca3fb72d679efdb72ddfcd757f6f86a4bc25998 | 3a4975bc5256d0c92e04b943d1eeb81b9ce89640 | /allPossibleCombination.py | 6bc8ffc2db7441bf1e57d244ff71f0f06abe4732 | [] | no_license | Priyankajoshipj/DataStructuresAndAlgorithms | edf6c09f4a994341dda2973aef3761c042df36cc | 513a78b95546b89c74acbcc6691e84c4f0bc4f20 | refs/heads/master | 2020-06-27T14:04:23.144829 | 2019-09-18T04:39:11 | 2019-09-18T04:39:11 | 199,972,227 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 803 | py | def allPossibleCombination(s):
n = len(s)
if n <2:
return n
out = set()
def rec_comb_helper(s, i, out):
if i == len(s) - 1:
return ["", s[i]]
one = rec_comb_helper(s, i+1, out)
for com in one:
if s[i] not in com:
com1 = s[i] + com
out.add(com1)
out.add(com)
return list(out)
allp = rec_comb_helper(s, 0, out)
print(sorted(allp))
def permute(choices, s, out1):
if not s:
return [""]
if len(choices) == len(s):
string = "".join(choices)
out1.append(string)
for i in range(len(s)):
choice = s[i]
if choice in choices:
continue
choices.append(choice)
permute(choices, s, out1)
choices.pop()
return out1
res = []
for comb in allp:
a = permute([], comb, [])
res += a
print(sorted(res))
return res
allPossibleCombination("ABC") | [
"33767244+Priyankajoshipj@users.noreply.github.com"
] | 33767244+Priyankajoshipj@users.noreply.github.com |
57839fbdaf39ce151f280eecf2ac06516ded4c83 | 0123229ac84c057b188f6b17c1131ec630ecaf25 | /stochastic_gradient_descent/test_sire_offset/offset_fix_phiandpsi/extract_frcmod.py | 4598f60b3d2efdd919bfb1c52e5dd461d50b8d9e | [] | no_license | michellab/paramfit-tests | 689851ab95406aad7160403c4a70d3ec6be91981 | 39598e93936beff48aefff1604483fd265a5f46a | refs/heads/master | 2021-01-13T05:47:23.287857 | 2017-04-24T10:58:21 | 2017-04-24T10:58:21 | 76,249,680 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | from parmed.amber import *
import parmed
import os
base = AmberParm("orig.prmtop", "fit.rst7")
parmed.tools.writeFrcmod(base,"test.frcmod").execute()
frcmod_file = open("test.frcmod","r").readlines()
for fr in frcmod_file:
if "C -N -CT-C " in fr: # this is phi
print("value of Phi")
print(fr)
elif "N -CT-C -N" in fr:
print("value of Psi")
print(fr)
else:
continue
cmd = "rm test.frcmod"
os.system(cmd)
| [
"stefanobosisio1@gmail.com"
] | stefanobosisio1@gmail.com |
a56f746654c8cc821dca0132fe96988a4192a627 | 83cf20e7d181eac59dd0c3b1d5b7e095807fb6e0 | /maintain/move/some_test.py | 8d8776898e0137851dba8f01d1e3de0bbd871b52 | [] | no_license | ys3721/release_update_tools | b96fb272103e245fdf4273763c5f9b29e75e1529 | 4fd76ce05e793c3163a208830eee18b5f986ac73 | refs/heads/master | 2022-06-27T05:12:07.158151 | 2021-11-30T08:36:53 | 2021-11-30T08:36:53 | 178,325,294 | 2 | 2 | null | 2021-04-26T20:35:39 | 2019-03-29T03:24:35 | Python | UTF-8 | Python | false | false | 328 | py | #! /usr/bin/python
# -*-coding=utf8-*-
# @Auther: Yao Shuai
import os
import subprocess
sql_name = os.system("sshpass -p 321 ssh root@10.10.6.14 ls /data0/src/s1145_*")
print "os.system.result=" + str(sql_name)
result = os.popen("sshpass -p 321 ssh root@10.10.6.14 ls /data0/src/s1145_*").readline()
print result
| [
"ys3721@hotmail.com"
] | ys3721@hotmail.com |
f42908edf55755571fd62a4c3989c96f7da5a3b2 | 0def7d63d694908d4fa5422631d86b61a31b3e8b | /module/HaiGuan/Python_PJ/YDMPython3.py | 30a42ed3662dc6a1fa9512916af7ed9a4046d096 | [] | no_license | 921016124/Spiders | 45fffb5a48ecce2c2754187d2f9b6c9caf1f3eaf | e2a3f3f59657974940801de8bc2bbb1a416af55b | refs/heads/master | 2022-12-13T10:59:42.375979 | 2020-09-14T09:02:13 | 2020-09-14T09:02:13 | 191,732,013 | 0 | 1 | null | null | null | null | GB18030 | Python | false | false | 819 | py | # -*- coding: cp936 -*-
import os
import sys
from ctypes import *
class PJ:
def Po_Jie(self):
print('>>>正在初始化...')
YDMApi = windll.LoadLibrary('yundamaAPI-x64')
appId = 7931
appKey = b'07e97c69ff9cd82a854d636ae1c1cb5e'
print('软件ID:%d\r\n软件密钥:%s' % (appId, appKey))
username = b'machengguang'
password = b'zxcv_1234'
if username == b'test':
exit('\r\n>>>请先设置用户名密码')
print('\r\n>>>正在一键识别...')
codetype = 1004
result = c_char_p(b" ")
timeout = 60
filename = b'Captcha.jpg'
captchaId = YDMApi.YDM_EasyDecodeByPath(username, password, appId, appKey, filename, codetype, timeout, result)
print("一键识别:验证码ID:%d,识别结果:%s" % (captchaId, result.value))
return result.value
| [
"921016124@qq.com"
] | 921016124@qq.com |
a7a23ce0dca0223e330d4f53312a3a2dcdd5b5e2 | 00540adf5d4e1be80b3edca5f60f298497eddef0 | /expression_analysis.py | f767375bce05d8fe1368a3bff3ea33932a27efc0 | [] | no_license | smetroid/expression_analysis | 4fe274302f29a12689d97872dbc659d734c236f1 | 445a73b7fb2be0e153f6e25666f4be14cf1ae3a4 | refs/heads/master | 2020-03-27T11:02:57.525089 | 2018-11-09T04:19:53 | 2018-11-09T04:21:04 | 146,461,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,747 | py | #!/usr/bin/python
import sqlite3, csv, re
import sys
def createDBs(conn):
interproscan7_table = "create table interproscan7(trinity, random1, random2, sites, code1, description1, start, stop, evalue, random3, date, code2, description2, goterms, reactome)"
expression_counts_table = "create table expression_counts (trinity, ho8_quants, ho7_quants)"
interproscan8_table = "create table interproscan8(trinity, random1, random2, sites, code1, description1, start, stop, evalue, random3, date, code2, description2, goterms, reactome)"
fastaho7 = "create table fastaho7 (trinity, data BLOB)"
fastaho8 = "create table fastaho8 (trinity, data BLOB)"
fastaho7transdecoder = "create table fastaho7transdecoder (trinity, data BLOB)"
fastaho8transdecoder = "create table fastaho8transdecoder (trinity, data BLOB)"
ho8ids_with_quantids = "create table ho8idswithquantsids (ho8ids, jointids)"
cur = conn.cursor()
cur.execute(interproscan7_table)
cur.execute(interproscan8_table)
cur.execute(expression_counts_table)
cur.execute(fastaho7)
cur.execute(fastaho8)
cur.execute(fastaho7transdecoder)
cur.execute(fastaho8transdecoder)
cur.execute(ho8ids_with_quantids)
def testDB(cur):
cur.execute("SELECT * from hostinfo")
rows = cur.fetchall()
for row in rows:
print(row)
return None
def loadInterproScan7(cur):
tsv_data_file = open("./data/Ho7_K31_Trinity_InterProScan_1.tsv")
tsv_reader = csv.reader(tsv_data_file, delimiter="\t")
for i in tsv_reader:
# Remove the .p1 from the trinity value
#i[0] = re.sub(r'_i.*$', '', i[0])
print(i)
if len(i) < 15:
continue
cur.execute("INSERT INTO interproscan7 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", tuple(i))
else:
cur.execute("INSERT INTO interproscan7 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", tuple(i))
def loadInterproScan8(cur):
tsv_data_file = open("./data/Ho8_k31_Trinity_InterProScan.tsv")
tsv_reader = csv.reader(tsv_data_file, delimiter="\t")
for i in tsv_reader:
# Remove the .p1 from the trinity value
#i[0] = re.sub(r'_i.*$', '', i[0])
print(i)
if len(i) < 15:
continue
cur.execute("INSERT INTO interproscan8 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", tuple(i))
else:
cur.execute("INSERT INTO interproscan8 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", tuple(i))
def loadNorm(cur):
tsv_data_file = open("./norm")
tsv_reader = csv.reader(tsv_data_file, delimiter="\t")
for i in tsv_reader:
# Remove the "_i1-4" from the trinity value
i[0] = re.sub(r'_i.*$', '', i[0])
print(i)
if len(i) == 0:
continue
else:
cur.execute("INSERT INTO expression_counts VALUES (?, ?, ?)", tuple(i))
def getDataSet(cur, filter):
cur.execute("SELECT trinity,start,stop FROM interproscan7 WHERE code1 LIKE ?", (filter,))
file_name = "%s_%s.csv" % ("interproscan7", filter)
rows = cur.fetchall()
data = []
fo = open(file_name, 'w')
for row in rows:
info = "%s,%s,%s\n" % (row[0],row[1],row[2])
data.append(info)
fo.writelines(data)
fo.close()
def buildTempView(cur):
#Generate a temporary view for the ho8 and ho7 quants aggregate counts
sql_view = ('CREATE VIEW expression_count_aggregates '
'AS '
'SELECT trinity, SUM(ho8_quants) as ho8_quants, '
'SUM(ho7_quants) as ho7_quants '
'FROM expression_counts '
'GROUP BY trinity ')
cur.execute(sql_view)
def getNormAndHoData(cur, filter):
sql = ('SELECT DISTINCT inter.trinity, inter.start, inter.stop, '
'ec.ho8_quants, ec.ho7_quants '
'FROM interproscan7 inter '
'INNER JOIN expression_count_aggregates ec '
'ON inter.trinity = ec.trinity '
'WHERE code1 LIKE "%s" '
'ORDER BY inter.trinity ')
file_name = "%s_%s.csv" % ("normAndHoData",filter)
print sql % (filter)
cur.execute(sql % (filter))
rows = cur.fetchall()
data = []
header = "%s,%s,%s,%s,%s\n" % ("trinity", "start", "stop", "ho8_quants", "ho7_quants")
data.append(header)
fo = open(file_name, 'w')
for row in rows:
info = "%s,%s,%s,%s,%s\n" % (row[0],row[1],row[2],row[3],row[4])
data.append(info)
fo.writelines(data)
fo.close()
def loadFastaDataHo7(cur):
trinity = ""
data = ""
with open("./data/Ho7_K31_Trinity.fasta") as f:
for i in f:
match = re.search(r'^>TRINITY', i)
#print i
if (match):
if (data != ""):
sql_insert = (trinity, data)
cur.execute("INSERT INTO fastaho7 VALUES (?, ?)", tuple(sql_insert))
data = i
else:
data = i
trinity_field = i.split(" ")
#trinity = re.sub(r'_i.*$', '', trinity_field[0]).replace('>', '')
trinity = trinity_field[0].replace('>', '')
else:
data += i
# When EOF is reached commit the last values into SQL
sql_insert = (trinity, data)
cur.execute("INSERT INTO fastaho7 VALUES (?, ?)", tuple(sql_insert))
def loadFastaDataHo8(cur):
    """Parse ./data/Ho8_K31_Trinity.fasta and insert one (trinity, record)
    row per FASTA entry into the ``fastaho8`` table.

    Same format as loadFastaDataHo7: the record keeps its full raw text and
    the trinity id is the first token of the header without the '>'.
    """
    trinity = ""
    data = ""
    with open("./data/Ho8_K31_Trinity.fasta") as f:
        for line in f:
            if re.search(r'^>TRINITY', line):
                # New header: flush the previously accumulated record.
                if data != "":
                    cur.execute("INSERT INTO fastaho8 VALUES (?, ?)", (trinity, data))
                data = line
                trinity = line.split(" ")[0].replace('>', '')
            else:
                data += line
    # EOF: flush the final record, guarding against an empty input file
    # (the original unconditionally inserted ("", "") in that case).
    if data != "":
        cur.execute("INSERT INTO fastaho8 VALUES (?, ?)", (trinity, data))
def loadFastaDataHo7Transdecoder(cur):
    """Parse ./data/Ho7_K31_Trinity.fasta.transdecoder.pep and insert one
    (trinity, record) row per entry into ``fastaho7transdecoder``.

    Same framing as the plain FASTA loaders: records are delimited by
    '>TRINITY' header lines and keep their full raw text.
    """
    trinity = ""
    data = ""
    with open("./data/Ho7_K31_Trinity.fasta.transdecoder.pep") as f:
        for line in f:
            if re.search(r'^>TRINITY', line):
                # New header: flush the previously accumulated record.
                if data != "":
                    cur.execute("INSERT INTO fastaho7transdecoder VALUES (?, ?)", (trinity, data))
                data = line
                trinity = line.split(" ")[0].replace('>', '')
            else:
                data += line
    # EOF: flush the final record unless the file was empty (the original
    # inserted a spurious ("", "") row for empty input).
    if data != "":
        cur.execute("INSERT INTO fastaho7transdecoder VALUES (?, ?)", (trinity, data))
def loadFastaDataHo8Transdecoder(cur):
    """Parse ./data/Ho8_K31_Trinity.fasta.transdecoder.pep and insert one
    (trinity, record) row per entry into ``fastaho8transdecoder``.

    Same framing as the other FASTA loaders: records are delimited by
    '>TRINITY' header lines and keep their full raw text.
    """
    trinity = ""
    data = ""
    with open("./data/Ho8_K31_Trinity.fasta.transdecoder.pep") as f:
        for line in f:
            if re.search(r'^>TRINITY', line):
                # New header: flush the previously accumulated record.
                if data != "":
                    cur.execute("INSERT INTO fastaho8transdecoder VALUES (?, ?)", (trinity, data))
                data = line
                trinity = line.split(" ")[0].replace('>', '')
            else:
                data += line
    # EOF: flush the final record unless the file was empty (the original
    # inserted a spurious ("", "") row for empty input).
    if data != "":
        cur.execute("INSERT INTO fastaho8transdecoder VALUES (?, ?)", (trinity, data))
def loadHo8idsWithQuantIds(cur):
    """Load the tab-separated ho8-id / combined-quant-id mapping into the
    two-column ``ho8idswithquantsids`` table.

    Empty rows (blank lines) are skipped; every other row is inserted as-is.
    """
    path = "./data/ho8_ids_with_corresponding_combined_ho7_ho8_ids.csv"
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(path) as tsv_data_file:
        tsv_reader = csv.reader(tsv_data_file, delimiter="\t")
        for row in tsv_reader:
            # Remove the "_i1-4" from the trinity value
            #row[0] = re.sub(r'_i.*$', '', row[0])
            print(row)
            if len(row) == 0:
                continue
            cur.execute("INSERT INTO ho8idswithquantsids VALUES (?, ?)", tuple(row))
def fastaDataQuery(cur):
    """Load the three-column expression TSV named by sys.argv[2] into the
    ``expression_counts`` table, collapsing isoform ids onto their parent
    trinity id.
    """
    # 'with' closes the handle; the original left the file open.
    with open(sys.argv[2]) as tsv_data_file:
        tsv_reader = csv.reader(tsv_data_file, delimiter="\t")
        for row in tsv_reader:
            # Skip blank lines FIRST: the original indexed row[0] before the
            # emptiness check and crashed with IndexError on blank TSV lines.
            if len(row) == 0:
                continue
            # Remove the "_i1-4" isoform suffix from the trinity value.
            row[0] = re.sub(r'_i.*$', '', row[0])
            print(row)
            cur.execute("INSERT INTO expression_counts VALUES (?, ?, ?)", tuple(row))
if __name__ == "__main__":
    # Build (or rebuild) the SQLite database from the raw TSV/FASTA inputs,
    # then emit the per-motif CSV extracts. createDBs / loadInterproScan* /
    # loadNorm / getDataSet are defined earlier in this file (outside this
    # excerpt).
    conn = sqlite3.Connection("expression_data.sqlite3")
    #conn = sqlite3.Connection(":memory:")
    createDBs(conn)
    cur = conn.cursor()
    loadInterproScan7(cur)
    loadInterproScan8(cur)
    loadNorm(cur)
    # PF00201
    getDataSet(cur, "PF00201")
    # PS00375
    getDataSet(cur, "PS00375")
    buildTempView(cur)
    getNormAndHoData(cur, "PF00201")
    getNormAndHoData(cur, "PS00375")
    loadFastaDataHo7(cur)
    loadFastaDataHo8(cur)
    loadFastaDataHo7Transdecoder(cur)
    loadFastaDataHo8Transdecoder(cur)
    loadHo8idsWithQuantIds(cur)
    # Single commit at the end: all loads happen in one transaction.
    conn.commit()
    conn.close()
| [
"enriquegc1982@gmail.com"
] | enriquegc1982@gmail.com |
48096466ced3cec7b5b5429e2b83fb56cd6edfd4 | a5b9ca98802358af0ad2698696cd0a145379c4dc | /python_俄罗斯方块/game/src/gameState.py | b4ba0d9cdda27a4e1c960b3834c2c69d3f73e197 | [] | no_license | young-yang/InitialTemptation | dea09f89007f50317b611568a784e0d524e3cfe2 | 79a6fad172076e412dde6a36527f1adf2132cffb | refs/heads/master | 2021-01-22T21:53:46.879485 | 2018-07-24T16:36:37 | 2018-07-24T16:36:37 | 85,488,008 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,131 | py | # -*- coding: UTF-8 -*-
import random
from settings import *
from piece import Piece
from gameWall import GameWall
import pygame
class GameState():
    """Mutable state of one Tetris session: the wall of settled blocks, the
    currently falling piece, the score, the gravity-timer interval and the
    pause/stop flags."""

    def __init__(self,screen):
        self.screen = screen
        self.wall = GameWall(screen)
        self.piece = None
        self.timer_interval = TIMER_INTERVAL
        self.game_score = 0
        # whether the game is stopped
        self.stopped = True
        # whether the game is paused
        self.paused = False

    def set_timer(self,time_interval):
        # Re-arm the pygame USEREVENT timer used as the gravity tick.
        # NOTE(review): pygame.time.set_timer returns None, so game_timer is
        # always None -- confirm whether storing the result was intentional.
        self.game_timer = pygame.time.set_timer(pygame.USEREVENT, time_interval)

    def add_score(self,score):
        # Accumulate points (e.g. for cleared lines).
        self.game_score += score

    def startGame(self):
        # NOTE(review): game_score is not reset here, so restarting keeps the
        # previous score -- confirm intent.
        self.stopped = False
        self.set_timer(TIMER_INTERVAL)
        self.timer_interval = TIMER_INTERVAL
        self.piece = Piece(random.choice(PIECE_TYPES),self.screen,self.wall)

    def pauseGame(self):
        pygame.time.set_timer(pygame.USEREVENT, 0)  # passing 0 cancels the timer
        self.paused = True

    def resumeGame(self):
        # Resume at the interval in effect when the game was paused.
        self.set_timer(self.timer_interval)
        self.paused = False
| [
"314235034@qq.com"
] | 314235034@qq.com |
298e2aa7b8a6a78750a2be2b758d445d10b343ae | b4826ec1ca1f71401cad7dcfe4c7f7573223ffc5 | /assignment6/submission/solution.py | dda3cb4842026a8976b5fcc2a86a6f95531cd331 | [] | no_license | deepmodh1996/Artificial-Intelligence-Lab | 01d5240127198e85ddcd853379343c44ad0d481e | cf945095cf599e291116b61784b109f868418ae3 | refs/heads/master | 2021-01-11T18:25:53.871827 | 2017-06-17T05:22:10 | 2017-06-17T05:22:10 | 79,544,323 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,302 | py | import sys
# Value-iteration solver for a tabular MDP read from the file named by
# argv[1].  File layout (whitespace separated): S, A, then S*A*S rewards,
# then S*A*S transition probabilities, then gamma.  (Python 2 script.)
filename = sys.argv[1]
# print filename
f = open(filename,'r')
# Flatten the whole file into one token list regardless of line breaks.
MDP = f.read().replace('\n','\t').split('\t')
MDP = [x for x in MDP if x]
# print MDP
S = int(MDP[0])
A = int(MDP[1])
# print S
# print A
r = 2
# R[s][a][s'] = reward, T[s][a][s'] = transition probability, read in order.
R = [[[0 for x in range(S)] for y in range(A)] for z in range(S)]
for i in range(0, S):
    for j in range(0, A):
        for k in range(0, S):
            R[i][j][k] = float(MDP[r])
            r = r + 1
# print R
T = [[[0 for x in range(S)] for y in range(A)] for z in range(S)]
for i in range(0, S):
    for j in range(0, A):
        for k in range(0, S):
            T[i][j][k] = float(MDP[r])
            r = r + 1
# print T
gamma = float(MDP[r])
# print gamma
# Value iteration: V converges to the optimal values, PI to the greedy policy.
V = [0]*S
PI = [0]*S
t = 0
epsilon = 10**(-16)
while (True):
    t = t + 1
    Vprev = V[:]
    # print Vprev
    for i in range(0, S):
        maxvalue = 0.0
        PI[i] = 0
        # Seed the maximum with action 0's backed-up value...
        for k in range(0, S):
            maxvalue += T[i][0][k]*(R[i][0][k] + gamma*Vprev[k])
        # ...then scan all actions (action 0 is re-evaluated, which is
        # redundant but harmless).
        for j in range(0, A):
            value = 0.0
            for k in range(0, S):
                value += T[i][j][k]*(R[i][j][k] + gamma*Vprev[k])
            if (value > maxvalue):
                maxvalue = max(value, maxvalue)
                PI[i] = j
        V[i] = maxvalue
    # Stop once no state's value moved by more than epsilon.
    numValid = 0
    for i in range(0, len(V)):
        if(abs(V[i] - Vprev[i]) > epsilon):
            numValid = 1
    if (numValid == 0):
        break
# One "value<TAB>action" line per state, then the iteration count.
for i in range(0, S):
    print str(V[i]) + '\t' + str(PI[i])
print "Iterations" + '\t' + str(t)
| [
"deepmodh1996@gmail.com"
] | deepmodh1996@gmail.com |
35a457296554b87038a7ebfa03198c4b1c60e697 | ebd5c4632bb5f85c9e3311fd70f6f1bf92fae53f | /Sourcem8/pirates/effects/VoodooAura2.py | 852b91918310ef820ba576e0b80105d5ea24b395 | [] | no_license | BrandonAlex/Pirates-Online-Retribution | 7f881a64ec74e595aaf62e78a39375d2d51f4d2e | 980b7448f798e255eecfb6bd2ebb67b299b27dd7 | refs/heads/master | 2020-04-02T14:22:28.626453 | 2018-10-24T15:33:17 | 2018-10-24T15:33:17 | 154,521,816 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,226 | py | # File: V (Python 2.4)
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from direct.particles import ParticleEffect
from direct.particles import Particles
from direct.particles import ForceGroup
from otp.otpbase import OTPRender
from PooledEffect import PooledEffect
from EffectController import EffectController
import random
class VoodooAura2(PooledEffect, EffectController):
    """Pooled Panda3D particle effect: a short burst of expanding 'voodoo
    shockwave' sprites radiating from the effect node.  Tint is adjustable
    via setEffectColor; instances are recycled through the effect pool."""

    # Base multiplier applied to the sprite's X/Y scales.
    cardScale = 128.0

    def __init__(self):
        PooledEffect.__init__(self)
        EffectController.__init__(self)
        # Source geometry for the sprite renderer.
        model = loader.loadModel('models/effects/battleEffects')
        self.card = model.find('**/effectVoodooShockwave')
        # Particles render under a dedicated dummy node so they can disable
        # depth-write/lighting and stay out of the shadow camera.
        if not self.particleDummy:
            self.particleDummy = self.attachNewNode(ModelNode('VoodooAura2ParticleDummy'))
            self.particleDummy.setDepthWrite(0)
            self.particleDummy.setLightOff()
            self.particleDummy.hide(OTPRender.ShadowCameraBitmask)
        self.effectColor = Vec4(1, 1, 1, 1)
        self.f = ParticleEffect.ParticleEffect('VoodooAura2')
        self.f.reparentTo(self)
        self.p0 = Particles.Particles('particles-1')
        self.p0.setFactory('PointParticleFactory')
        self.p0.setRenderer('SpriteParticleRenderer')
        self.p0.setEmitter('PointEmitter')
        self.f.addParticles(self.p0)
        # Emission parameters: steady stream of one particle every 0.02 s.
        self.p0.setPoolSize(64)
        self.p0.setBirthRate(0.02)
        self.p0.setLitterSize(1)
        self.p0.setLitterSpread(0)
        self.p0.setSystemLifespan(0.0)
        self.p0.setLocalVelocityFlag(0)
        self.p0.setSystemGrowsOlderFlag(0)
        self.p0.factory.setLifespanBase(1.0)
        self.p0.factory.setLifespanSpread(0.0)
        self.p0.factory.setMassBase(1.0)
        self.p0.factory.setMassSpread(0.0)
        self.p0.factory.setTerminalVelocityBase(400.0)
        self.p0.factory.setTerminalVelocitySpread(0.0)
        # Renderer: card sprite that fades out and grows over its lifetime.
        self.p0.renderer.setAlphaMode(BaseParticleRenderer.PRALPHAOUT)
        self.p0.renderer.setUserAlpha(0.5)
        self.p0.renderer.setFromNode(self.card)
        self.p0.renderer.setColor(Vec4(1.0, 1.0, 1.0, 1.0))
        self.p0.renderer.setXScaleFlag(1)
        self.p0.renderer.setYScaleFlag(1)
        self.p0.renderer.setAnimAngleFlag(0)
        self.p0.renderer.setInitialXScale(0.0050000000000000001 * self.cardScale)
        self.p0.renderer.setFinalXScale(0.012 * self.cardScale)
        self.p0.renderer.setInitialYScale(0.0050000000000000001 * self.cardScale)
        self.p0.renderer.setFinalYScale(0.012 * self.cardScale)
        self.p0.renderer.setNonanimatedTheta(0.0)
        self.p0.renderer.setAlphaBlendMethod(BaseParticleRenderer.PPBLENDLINEAR)
        self.p0.renderer.setAlphaDisable(0)
        # Fade from light grey to the configurable effect color; additive blend.
        self.p0.renderer.getColorInterpolationManager().addLinear(0.0, 1.0, Vec4(0.80000000000000004, 0.80000000000000004, 0.80000000000000004, 1), self.effectColor, 1)
        self.p0.renderer.setColorBlendMode(ColorBlendAttrib.MAdd, ColorBlendAttrib.OIncomingAlpha, ColorBlendAttrib.OOne)
        # Emitter: particles radiate outward from the origin point.
        self.p0.emitter.setEmissionType(BaseParticleEmitter.ETRADIATE)
        self.p0.emitter.setAmplitude(0.20000000000000001)
        self.p0.emitter.setAmplitudeSpread(0.0)
        self.p0.emitter.setOffsetForce(Vec3(0.0, 0.0, 0.0))
        self.p0.emitter.setExplicitLaunchVector(Vec3(1.0, 0.0, 0.0))
        self.p0.emitter.setRadiateOrigin(Point3(0.0, 0.0, 0.0))

    def createTrack(self, rate = 1):
        # Start emitting, hold 0.75 s, then choke the birth rate and give the
        # last particles 1 s to die before the node is returned to the pool.
        self.startEffect = Sequence(Func(self.p0.setBirthRate, 0.029999999999999999), Func(self.p0.clearToInitial), Func(self.f.start, self, self.particleDummy))
        self.endEffect = Sequence(Func(self.p0.setBirthRate, 100), Wait(1.0), Func(self.cleanUpEffect))
        self.track = Sequence(self.startEffect, Wait(0.75), self.endEffect)

    def setEffectColor(self, color):
        # Rebuild the color ramp so new particles fade toward *color*.
        self.effectColor = color
        self.p0.renderer.getColorInterpolationManager().clearToInitial()
        self.p0.renderer.getColorInterpolationManager().addLinear(0.0, 1.0, Vec4(0.80000000000000004, 0.80000000000000004, 0.80000000000000004, 1), self.effectColor, 1)

    def cleanUpEffect(self):
        # Detach and return this instance to the shared effect pool.
        self.detachNode()
        self.checkInEffect(self)

    def destroy(self):
        EffectController.destroy(self)
        PooledEffect.destroy(self)
| [
"brandoncarden12345@gmail.com"
] | brandoncarden12345@gmail.com |
8941c5b291ec2762adfe11384e1d783e5e747927 | b5ff404bda572f32b8f3a28025c6e76ed381d504 | /Ali/forms.py | 79fbab94a165611df8f85ee32aac656c08c35a39 | [] | no_license | Daechulbae/56chul | d5366e4ea7a43e329435da082f98503dd1a7c0f2 | aef1028568057264c998a915240256065e776830 | refs/heads/master | 2018-10-29T18:30:47.901769 | 2018-08-23T07:12:25 | 2018-08-23T07:12:25 | 124,995,760 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | from django import forms
class actionForm(forms.Form):
    """Minimal form with a single free-text field capped at 10 characters."""
    # NOTE(review): "texbox" looks like a typo for "textbox", but renaming it
    # would change the rendered input name and any template references.
    texbox = forms.CharField(max_length=10)
"eocjf17@gmail.com"
] | eocjf17@gmail.com |
aafc7c42fc0bc0c36abc57d07eba7f7d396a0646 | 0db5a9cf6be1e08accb24151982fe297287af158 | /test2/main.py | 58a2620a0238e11ca745e00ea22d128bb975a8b8 | [] | no_license | Robinsondssantos/fastapi-test | a30a14bfaa93bbd6aef4c3758f1be5c426927d39 | 7bab2cdbef6344d4c9c817a33fe6e63fd0861d1e | refs/heads/master | 2022-12-16T12:20:11.359535 | 2020-09-29T19:20:39 | 2020-09-29T19:20:39 | 296,133,546 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,274 | py | # from sqlalchemy import create_engine
# from sqlalchemy.ext.declarative import declarative_base
# from sqlalchemy.orm import sessionmaker
import json
import psycopg2
from typing import Optional
from fastapi import FastAPI
# SQLALCHEMY_DATABASE_URL = 'postgresql://postgres:password@localhost:5432/books'
# engine = create_engine(
# SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
# )
# SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Base = declarative_base()
connection = psycopg2.connect(
host='localhost',
database='fastdb',
user='postgres',
password='postgres'
)
print('connection:', connection)
cursor = connection.cursor()
print('cursor:', cursor)
# cursor.execute(
# """
# CREATE TABLE readings (
# id INTEGER PRIMARY KEY,
# humidity INTEGER
# )
# """
# )
# cursor.close()
# connection.commit()
app = FastAPI()
@app.get('/')
async def read_root():
    """Return every row of the ``readings`` table as a list of JSON objects
    keyed by column name."""
    cursor.execute(
        """
        SELECT * FROM readings
        """
    )
    # Build real dicts and let FastAPI serialize them.  The previous
    # repr()-then-replace-quotes round trip produced invalid JSON whenever a
    # value contained an apostrophe or a double quote.
    column_names = [description[0] for description in cursor.description]
    return [dict(zip(column_names, row)) for row in cursor.fetchall()]
"robinsonsantos@localhost.localdomain"
] | robinsonsantos@localhost.localdomain |
947031afacd28cc7dd21f3fba36625e6915693d7 | 15fa13fad9a05a51843c3ed6cf1f8afbb33aae66 | /examples/reference/arc/arc.pde | b443a2ddb54118463bb6c09c57edbe0b4a5eb328 | [] | no_license | kazimuth/python-mode-processing | 4ad39f18c9637206fa7c691ac328baae0fc21b1a | e6274f89e0464b771870327a56ce01bff629e0fb | refs/heads/master | 2021-01-22T05:27:44.912530 | 2014-04-22T17:20:15 | 2014-04-22T17:20:15 | 10,946,779 | 4 | 0 | null | 2013-12-31T01:23:52 | 2013-06-25T18:40:55 | Java | UTF-8 | Python | false | false | 149 | pde |
# Four quarter-arcs sharing the center (50, 55), each covering a different
# 90-degree range with an increasing diameter; only the first is filled.
arc(50, 55, 50, 50, 0, PI/2)
noFill()
arc(50, 55, 60, 60, PI/2, PI)
arc(50, 55, 70, 70, PI, TWO_PI-PI/2)
arc(50, 55, 80, 80, TWO_PI-PI/2, TWO_PI)
| [
"martin_p@lineone.net"
] | martin_p@lineone.net |
19f57d01e4d553b3bab39e996318902932a5bef7 | 19631688a8be0e390f25a915f634a76c9a3d4fa3 | /ABC108/B_rined_square.py | 1adcdf28210c801eb65f6cbd8748a2b5afd49091 | [] | no_license | tsurusekazuki/AtCoder-practice | 7141fd74f6a876c9f3e65a1dca400ef4c153bca8 | e9538157b6a63f43300c6693e9c5deadaa4d5d2a | refs/heads/master | 2020-05-02T02:57:14.180926 | 2019-04-28T10:00:49 | 2019-04-28T10:00:49 | 177,715,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | a, b, c, d = map(int, input().split())
# Rotate the edge vector (a,b)->(c,d) by 90 degrees to obtain the square's
# two remaining vertices.
dx = c - a
dy = d - b
print(c - dy, d + dx, a - dy, b + dx)
| [
"b1714935@planet.kanazawa-it.ac.jp"
] | b1714935@planet.kanazawa-it.ac.jp |
f67e1e6de3d56e55471bc879166edec1c32ba813 | 8da79aedfb20c9798de0f4db4c5d85929a32f82b | /boo/columns.py | 200ff1a19478b1dd373b0d3bbfd9b11bfc79fc79 | [
"MIT"
] | permissive | nasingfaund/boo | a94e941ca8d3251fbb320c2e2f63e439f7ef4d59 | 96d08857abd790bc44f48256e7be7da130543a84 | refs/heads/master | 2023-07-01T00:33:33.085311 | 2021-08-03T21:23:03 | 2021-08-03T21:23:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,429 | py | """Преобразование сырых названий столбцов в названия переменных.
Описания полей отчетности можно посмотреть например в:
http://info.avtovaz.ru/files/avtovaz_ras_fs_2012_rus_secured.pdf
Более подробно о публикуемой форме отчетности:
http://www.consultant.ru/document/cons_doc_LAW_103394/b990bf4a13bd23fda86e0bba50c462a174c0d123/#dst100515
"""
from collections import OrderedDict
from dataclasses import dataclass
import numpy
import pandas as pd
# Column names as provided at Rosstat web site
TTL_COLUMNS = [
"Наименование",
"ОКПО",
"ОКОПФ",
"ОКФС",
"ОКВЭД",
"ИНН",
"Код единицы измерения",
"Тип отчета",
"11103",
"11104",
"11203",
"11204",
"11303",
"11304",
"11403",
"11404",
"11503",
"11504",
"11603",
"11604",
"11703",
"11704",
"11803",
"11804",
"11903",
"11904",
"11003",
"11004",
"12103",
"12104",
"12203",
"12204",
"12303",
"12304",
"12403",
"12404",
"12503",
"12504",
"12603",
"12604",
"12003",
"12004",
"16003",
"16004",
"13103",
"13104",
"13203",
"13204",
"13403",
"13404",
"13503",
"13504",
"13603",
"13604",
"13703",
"13704",
"13003",
"13004",
"14103",
"14104",
"14203",
"14204",
"14303",
"14304",
"14503",
"14504",
"14003",
"14004",
"15103",
"15104",
"15203",
"15204",
"15303",
"15304",
"15403",
"15404",
"15503",
"15504",
"15003",
"15004",
"17003",
"17004",
"21103",
"21104",
"21203",
"21204",
"21003",
"21004",
"22103",
"22104",
"22203",
"22204",
"22003",
"22004",
"23103",
"23104",
"23203",
"23204",
"23303",
"23304",
"23403",
"23404",
"23503",
"23504",
"23003",
"23004",
"24103",
"24104",
"24213",
"24214",
"24303",
"24304",
"24503",
"24504",
"24603",
"24604",
"24003",
"24004",
"25103",
"25104",
"25203",
"25204",
"25003",
"25004",
"32003",
"32004",
"32005",
"32006",
"32007",
"32008",
"33103",
"33104",
"33105",
"33106",
"33107",
"33108",
"33117",
"33118",
"33125",
"33127",
"33128",
"33135",
"33137",
"33138",
"33143",
"33144",
"33145",
"33148",
"33153",
"33154",
"33155",
"33157",
"33163",
"33164",
"33165",
"33166",
"33167",
"33168",
"33203",
"33204",
"33205",
"33206",
"33207",
"33208",
"33217",
"33218",
"33225",
"33227",
"33228",
"33235",
"33237",
"33238",
"33243",
"33244",
"33245",
"33247",
"33248",
"33253",
"33254",
"33255",
"33257",
"33258",
"33263",
"33264",
"33265",
"33266",
"33267",
"33268",
"33277",
"33278",
"33305",
"33306",
"33307",
"33406",
"33407",
"33003",
"33004",
"33005",
"33006",
"33007",
"33008",
"36003",
"36004",
"41103",
"41113",
"41123",
"41133",
"41193",
"41203",
"41213",
"41223",
"41233",
"41243",
"41293",
"41003",
"42103",
"42113",
"42123",
"42133",
"42143",
"42193",
"42203",
"42213",
"42223",
"42233",
"42243",
"42293",
"42003",
"43103",
"43113",
"43123",
"43133",
"43143",
"43193",
"43203",
"43213",
"43223",
"43233",
"43293",
"43003",
"44003",
"44903",
"61003",
"62103",
"62153",
"62203",
"62303",
"62403",
"62503",
"62003",
"63103",
"63113",
"63123",
"63133",
"63203",
"63213",
"63223",
"63233",
"63243",
"63253",
"63263",
"63303",
"63503",
"63003",
"64003",
"Дата актуализации",
]
# -- Текстовые поля
MAPPER = OrderedDict(
[
("Наименование", "name"),
("ОКПО", "okpo"),
("ОКОПФ", "okopf"),
("ОКФС", "okfs"),
("ОКВЭД", "okved"),
("ИНН", "inn"),
("Код единицы измерения", "unit"),
("Тип отчета", "report_type"),
("Дата актуализации", "date_published"),
# -- Баланс
# -- Внеоборотные активы
("1100", "ta_fix"),
("1150", "of"),
("1170", "ta_fix_fin"),
# -- Оборотные активы
("1200", "ta_nonfix"),
("1210", "inventory"),
("1230", "receivables"),
("1240", "ta_nonfix_fin"),
("1250", "cash"),
("1600", "ta"),
# -- Пассивы
("1300", "tp_capital"),
("1360", "retained_earnings"),
("1400", "tp_long"),
("1410", "debt_long"),
("1500", "tp_short"),
("1510", "debt_short"),
("1520", "payables"),
("1700", "tp"),
# -- ОПУ
("2110", "sales"),
("2120", "costs"),
("2200", "profit_oper"),
("2330", "exp_interest"),
("2300", "profit_before_tax"),
("2400", "profit_after_tax"),
# -- ОДДС
("4400", "cf"),
# -- Операционная деятельность
("4100", "cf_oper"),
("4110", "cf_oper_in"),
("4111", "cf_oper_in_sales"),
("4120", "cf_oper_out"),
("4121", "paid_to_supplier"),
("4122", "paid_to_worker"),
("4123", "paid_interest"),
("4124", "paid_profit_tax"),
# -- Инвестицонная деятельность
("4200", "cf_inv"),
("4210", "cf_inv_in"),
("4220", "cf_inv_out"),
("4221", "paid_fa_investment"),
# -- Финансовая деятельность
("4300", "cf_fin"),
("4310", "cf_fin_in"),
("4311", "cf_loan_in"),
("4312", "cf_eq_in_1"),
("4313", "cf_eq_in_2"),
("4314", "cf_bond_in"),
("4320", "cf_fin_out"),
("4321", "cf_eq_out"),
("4322", "cf_div_out"),
("4323", "cf_debt_out"),
]
)
def ask(code):
    """Human-readable variable name for raw column *code*, or None if unmapped."""
    key = str(code)
    return MAPPER.get(key)
def fst(text):
    """First character of *text* (IndexError on an empty string)."""
    return text[0]
def last(text):
    """Last character of *text* (IndexError on an empty string)."""
    return text[-1]
def trim(text):
    """*text* with its final character removed ("" stays "")."""
    return text[:-1]
# Section marker used for non-numeric (text/meta) columns.
NON_NUMERIC = "x"
# This type assures missing integer values will be converted to NaNs
# See https://pandas.pydata.org/pandas-docs/stable/user_guide/integer_na.html
# and https://github.com/ru-corporate/boo/issues/18
INT_TYPE = pd.Int64Dtype()
@dataclass
class Column:
    """One raw Rosstat column: accounting code, report section, lag flag."""

    code: str
    section: str
    lag: bool

    def rename_with(self, mapper: dict):
        """Copy of this column with its code translated through *mapper*
        (unchanged when the code is not a key)."""
        return Column(mapper.get(self.code, self.code), self.section, self.lag)

    def is_numeric(self):
        """True for columns holding numbers rather than text."""
        return self.section != NON_NUMERIC

    @property
    def label(self):
        """Output column name; a "_lag" suffix marks prior-period values."""
        suffix = "_lag" if self.lag else ""
        return f"{self.code}{suffix}"

    @property
    def dtype(self):
        """Pandas dtype: nullable integer for numeric sections, str otherwise."""
        if self.is_numeric():
            return INT_TYPE
        return str
def is_lagged(text):
    """Classify a raw column name.

    False -- current-period value (code starts or ends with "3"),
    True  -- prior-period twin (code ends with "4"),
    None  -- not a recognisable period-coded column.
    """
    first, final = text[0], text[-1]
    if first == "3" or final == "3":
        return False
    if final == "4":
        return True
    return None


assert is_lagged("63243") is False
assert is_lagged("Дата актуализации") is None
assert is_lagged("23304") is True
def section(text):
    """Financial-statement section for a raw column code, keyed by the
    code's first digit; NON_NUMERIC for text/meta columns."""
    sections = {
        "1": "Баланс",
        "2": "ОПУ",
        "3": "Изменения капитала",
        "4": "ОДДС",
        "6": "Extras",
    }
    return sections.get(text[0], NON_NUMERIC)
def code(text):
    """Strip the trailing period digit from balance/P&L/CF/extras codes;
    other headers are returned unchanged."""
    if text[0] in ("1", "2", "4", "6"):
        return text[:-1]
    return text
def column(text):
    """Build a Column descriptor from one raw Rosstat header string."""
    base_code = code(text)
    return Column(base_code, section(text), is_lagged(text))
# Parsed descriptors for every raw header field.
columns = [column(x) for x in TTL_COLUMNS]
# Positions of the fields that survive renaming (i.e. whose code is in MAPPER).
INDEX = [i for (i, c) in enumerate(columns) if c.rename_with(MAPPER) != c]
columns_short = [c.rename_with(MAPPER) for c in columns if c.rename_with(MAPPER) != c]
# Output column label -> pandas dtype for the kept fields.
NAMES = {c.label: c.dtype for c in columns_short}
assert len(INDEX) == len(NAMES)
| [
"e.pogrebnyak@gmail.com"
] | e.pogrebnyak@gmail.com |
14c5cd9b73325b3972cb62a0961b995040c5e757 | c36d980ba59de3a562a878f185bad628078ea279 | /Mundo_2_Python/estrutura_for/desafio_53.py | f00fb7eac2b231297a400abccb2d112bd91e8d87 | [
"MIT"
] | permissive | tamyrds/Exercicios-Python | d79f295acec4204293a076842598832f7d42e6c6 | 73bd8ea49e74db88b39c4f20dfe058a4805c0567 | refs/heads/main | 2023-04-11T21:40:15.446784 | 2021-05-11T02:12:34 | 2021-05-11T02:12:34 | 363,760,074 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 306 | py | frase = str(input('Digite uma frase: '))
# Remove the spaces, then test whether the remaining letters read the same
# in both directions.
palavra = frase.split()
junto = ''.join(palavra)
# Reverse with a slice instead of the manual index loop.
inverso = junto[::-1]
print(junto,inverso)
# Compare case-insensitively: a palindrome such as "Ame a ema" must match
# even though its first letter is capitalized.
if inverso.lower() == junto.lower():
    print('Temos um palindromo')
else:
    print('A frase digitada não é um palindromo')
"78001437+tamyrds@users.noreply.github.com"
] | 78001437+tamyrds@users.noreply.github.com |
b27851062a7ab359e902306b8840e240f1c7031a | 2473096eef0a4c19d73494616562612e5fe8d85f | /chapters/04_machine_learning_basics/linear_regression_graph.py | ebf439242c9279c49239c3b3ccc6476a85c33556 | [] | no_license | barmi/tensorflowbook | 85e38fa6575efc0df08f4f62af5c6eff68d9860a | b2d8bd6c612786c89f7b94aefd545bfbe4943c8d | refs/heads/master | 2021-01-22T12:38:36.253253 | 2017-10-12T11:51:33 | 2017-10-12T11:51:33 | 102,354,484 | 0 | 0 | null | 2017-09-04T11:31:35 | 2017-09-04T11:31:35 | null | UTF-8 | Python | false | false | 2,124 | py | # Linear regression example in TF.
import tensorflow as tf
import numpy as np
def inference(X):
    # Affine model X @ W + b; W and b are the module-level TF variables
    # created in the graph block below.
    return tf.add(tf.matmul(X, W, name='W_mul_X'), b, name='inference')
def evaluate(sess):
    # Spot-check predictions for two (weight, age) inputs; the trailing
    # comments give the expected blood-fat values.
    print(sess.run(inference([[80., 25.]]))) # ~ 303
    print(sess.run(inference([[65., 25.]]))) # ~ 256
# weight_age
x_data = np.float32([
    [84, 46], [73, 20], [65, 52], [70, 30], [76, 57],
    [69, 25], [63, 28], [72, 36], [79, 57], [75, 44],
    [27, 24], [89, 31], [65, 52], [57, 23], [59, 60],
    [69, 48], [60, 34], [79, 51], [75, 50], [82, 34],
    [59, 46], [67, 23], [85, 37], [55, 40], [63, 30]])
# blood_fat_content
y_data = [
    354, 190, 405, 263, 451,
    302, 288, 385, 402, 365,
    209, 290, 346, 254, 395,
    434, 220, 374, 308, 220,
    311, 181, 274, 303, 244]
# Build the training graph: y = x_data @ W + b, squared-error loss, plain
# gradient descent (TensorFlow 1.x API).
graph = tf.Graph()
with graph.as_default():
    W = tf.Variable(tf.random_uniform([2,1], -1.0, 1.0), name="weight")
    b = tf.Variable(tf.zeros([1]), name="bias")
    y = tf.add(tf.matmul(x_data, W, name="mul"), b, name="add")
    loss = tf.reduce_sum(tf.squared_difference(y, y_data), name="loss")
    train_op = tf.train.GradientDescentOptimizer(0.0000001).minimize(loss, name="GradientDescent")

# Launch the graph in a session, setup boilerplate
with tf.Session(graph=graph) as sess:
    writer = tf.summary.FileWriter('./linearReg', graph)
    tf.global_variables_initializer().run()
    '''
    def inference(X):
        return tf.add(tf.multiply(X, W, name='W_mul_X'), b, name='inference')

    def loss(X, Y):
        Y_predicted = inference(X)
        return tf.reduce_sum(tf.squared_difference(Y, Y_predicted))

    def train(total_loss):
        learning_rate = 0.0000001
        return tf.train.GradientDescentOptimizer(learning_rate).minimize(total_loss)
    '''
    # actual training loop
    training_steps = 10000
    for step in range(training_steps):
        result = sess.run(train_op)
        w_res = sess.run(W)
        # Log W, b and the loss every 100 steps.
        if step % 100 == 0:
            print("%5d : W (%12.8f, %12.8f), b (%12.8f), loss: %12.8f" % (step, w_res[0], w_res[1], sess.run(b), sess.run(loss)))
    writer.flush()
    evaluate(sess)
| [
"skshin@nbreds.com"
] | skshin@nbreds.com |
f771322752f5feab04cb77f3b2f35d3026f3513f | 8aa3069cd4840fd216b917187a9c96bd7d3e2367 | /Exercícios/binomiofatorial.py | 424d1e4b8b3bb4389d4000032efe0357afec0102 | [] | no_license | rafaelsaidbc/USP | b10a28f958a1af5670fe48061f7b0c8b9db5d5d0 | 8c077f392fccd814380ea0e1b5ec228a54d4f779 | refs/heads/master | 2020-03-24T00:41:12.718523 | 2018-07-25T18:31:47 | 2018-07-25T18:31:47 | 142,302,564 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 901 | py | def fatorial(n):
fat = 1 #variavel fat recebe o valor 1, porque 1 eh um valor nulo em uma multiplicacao
while(n > 1): #enquanto n for maior que 1, o laço (while) continua executando
fat = fat * n #multiplica fat por n
n = n - 1 #atualiza o n subtraindo 1
return fat #finalzia o while e atualiza a variavel fat
def numero_binomial(n, k):
    """Return the binomial coefficient C(n, k) = n! / (k! * (n-k)!) exactly.

    The division is always exact, so integer floor division is used: unlike
    true division it cannot overflow or lose precision for large n (floats
    cap out around 170!).
    """
    return fatorial(n) // (fatorial(k) * fatorial(n - k))
def testa_fatorial():
    """Smoke-test fatorial against known values, printing one verdict per case."""
    casos = [(1, 1), (2, 2), (0, 1), (5, 120)]
    for n, esperado in casos:
        if fatorial(n) == esperado:
            print("Funciona para %d" % n)
        else:
            print("Não funciona para %d" % n)
| [
"rafaelsaidbc@yahoo.com.br"
] | rafaelsaidbc@yahoo.com.br |
1f0baac7e207e96a1df522f360dbc91a273ce68e | d4bad471feefc230441ff2bbb2c312b50c96d554 | /chapter_code/chapter32_Spark_AI/HorovodEstimator.py | 36243e6c0f91c166e37312e6b7640cb19d773c1b | [] | no_license | limiaoiao/code-of-spark-big-data-business-trilogy | 6d69b1c6cf4b91ec12c65b9e4c64b0034f679d79 | 6bb7aa6c0209c8e20c5c1d7a191162635ecd658b | refs/heads/master | 2022-12-29T05:47:53.133212 | 2020-08-16T12:38:34 | 2020-08-16T12:38:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,356 | py | # Databricks notebook source
import numpy as np
import tensorflow as tf
import horovod.tensorflow as hvd
from pyspark.sql.types import *
from pyspark.sql.functions import rand, when
from sparkdl.estimators.horovod_estimator.estimator import HorovodEstimator
# COMMAND ----------
# Load MNIST dataset, with images represented as arrays of floats
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data("/tmp/mnist")
x_train = x_train.reshape((x_train.shape[0], -1))
data = [(x_train[i].astype(float).tolist(), int(y_train[i])) for i in range(len(y_train))]
schema = StructType([StructField("image", ArrayType(FloatType())),
StructField("label_col", LongType())])
df = spark.createDataFrame(data, schema)
display(df)
# COMMAND ----------
help(HorovodEstimator)
# COMMAND ----------
def model_fn(features, labels, mode, params):
    """
    Arguments:
    * features: Dict of DataFrame input column name to tensor (each tensor corresponding to
      batch of data from the input column)
    * labels: Tensor, batch of labels
    * mode: Specifies if the estimator is being run for training, evaluation or prediction.
    * params: Optional dict of hyperparameters. Will receive what is passed to
      HorovodEstimator in params parameter. This allows for configuring Estimators for
      hyperparameter tuning.
    Returns: tf.estimator.EstimatorSpec describing our model.
    """
    from tensorflow.examples.tutorials.mnist import mnist
    # HorovodEstimator feeds scalar Spark SQL types to model_fn as tensors of shape [None]
    # (i.e. a variable-sized batch of scalars), and array Spark SQL types (including
    # VectorUDT) as tensors of shape [None, None] (i.e. a variable-sized batch of dense
    # variable-length arrays).  Here image data comes from an ArrayType(FloatType())
    # column; each float array is a flattened 28x28 MNIST image of length 784.
    input_layer = features['image']
    #input_layer = tf.reshape(input_layer, shape=[-1, 784])
    # Two-hidden-layer MLP from the TF MNIST tutorial; layer sizes come from params.
    logits = mnist.inference(input_layer, hidden1_units=params["hidden1_units"],
                             hidden2_units=params["hidden2_units"])
    serving_key = tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
    # Generate a dictionary of inference output name to tensor (for PREDICT mode).
    # Tensor outputs corresponding to the DEFAULT_SERVING_SIGNATURE_DEF_KEY are produced
    # as output columns of the TFTransformer generated by fitting our estimator.
    predictions = {
        "classes": tf.argmax(input=logits, axis=1, name="classes_tensor"),
        "probabilities": tf.nn.softmax(logits, name="softmax_tensor"),
    }
    export_outputs = {serving_key: tf.estimator.export.PredictOutput(predictions)}
    # In PREDICT mode, stop building the graph here and return only the
    # inference outputs.
    if mode == tf.estimator.ModeKeys.PREDICT:
        return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions,
                                          export_outputs=export_outputs)
    # Calculate loss (for both TRAIN and EVAL modes): softmax cross-entropy
    # against one-hot labels over the 10 digit classes.
    onehot_labels = tf.one_hot(indices=tf.cast(labels, tf.int32), depth=10)
    loss = tf.losses.softmax_cross_entropy(onehot_labels=onehot_labels, logits=logits)
    if mode == tf.estimator.ModeKeys.TRAIN:
        # Set up logging hooks; these run on every worker.
        logging_hooks = [tf.train.LoggingTensorHook(tensors={"predictions": "classes_tensor"}, every_n_iter=5000)]
        # Horovod: scale learning rate by the number of workers, add distributed optimizer.
        optimizer = tf.train.MomentumOptimizer(
            learning_rate=0.001 * hvd.size(), momentum=0.9)
        optimizer = hvd.DistributedOptimizer(optimizer)
        train_op = optimizer.minimize(
            loss=loss,
            global_step=tf.train.get_global_step())
        return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op,
                                          export_outputs=export_outputs,
                                          training_hooks=logging_hooks)
    # EVAL mode: report accuracy so it is logged when model evaluation runs.
    eval_metric_ops = {"accuracy": tf.metrics.accuracy(
        labels=labels, predictions=predictions["classes"])}
    return tf.estimator.EstimatorSpec(
        mode=mode, loss=loss, eval_metric_ops=eval_metric_ops, export_outputs=export_outputs)
# COMMAND ----------
# Model checkpoints will be saved to the driver machine's local filesystem.
model_dir = "/tmp/horovod_estimator"
dbutils.fs.rm(model_dir[5:], recurse=True)
# Create estimator
est = HorovodEstimator(modelFn=model_fn,
featureMapping={"image": "image"},
modelDir=model_dir,
labelCol="label_col",
batchSize=64,
maxSteps=5000,
isValidationCol="isVal",
modelFnParams={"hidden1_units": 100, "hidden2_units": 50},
saveCheckpointsSecs=30)
# COMMAND ----------
# Add column indicating whether each row is in the training/validation set; we perform a random split of the data
df_with_val = df.withColumn("isVal", when(rand() > 0.8, True).otherwise(False))
# Fit estimator to obtain a TFTransformer
transformer = est.fit(df_with_val)
# Apply the TFTransformer to our training data and display the results. Note that our predicted "classes" tend to
# match the label column in our training set.
res = transformer.transform(df)
display(res)
# COMMAND ----------
est.setMaxSteps(10000)
new_transformer = est.fit(df_with_val)
new_res = transformer.transform(df)
display(new_res)
# COMMAND ----------
dbutils.fs.cp("file:/tmp/horovod_estimator/", "dbfs:/horovod_estimator/", recurse=True)
# COMMAND ----------
# MAGIC %sh
# MAGIC ls -ltr /tmp/horovod_estimator
# COMMAND ----------
print(dbutils.fs.ls("dbfs:/horovod_estimator/"))
# COMMAND ----------
# MAGIC %sh
# MAGIC rm -rf /tmp/horovod_estimator
# COMMAND ----------
# MAGIC %sh
# MAGIC ls -ltr /tmp/horovod_estimator
| [
"noreply@github.com"
] | limiaoiao.noreply@github.com |
bda639d9da4402bab567f7414ee6b727da5e4aa0 | 2b7b5628f199bca51ff916b81140794ee2f79edd | /Array/array_reverse.py | 0bb4dd7f925eb88352441b11a1509c9346815905 | [] | no_license | Koilada-Rao-au16/DSA_Solver | 9d01cdc72bf8602926a1824ba8dc739924b62406 | 6db2d9b855b0bf7ee6dfb5e5bcf50f4969534fc6 | refs/heads/main | 2023-04-06T03:26:47.902446 | 2021-04-12T10:07:56 | 2021-04-12T10:07:56 | 348,692,423 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | # Iterative python program to reverse an array
def reverseList(self):
print(self[::-1])
self = [1,2,3,4,5]
print(self)
print("reverse list is ")
reverseList(self)
# method 2
def reverseList(A,start,end):
if start >= end:
return
A[start],A[end] = A[end],A[start]
reverseList(A, start+1 , end-1)
A = [1,2,3,4,5]
print(A)
reverseList(A, 0, 4)
print("reversed list is")
print(A)
# time complexity O(n)
| [
"bhaskar9.koilada@gmail.com"
] | bhaskar9.koilada@gmail.com |
5a4e2fd6d664cb94196906be84cc66e9eb31eac7 | 95807bb74dd42332d094d4de5564f3b4f460adbb | /Python 101/indexing_and_slicing.py | 0b01b489434ada6e2e55ee779ab27e723d07c172 | [
"MIT"
] | permissive | projetosparalelos/The-Complete-Python-Course-including-Django-Web-Framework | 92442cfa16609016f714bbc4af91782859c7a646 | 402b35d4739ed91e50d6c3380cab6f085a46c52b | refs/heads/main | 2023-05-12T01:20:41.299572 | 2021-06-03T07:56:56 | 2021-06-03T07:56:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | lst = ['one', 'two', 'three', 'four', 'five']
# 0 1 2 3 4
print(lst[-2::])
# b = True
# print(b[0])
course = "Python 101"
print(course[5])
| [
"noreply@github.com"
] | projetosparalelos.noreply@github.com |
d3bba560b1a63f14196f8ac1f02e0fc94bd6ac84 | c5873c616d60d51ddc0a2388dce3c69ee0332d96 | /poker_project/settings/base.py | 1ab88949cbc4e7687010c5a66291d865059adfc6 | [] | no_license | dmongey101/texas-hold-em | 4e915acc6326cd07463e7b5160e870556a69a689 | 9e3481d8fed77b0f80c8740f5946505e9ead4a6e | refs/heads/master | 2022-12-12T23:04:30.665901 | 2022-10-24T17:56:32 | 2022-10-24T17:56:32 | 158,427,569 | 3 | 1 | null | 2022-12-08T01:28:49 | 2018-11-20T17:30:48 | HTML | UTF-8 | Python | false | false | 3,459 | py | """
Django settings for ecommerce_project project.
Generated by 'django-admin startproject' using Django 2.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
import dj_database_url
import environ
import os
env = environ.Env()
environ.Env.read_env()
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "jnem='&rm_rqin%u!h1dwqh6wc-qkr#j-=77)r%*dw^-cxb#!++"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_forms_bootstrap',
'accounts',
'poker',
'storages',
'donations'
]
MIDDLEWARE = [
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'poker_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.media',
],
},
},
]
WSGI_APPLICATION = 'poker_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': dj_database_url.parse(os.environ.get("DATABASE_URL", "sqlite:///db.sqlite3"))
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/'
STRIPE_PUBLISHABLE = os.environ.get('STRIPE_PUBLISHABLE')
STRIPE_SECRET = os.environ.get('STRIPE_SECRET') | [
"donalmongey@gmail.com"
] | donalmongey@gmail.com |
efa8b8921a7754cc8ad6ddb8d8d7f7bae7ff52b6 | 4c9ea189dac171be81ef2d72c2bbc7541e93e0a4 | /credit_risk.py | b0cfd2a010acf81e76f5f09a18948982a7ac03d6 | [] | no_license | Ayush19443/Credit-risk | dc44a388afe2f378c8fe71df86aa216c42985149 | cbb2aed96e93217a11d9b379318f678e524fb809 | refs/heads/master | 2022-12-02T19:51:26.785161 | 2020-08-15T11:49:10 | 2020-08-15T11:49:10 | 287,735,727 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,582 | py | #%%
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
import pandas as pd
import numpy as np
import warnings
import time
import sys
import os
DeprecationWarning('ignore')
warnings.filterwarnings('ignore',message="don't have warning")
#%%
from sklearn.tree import DecisionTreeClassifier
#%%
tf=pd.read_csv('credit_risk.csv')
#%%
tf.head()
#%%
tf.sample(12)
#%%
tf.describe()
#%%
tf.isnull().sum()
#%%
tf.Gender[tf.Gender == 'Male'] = 1
tf.Gender[tf.Gender == 'Female'] = 2
tf.Married[tf.Married == 'Yes'] = 1
tf.Married[tf.Married == 'No'] = 2
tf.Education[tf.Education == 'Graduate'] = 1
tf.Education[tf.Education == 'Not Graduate'] = 2
tf.Self_Employed[tf.Self_Employed == 'Yes'] = 1
tf.Self_Employed[tf.Self_Employed == 'No'] = 2
tf.Property_Area[tf.Property_Area == 'Rural'] = 1
tf.Property_Area[tf.Property_Area == 'Urban'] = 2
tf.Property_Area[tf.Property_Area == 'Semiurban']= 3
tf.Dependents[tf.Dependents=='3+']=3
#%%
tf.head()
#%%
import seaborn as sns
sns.distplot(tf.Gender.dropna())
#%%
train,test = train_test_split(tf, test_size=0.2, random_state=12)
#%%
clf = DecisionTreeClassifier()
#%%
train.shape
#%%
test.shape
#%%
train.isnull().sum()
#%%
def fill_Gender(tf):
median= 1
tf['Gender'].fillna(median, inplace = True)
return tf
def fill_Married(tf):
median= 1
tf['Married'].fillna(median, inplace = True)
return tf
def fill_Dependents(tf):
median= 0
tf['Dependents'].fillna(median, inplace = True)
return tf
def fill_Self_Employed(tf):
median= 2
tf['Self_Employed'].fillna(median, inplace = True)
return tf
def fill_LoanAmount(tf):
mean= 142.5717
tf['LoanAmount'].fillna(mean, inplace = True)
return tf
def fill_Loan_Amoount_Term(tf):
median= 360
tf['Loan_Amount_Term'].fillna(median, inplace = True)
return tf
def fill_Credit_Historys(tf):
median= 1
tf['Credit_History'].fillna(median, inplace = True)
return tf
def encode_feature(tf):
tf = fill_Gender(tf)
tf=fill_Married(tf)
tf=fill_Dependents(tf)
tf=fill_Self_Employed(tf)
tf=fill_LoanAmount(tf)
tf=fill_Loan_Amoount_Term(tf)
tf=fill_Credit_Historys(tf)
return(tf)
#%%
tf=encode_feature(tf)
#%%
train = encode_feature(train)
test = encode_feature(test)
#%%
def x_and_y(tf):
x = tf.drop(["Loan_Status","Loan_ID","Gender","Dependents","Property_Area","Education","Self_Employed","ApplicantIncome","CoapplicantIncome"],axis=1)
y = tf["Loan_Status"]
return x,y
x_train,y_train = x_and_y(train)
x_test,y_test = x_and_y(test)
"""
clf_entropy = DecisionTreeClassifier(
criterion = "entropy", random_state = 100,
max_depth = 3, min_samples_leaf = 5)
"""
#%%
"""
clf_entropy.fit(x_train,y_train)
return clf_entropy
"""
#%%
from sklearn.metrics import accuracy_score, confusion_matrix
from sklearn.linear_model import LogisticRegression
#%%
log_model = DecisionTreeClassifier(criterion='entropy')
log_model.fit(x_train,y_train)
prediction = log_model.predict(x_train)
score = accuracy_score(y_train,prediction)
print(score*100)
#%%
y_train.shape
#%%
x_train.columns
#%%
log_model = DecisionTreeClassifier(criterion='entropy')
log_model.fit(x_train,y_train)
prediction = log_model.predict(x_test)
score1 = accuracy_score(y_test,prediction)
print(score1)
#%%
import seaborn as sns
sns.distplot(tf.LoanAmount.dropna())
#%%
| [
"noreply@github.com"
] | Ayush19443.noreply@github.com |
bd8fcf8cfabbf0d617f23ee7dd8b9d937d3551d1 | 7be178ac527253028a5d3009bac33d2e7e9533cd | /alphabet.py | ed7eda8cf9f8a6508fa0fb5a336a328d5f56c609 | [] | no_license | mrajeshraj/guvi | 27f745ea7a24ba8ecb465d2cbc1373dedef7a776 | 275cf24f1f1fe11571cec88c12b64bb80ad4c93a | refs/heads/master | 2020-05-25T23:02:11.526682 | 2019-06-17T17:05:33 | 2019-06-17T17:05:33 | 188,025,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | ch=input()
if(ch=='a' or ch=='b' or ch=='c' or ch=='d'):
print("Alphabet")
else:
print("Not an Alphabet")
| [
"noreply@github.com"
] | mrajeshraj.noreply@github.com |
0b20a4b796a7b98a278b4bcbbc680c4308972641 | 35438635c64c8ec1066285f9849bb3634ee46644 | /weibo乃万/analyze.py | 11fbf4a9ad459c2d9824f995016c70746c68a6bb | [] | no_license | fancccc/code2021 | 8bbd6e68e1c2587e520daa18a147c48f431a5269 | 8e85a69bd30092e7337cbfade5c34ec62b7c2ad3 | refs/heads/main | 2023-04-26T06:27:06.812121 | 2021-05-13T15:10:22 | 2021-05-13T15:10:22 | 367,084,415 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,339 | py | # -*- coding: utf-8 -*-
"""
Created on Mon May 10 23:27:01 2021
@author: Mario
"""
import pandas as pd
from snownlp import SnowNLP
from snownlp import sentiment
import random
import jieba
import imageio
from wordcloud import WordCloud,ImageColorGenerator
import matplotlib.pyplot as plt
from PIL import Image
import numpy as np
from pyecharts import options as opts
from pyecharts.charts import TreeMap
from matplotlib.patches import ConnectionPatch
plt.rcParams['font.sans-serif']=['SimHei']
#sentiment.train('neg.txt', 'pos.txt')
#sentiment.save('QuanShi.marshal')
df = pd.read_csv('AllData.csv',encoding = 'gb18030')
df['ZAN'] = df['ZAN'].replace('list index out of range',0)
df['ZAN'] = df['ZAN'].apply(int)
'''
lis = df['comment'][80:100].tolist()
for text in lis:
s = SnowNLP(text)
print(text,'\n',s.sentiments)
#df.dropna(subset=['comment'],inplace=True)
qs = []
for i in df['comment'].tolist():
try:
qs.append(SnowNLP(i).sentiments)
except:
qs.append(0)
df['拳师score'] = qs
df['拳师'] = df['拳师score'].apply(lambda x:'yes' if x >= 0.5 else 'no')
df.to_csv('AllData.csv',index = 0,encoding = 'gb18030')
'''
dfY = df[df['拳师'] == 'yes']
dfN = df[df['拳师'] == 'no']
for example in random.sample(dfY['comment'].tolist(), 10):
print(example)
sex_propor = len(dfY[dfY['sex'] == '女']) / len(dfY)
print(sex_propor)
# 绘制词云
def draw_wordcloud(s,filename):
#读入一个txt文件
comment_text = s
#结巴分词,生成字符串,如果不通过分词,无法直接生成正确的中文词云
cut_text = " ".join(jieba.cut(comment_text))
#color_mask = imageio.imread("rock.png") # 读取背景图片
#color_mask = np.array(Image.open('rock.png'))
cloud = WordCloud(
font_path = 'msyhl.ttc',
#设置背景色
background_color = 'white',
#词云形状
#mask = color_mask,
#允许最大词汇
max_words = 2000,
#最大号字体
max_font_size = 50,
scale = 6,#分辨率
)
word_cloud = cloud.generate(cut_text) # 产生词云
plt.figure(figsize = (10,10),dpi = 80)
word_cloud.to_file('img/'+filename) #保存图片
plt.axis('off')
# 显示词云图片
plt.imshow(word_cloud)
s = ''
for i in dfN['comment']:
s += str(i) + '。'
draw_wordcloud(s,'非女权.png')
s = ''
for i in dfY['comment']:
s += str(i) + '。'
draw_wordcloud(s,'女权.png')
plt.figure(dpi = 120)
x = [len(dfY), len(dfN)]
explode = [0.1,0.01]
labels = ['拳师','非拳师']
plt.pie(x = x,
explode = explode,
labels = labels,
autopct = '%3.2f%%')
plt.title('总体分布')
plt.savefig('img/总体分布.png')
plt.show()
plt.figure(dpi = 120)
x = [len(dfY[dfY['sex'] == '女']), len(dfY[dfY['sex'] == '男'])]
explode = [0.01,0.01]
labels = ['女','男']
plt.pie(x = x,
explode = explode,
labels = labels,
autopct = '%3.2f%%')
plt.title('性别')
plt.savefig('img/性别.png')
plt.show()
plt.figure(figsize=(15,15),dpi = 120)
df_acre = df.groupby('acre')['id'].count()
x = df_acre.values
labels = df_acre.index
explode = [0.001] * len(x)
plt.pie(x = x,
explode = explode,
labels = labels,
autopct = '%3.2f%%')
plt.title('地区')
plt.savefig('img/地区.png')
plt.show()
df['lable'] = df['拳师score'].apply(lambda x:1 if x >= 0.5 else 0)
replyY = dfY['reply'].mean()
replyN = dfN['reply'].mean()
weiboY = dfY['weibo'].mean()
weiboN = dfN['weibo'].mean()
befanY = dfY['befan'].mean()
befanN = dfN['befan'].mean()
fanY = dfY['fan'].mean()
fanN = dfN['fan'].mean()
zanY = dfY['ZAN'].mean()
zanN = dfN['ZAN'].mean()
plt.figure(dpi = 120)
x = ['回复数(评论)','点赞数(评论)','微博数','关注数','粉丝数']
index = np.arange(len(x))
bar_width = 0.45
y1 = [replyY, zanY, weiboY, befanY, fanY]
y2 = [replyN, zanN, weiboN, befanN, fanN]
plt.bar(index, y1, bar_width, label = '女权')
plt.bar(index+bar_width, y2, bar_width, label = '非女权')
plt.xticks(index+bar_width/2, x)
plt.legend()
plt.title('数据对比')
plt.xlabel('指标均值')
for a,b in zip(index,y1):
plt.text(a, b+10,'%.1f'%b, ha = 'center',va = 'bottom')
for a,b in zip(index,y2):
plt.text(a+bar_width, b+10,'%.1f'%b, ha = 'center',va = 'bottom')
plt.savefig('img/各项指标分析.png')
plt.show()
| [
"workfc@163.com"
] | workfc@163.com |
cb840373802f4a2f053aa9b6db014d5a830284dd | 404cb0431675327a751f7a6f422f53288a92b85b | /chirp/library/order_test.py | 33fccabf573816f97b45246bff10199393e598bb | [
"Apache-2.0"
] | permissive | chirpradio/chirpradio-machine | ade94d7ac9ded65f91e1b3845be408723c0501da | 6fea6a87f2eb3cfac2a47831892c9ce02163b03b | refs/heads/master | 2023-09-01T02:57:07.749370 | 2023-08-28T23:57:46 | 2023-08-28T23:57:46 | 2,330,078 | 9 | 10 | Apache-2.0 | 2018-03-16T01:26:29 | 2011-09-05T19:10:48 | Python | UTF-8 | Python | false | false | 3,525 | py | #!/usr/bin/env python
import unittest
import mutagen.id3
from chirp.library import order
class OrderTest(unittest.TestCase):
def test_decode(self):
test_cases = (("1", 1, None),
(" 6", 6, None),
("006", 6, None),
("1/2", 1, 2),
("3 of 7", 3, 7),
("03anything04", 3, 4))
for text, order_num, max_num in test_cases:
self.assertEqual((order_num, max_num), order.decode(text))
# These should not be parseable.
error_test_cases = ("", "xxx", "0", "-1", "0/3", "3/", "3/0", "6/5",
"-1/4", "2/-1", "2/-", "3-4", "3/0")
for text in error_test_cases:
self.assertRaises(order.BadOrderError, order.decode, text)
def test_encode(self):
test_cases = ((1, 3, "1/3"), (7, None, "7"))
for order_num, total_num, expected_text in test_cases:
self.assertEqual(expected_text, order.encode(order_num, total_num))
error_test_cases = ((7, 5), (0, 3), (-1, 3), (4, 0), (4, -1))
for order_num, total_num in error_test_cases:
self.assertRaises(order.BadOrderError,
order.encode, order_num, total_num)
def test_standardize_str(self):
self.assertEqual("3", order.standardize_str(" 3 "))
self.assertEqual("3/7", order.standardize_str("3 of 7"))
def test_standardize(self):
tag = mutagen.id3.TRCK(text=["3 of 7"])
order_num, max_num = order.standardize(tag)
self.assertEqual(["3/7"], tag.text)
self.assertEqual(3, order_num)
self.assertEqual(7, max_num)
def test_is_archival(self):
self.assertTrue(order.is_archival("3/7"))
self.assertFalse(order.is_archival("bad"))
self.assertFalse(order.is_archival("3"))
self.assertFalse(order.is_archival("3 of 7"))
self.assertFalse(order.is_archival("7/3"))
self.assertFalse(order.is_archival(" 3/7"))
def test_verify_and_standardize_str_list(self):
# Check the simplest valid case.
self.assertEqual(["1/1"], order.verify_and_standardize_str_list(["1"]))
# Check an already-standardized set.
self.assertEqual(
["1/4", "3/4", "2/4", "4/4"],
order.verify_and_standardize_str_list(
["1/4", "3/4", "2/4", "4/4"]))
# Check strings without a max number.
self.assertEqual(
["1/4", "3/4", "2/4", "4/4"],
order.verify_and_standardize_str_list(["1", "3", "2", "4"]))
# Check mixed formats.
self.assertEqual(
["1/4", "3/4", "2/4", "4/4"],
order.verify_and_standardize_str_list(["1", "3/4", "2", "4 of 4"]))
# Check empty list.
self.assertRaises(order.BadOrderError,
order.verify_and_standardize_str_list, [])
# Check garbage in list.
self.assertRaises(order.BadOrderError,
order.verify_and_standardize_str_list, ["xxx"])
# Check treatment of gaps.
self.assertRaises(order.BadOrderError,
order.verify_and_standardize_str_list,
["1", "2", "4"])
# Check bad max number.
self.assertRaises(order.BadOrderError,
order.verify_and_standardize_str_list,
["1/5", "3/5", "2/5", "4/5"])
if __name__ == "__main__":
unittest.main()
| [
"kumar.mcmillan@gmail.com"
] | kumar.mcmillan@gmail.com |
775d4e974bbace6a037417248f6885324aebea6a | 85764904e918310f9e4a209f64570dcdcf099818 | /loutilities/user/roles.py | 3df49ee5b5f5d63b1edda6261c2976dbd2e6b5e1 | [
"Apache-2.0"
] | permissive | louking/loutilities | 05bb20994ae06d2e68989cd6a779c350a9a430ad | aaf7410849d0167001cd5f06ab0dae6563e58ec7 | refs/heads/master | 2023-07-24T18:32:36.128102 | 2023-07-15T10:02:43 | 2023-07-15T10:02:43 | 5,824,315 | 2 | 2 | null | 2023-05-10T09:59:37 | 2012-09-15T21:29:29 | Python | UTF-8 | Python | false | false | 3,136 | py | ###########################################################################################
# roles - common location for xtility role declaration
#
# Date Author Reason
# ---- ------ ------
# 03/11/20 Lou King Create
#
# Copyright 2020 Lou King. All rights reserved
###########################################################################################
from loutilities.user.model import APP_CONTRACTS, APP_MEMBERS, APP_ROUTES, APP_SCORES, APP_ALL
# common roles
ROLE_SUPER_ADMIN = 'super-admin'
ROLES_COMMON = [ROLE_SUPER_ADMIN]
roles_common = [
{'name': 'super-admin', 'description': 'allowed to do everything on all applications', 'apps': APP_ALL},
]
# members roles
ROLE_LEADERSHIP_ADMIN = 'leadership-admin'
ROLE_LEADERSHIP_MEMBER = 'leadership-member'
ROLE_MEMBERSHIP_ADMIN = 'membership-admin'
ROLE_MEETINGS_ADMIN = 'meetings-admin'
ROLE_MEETINGS_MEMBER = 'meetings-member'
ROLE_RACINGTEAM_ADMIN = 'racingteam-admin'
ROLE_RACINGTEAM_MEMBER = 'racingteam-member'
roles_members = [
{'name': ROLE_LEADERSHIP_ADMIN, 'description': 'access to leadership tasks for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_LEADERSHIP_MEMBER, 'description': 'user of leadership tasks for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_MEMBERSHIP_ADMIN, 'description': 'access to membership admininstration for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_MEETINGS_ADMIN, 'description': 'access to meetings administration for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_MEETINGS_MEMBER, 'description': 'user of meetings for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_RACINGTEAM_ADMIN, 'description': 'access to racingteam administration for members application', 'apps':[APP_MEMBERS]},
{'name': ROLE_RACINGTEAM_MEMBER, 'description': 'user of racingteam module for members application', 'apps':[APP_MEMBERS]},
]
# routes roles
ROLE_ROUTES_ADMIN = 'routes-admin'
ROLE_ICON_ADMIN = 'icon-admin'
roles_routes = [{'name': ROLE_ROUTES_ADMIN, 'description': 'access to routes for routes application', 'apps':[APP_ROUTES]},
{'name': ROLE_ICON_ADMIN, 'description': 'access to icons for routes application', 'apps':[APP_ROUTES]}
]
# contracts roles
ROLE_EVENT_ADMIN = 'event-admin'
ROLE_SPONSOR_ADMIN = 'sponsor-admin'
roles_contracts = [{'name': ROLE_EVENT_ADMIN, 'description': 'access to events for contracts application', 'apps':[APP_CONTRACTS]},
{'name': ROLE_SPONSOR_ADMIN, 'description': 'access to sponsors/races for contracts application', 'apps':[APP_CONTRACTS]}
]
# scores roles
ROLE_SCORES_ADMIN = 'scores-admin'
ROLE_SCORES_VIEWER = 'scores-viewer'
roles_scores = [{'name': ROLE_SCORES_ADMIN, 'description': 'administer scores application', 'apps':[APP_SCORES]},
{'name': ROLE_SCORES_VIEWER, 'description': 'view scores application', 'apps':[APP_SCORES]},
]
all_roles = [roles_common, roles_contracts, roles_members, roles_routes, roles_scores] | [
"lking@pobox.com"
] | lking@pobox.com |
6db98282426351cd2e6b5f2fb0f62ec064600def | a0215172ddf6663916b15cea3879e5b0ebdfddd9 | /scripts/yum_pkglist_from_ks.py | 797dbaad45ebdaaae277b2bcf954298c6fd09579 | [
"Apache-2.0"
] | permissive | perfsonar/toolkit-building | fa502a9ec04ba951a75a82c5b02ae3db69651bca | aca87d58649d3bc95c5b6768bf307702331f2535 | refs/heads/master | 2022-06-14T03:36:20.681776 | 2020-10-30T20:05:29 | 2020-10-30T20:05:29 | 32,428,947 | 2 | 3 | null | 2016-07-22T12:59:48 | 2015-03-18T00:43:47 | Shell | UTF-8 | Python | false | false | 4,393 | py | #########################################################################################
# Name: yum_pkglist_from_ks.py
# Usage: python yum_pkglist_from_ks.py [options] kickstart outfile
# Description:
# This script takes a kickstart file, extracts the package list, and finds all the
# dependencies in the dependency tree for those packages. This list is then output to a
# file with each package name on a line. The output file can be passed to a tool such as
# yum downloader to download all the packages needed for the kickstart. This can be
# especially useful when building custom Linux ISOs.
########################################################################################
import yum
import optparse
import sys
################
# Setup CLI opts
################
parser = optparse.OptionParser(usage="python %prog [options] kickstart outfile")
parser.add_option('-i', '--installroot', dest="installroot", help="Install root for yum. Useful in chroot environments. Defaults to '/'.")
(options, args) = parser.parse_args()
if len(args) != 2:
parser.print_help()
kickstart_path = args[0]
outfile = args[1]
###################
# Parse Kickstart
###################
kickstart = open(kickstart_path)
found_packages = False
input_pkg_names = []
input_pkg_groups = []
for line in kickstart:
line = line.rstrip().lstrip()
if not line:
continue
elif line.startswith("#"):
continue
elif line.startswith("%end"):
break
elif found_packages:
if line.startswith("@"):
input_pkg_groups.append(line.replace("@", ""))
else:
input_pkg_names.append(line)
elif line.startswith("%packages"):
found_packages = True
###################
# Initialize yum
###################
yb = yum.YumBase()
yb.conf.assumeyes = True
if options.installroot:
yb.conf.installroot = options.installroot
############################
# Form initial package lists
############################
raw_pkg_names = {}
output_pkg_names = []
missing_pkg_names = []
pkg_names = input_pkg_names
##
# Expand package groups and add to inital package list
for input_pkg_group in input_pkg_groups:
g = yb.comps.return_group(input_pkg_group)
for p in g.packages:
if p not in pkg_names:
pkg_names.append(p)
############################
# Walk the dependency tree
############################
while pkg_names:
pkj_objs = []
while pkg_names:
pkg_name = pkg_names.pop()
##
# searchProvides allows us to look fo packages in lots of different forms
# e.g perl(LWP) or perl-LWP
results = yb.pkgSack.searchProvides(name=pkg_name)
if not results:
if pkg_name not in missing_pkg_names:
##
# if we didn't find it, may not be a big deal. make sure we mark
# as visited though so we don't loop forever
missing_pkg_names.append(pkg_name)
continue
for r in results:
# use r.name to normalize package name to what yum actually calls it
raw_pkg_names[r.name] = 1
##
# Add pkg_name to list so we can also make we track searches we've already done
# where a specific package name was not given. e.g perl(LWP) vs perl-LWP
output_pkg_names.append(pkg_name)
pkj_objs.append(results[0])
##
# For each package found go through the dependencies and find ones we haven't seen yet
deps = yb.findDeps(pkj_objs)
for parent_pkg in deps:
for dep in deps[parent_pkg]:
if (dep[0] not in output_pkg_names) and (dep[0] not in missing_pkg_names) and (dep[0] not in pkg_names):
pkg_names.append(dep[0])
################
# Output to file
################
fout = open(outfile, "w")
##
# Print out the package names as we saw them in kickstart and dependency list except for
# names like perl(LWP), libX.so, /usr/bin/python that yumdownloader won't take. This may
# be overkill and lead to some duplicates in the list, but ensures we get all we need
for r in output_pkg_names:
if r.startswith("/"):
continue
elif "." in r:
continue
elif "(" in r:
continue
fout.write("%s\n" % r)
##
# Print the nicely formatted package names
for r in raw_pkg_names:
fout.write("%s\n" % r)
fout.close()
| [
"andy@es.net"
] | andy@es.net |
6137faebb9a642e09f39a277fe0b98ca4709d399 | 8398421e297b61c345f81005940f01aa79e2bf53 | /subwindows/delete_orphans/delete_orphans_creator.py | f1e9c501371625d623e3f3f2f90533ab408fc85d | [] | no_license | fcunhaneto-test/mscollection_qt | e206a90eb8f3a80885d1067648c718f32a6d2057 | 3ccc26087d503aa833d0725e93f927a86800d7c0 | refs/heads/master | 2022-11-09T20:16:41.730677 | 2019-06-15T10:48:58 | 2019-06-15T10:48:58 | 166,302,027 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,376 | py | import os
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QColor, QFont
from PyQt5.QtWidgets import QMdiSubWindow, QTableWidget, QWidget, \
QTableWidgetItem, QVBoxLayout, QHBoxLayout, QCheckBox
import texts
from db.db_model import Creator, SeriesCreator
from db.db_settings import Database as DB
from lib.function_lib import hbox_create, pb_create, delete_orphans
class DeleteOrphansCreator(QMdiSubWindow):
def __init__(self, main):
"""
Class for delete creators who are orphan in database.
:param main: Reference for main windows.
"""
super(DeleteOrphansCreator, self).__init__()
self.session = DB.get_session()
self.creator = self.session.query(Creator)
self.main = main
window_title = texts.delete_orphans + ' ' + texts.creator_p
self.setWindowTitle(window_title)
self.subwindow = QWidget()
p = self.palette()
p.setColor(self.backgroundRole(), QColor(230, 230, 250))
self.setPalette(p)
self.setWidget(self.subwindow)
font = QFont()
font.setPointSize(12)
# Vbox Main
self.vbox_main = QVBoxLayout(self.subwindow)
self.vbox_main.setContentsMargins(20, 20, 20, 20)
self.vbox_main.setSpacing(10)
# Table Cast
self.table = QTableWidget()
self.table.setColumnCount(2)
self.table.setContentsMargins(20, 0, 0, 0)
self.headers = [
texts.creator_s,
'Del'
]
self.table.setHorizontalHeaderLabels(self.headers)
# table set column width
w = int(0.5 * main.frameSize().width())
col_1 = int(0.60 * (w - 50))
col_2 = int(0.20 * (w - 50))
col_width = col_1 + col_2 + 4
self.table.setColumnWidth(0, col_1)
self.table.setColumnWidth(1, col_2)
self.table.rowHeight(30)
self.table.setFixedWidth(col_width)
self.table.horizontalHeader().setFont(font)
self.table.horizontalHeader().setStyleSheet(
'background-color: rgb(230, 230, 230);')
self.table.verticalHeader().setVisible(False)
self.rows = 0
self.ch_del = []
self.vbox_main.addWidget(self.table)
# Buttons
self.pb_delete = pb_create(texts.pb_delete, 12, 40)
self.pb_delete.setMinimumHeight(40)
self.pb_delete.setShortcut('Ctrl+D')
self.pb_delete.clicked.connect(self.delete)
self.pb_leave = pb_create(texts.pb_leave, 12, 40)
self.pb_leave.setMinimumHeight(40)
self.pb_leave.setShortcut('Ctrl+Q')
self.pb_leave.clicked.connect(self.close)
self.pb_help = pb_create(texts.pb_help, height=40)
self.pb_help.setMinimumHeight(40)
self.pb_help.clicked.connect(self.help)
self.pb_help.setShortcut('Ctrl+H')
self.pb_select_all = pb_create(texts.pb_select_all, 12, 40)
self.pb_select_all.setMinimumHeight(40)
self.pb_select_all.setShortcut('Ctrl+A')
self.pb_select_all.clicked.connect(self.select_all)
self.hb_pb = QHBoxLayout()
self.hb_pb.setSpacing(10)
self.hb_pb.addWidget(self.pb_delete)
self.hb_pb.addWidget(self.pb_leave)
self.hb_pb.addWidget(self.pb_help)
self.hb_pb.addWidget(self.pb_select_all)
self.vbox_main.addLayout(self.hb_pb)
self.width = col_width + 44
self.height = int(0.8 * main.frameSize().height())
self.setGeometry(0, 0, self.width, self.height)
self.create_table()
def create_table(self):
"""
Create a table for show all orphan creators info and with a QCheckBox
that if is check the actor will be delete.
"""
sub = self.session.query(SeriesCreator.creator_id)
sub = sub.distinct()
creator_result = self.creator.filter(Creator.id.notin_(sub)).all()
for creator in creator_result:
self.table.insertRow(self.rows)
self.table.setItem(self.rows, 0, QTableWidgetItem(creator.name))
ch_del = QCheckBox(str(creator.id))
self.ch_del.append(ch_del)
hb_del = hbox_create([self.ch_del[self.rows]], 0)
hb_del.setAlignment(Qt.AlignCenter)
cell_del = QWidget()
cell_del.setLayout(hb_del)
self.table.setCellWidget(self.rows, 1, cell_del)
if self.rows % 2 != 0:
self.table.item(self.rows, 0).setBackground(
QColor(230, 230, 230)
)
self.table.cellWidget(self.rows, 1).setStyleSheet(
'background-color: #E6E6E6;'
'color: #E6E6E6;'
)
else:
self.table.cellWidget(self.rows, 1).setStyleSheet(
'color: #FFFFFF;'
)
self.table.item(self.rows, 0).setFlags(
Qt.ItemIsSelectable | Qt.ItemIsEnabled
)
self.rows += 1
height = self.rows * 30 + 20
self.table.setMinimumHeight(height)
self.height = height + 130
self.setGeometry(0, 0, self.width, self.height)
def delete(self):
"""
Delete creator from database.
"""
delete_orphans(self.session, self.ch_del, Creator, texts.creator_s)
self.clear()
self.create_table()
def select_all(self):
"""
Mark all delete QCheckBox.
"""
for ch in self.ch_del:
ch.setChecked(True)
# Clear
def clear(self):
"""
Clear all values in windows.
"""
for row in range(self.rows):
self.table.removeRow(row)
self.table.clear()
self.table.setColumnCount(2)
self.table.setRowCount(0)
self.table.setHorizontalHeaderLabels(self.headers)
self.rows = 0
self.ch_del = []
self.session.expire_all()
# Help
def help(self):
# I have to perform help preview functions on the main because the bug
# "stack_trace posix.cc (699)" does not let the page find its directory.
dir = os.getcwd()
url = 'file:///' + dir + '/views_help/help_delete_orphans.html'
self.main.views_help(url, texts.help_edit_movie)
# Close Event
def closeEvent(self, event):
self.session.close() | [
"fcunhaneto@gmail.com"
] | fcunhaneto@gmail.com |
c4fb0116985e3ace94fc0fe7bbfb80ab7f53d331 | 7edb6f64afb9a9d5fd2b712faae9841d45c3a3b3 | /monkeyAndPerformance/allCode/performanceTest/traffic/traffic.py | 9edb99221fc8f0b920e0abebe9a4f074378baddb | [] | no_license | Hanlen520/AppSpecialTest | 413babbbecbeaa8e25dd1fd70dd349a1de07eb5e | 06f69f116245162220985ad2632fbff3af72450c | refs/heads/master | 2023-04-22T19:59:35.523780 | 2019-08-08T09:48:28 | 2019-08-08T09:48:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,398 | py | import csv,os,time
from config.config import *
from monkeyAndPerformance.allCode.util.gettimestr import GetTimeStr
gettimestr = GetTimeStr() #实例化GetTimeStr
#控制类
class Controller(object):
    """Sample an Android app's network traffic over adb and save a CSV report.

    The counters come from /proc/<pid>/net/dev on the device, so each row is
    a cumulative byte count (in KB); the consumed traffic for a run is the
    last sample minus the first.
    """
    def __init__(self):
        self.counter = RunTrafficCount # number of samples still to take (from config)
        # Rows collected so far; the first tuple is the CSV header.
        self.alldata = [("deviceid","appversion","timestamp", "traffic")] # data to save: ids, timestamp and traffic
    # Take a single traffic sample.
    def TestProcessOnce(self):
        # Find the app's process on the device via adb.
        cmd = 'adb shell "ps | grep %s"' % AppPackageName # locate the process
        content = os.popen(cmd)
        result = content.readlines()
        print("result:%s"% result)
        print("result.length:%s" % len(result))
        if len(result):
            # Extract the pid from the first matching ps line.
            # pid = result[0].split(" ")[5]
            pid = result[0].split(" ")[3]
            print("result[0].split():%s" % result[0].split(" "))
            print("pid:%s"% pid)
            self.DeleyTime(3)
            # Read the per-process network counters.
            cmd = 'adb shell cat /proc/%s/net/dev'% pid # read the traffic counters
            content = os.popen(cmd)
            traffic = content.readlines()
            print("traffic:%s"% traffic)
            # Parse the wlan0 row for received/transmitted byte counts.
            for line in traffic:
                print("line:%s" % line)
                if "wlan0" in line:
                    # Collapse whitespace runs into '#' separators.
                    line = "#".join(line.split())
                    print("line##:%s"% line)
                    # Split on '#': field 1 is bytes received, field 9 bytes sent.
                    receive = line.split("#")[1]
                    print("receive#:%s"%receive)
                    transmit = line.split("#")[9]
                    print("transmit##:%s"% transmit)
                # if "eth0" in line:
                # #将所有空行换成#
                # line = "#".join(line.split())
                # #按#号拆分,获取收到和发出的流量
                # receive = line.split("#")[1]
                # transmit = line.split("#")[9]
                # elif "eth1" in line:
                # # 将所有空行换成#
                # line = "#".join(line.split())
                # # 按#号拆分,获取收到和发出的流量
                # receive2 = line.split("#")[1]
                # transmit2 = line.split("#")[9]
            # Total traffic is received + transmitted.
            # alltraffic = int(receive) + int(transmit) + int(receive2) + int(transmit2)
            alltraffic = int(receive) + int(transmit)
            # Convert bytes to KB.
            alltraffic = alltraffic/1024
            currenttime = self.GetCurrentTime() # timestamp for this sample
            # Append the sample as one CSV row.
            self.alldata.append((TestDeviceID,AppVersion,currenttime,alltraffic)) # record into self.alldata
        else:
            print("没有获取到相应进程,请确定打开相应的app")
    # Sleep helper.
    def DeleyTime(self,delaytime):
        delaytime = int(delaytime)
        time.sleep(delaytime) # block for the requested number of seconds
        print("等待%s秒..."% delaytime)
    # Run the sampling loop self.counter times.
    def RunMore(self):
        # Put the phone into a non-charging state so traffic stats are realistic.
        cmd = 'adb shell dumpsys battery set status 1'
        os.popen(cmd)
        self.DeleyTime(3)
        print("循环开始时间:%s" % self.GetCurrentTime() )
        while self.counter>0: # while samples remain
            self.TestProcessOnce() # take one sample
            self.counter = self.counter -1 # one fewer remaining
            self.DeleyTime(5) # sample roughly every 5 seconds
            gettimestr.outPutMyLog("流量统计剩余运行次数为:%s" % self.counter)
        print("循环结束时间:%s" % self.GetCurrentTime())
    # Human-readable timestamp used when storing a data row.
    def GetCurrentTime(self):
        currenttime = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime()) # current local time
        return currenttime # "YYYY-mm-dd HH:MM:SS"
    # Compact timestamp (no separators), suitable for file names.
    def GetCurrentTimeString(self):
        currenttime = time.strftime("%Y%m%d%H%M%S", time.localtime()) # current local time
        return currenttime # "YYYYmmddHHMMSS"
    # Persist the collected rows to a CSV file under codeResult/.
    def SaveDataToCSV(self,timestr):
        basedir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) + "/" + "codeResult"
        nyrsfmdir = gettimestr.createNYRSFMdir(basedir,timestr)
        csvfile = "%s/%s_%s" % (nyrsfmdir,timestr,AppTrafficCSVFile)
        opencsvfile = open(csvfile, "w",newline="") # newline="" avoids the blank rows csv adds on Python 3
        writercsv = csv.writer(opencsvfile) # CSV writer over the file
        writercsv.writerows(self.alldata) # write header plus all sampled rows
        opencsvfile.close() # flush and close the file
        print("数据:%s" % self.alldata)
        print("数据保存路径:%s"% csvfile)
        print("流量消耗:最后一次的流量值减去第一次的流量值,就是本次操作消耗的流量值")
    def run(self,timestr): # entry point: sample then save
        self.RunMore()
        self.SaveDataToCSV(timestr)
if __name__ == "__main__":
    # Build one timestamp for this run, then sample traffic and export the CSV.
    timestr = gettimestr.getTimeStr()
    controller = Controller()
    controller.run(timestr)
"410287958@qq.com"
] | 410287958@qq.com |
dd3f9af7d6f306b028114ba50b54b6d9a3c66546 | eff6be2d99ba83a1080cdacb2094fa6aaec1477c | /.venv/bin/jupyter-troubleshoot | 3bbf5b79a436aed52757e87445950d074d852a92 | [] | no_license | CFtriksX/Embeded_AI_A1 | 1b1b5559faf6e7569ba225b71ca60406ccf49a39 | d2fc13305293677d2f529ed86ce06f4f7c0afc0d | refs/heads/main | 2023-04-13T21:19:43.129144 | 2021-04-20T17:17:09 | 2021-04-20T17:17:09 | 359,892,756 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 263 | #!/home/paulgelas/assignment/assignment1/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from jupyter_core.troubleshoot import main
if __name__ == '__main__':
    # Strip the "-script.pyw"/".exe" suffix that setuptools console wrappers
    # add on Windows so the tool reports a clean program name, then exit with
    # the status code of jupyter_core's troubleshoot entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"paulgelas@desktop-c57a2a9.home"
] | paulgelas@desktop-c57a2a9.home | |
25e372cb14bdc5d7011802d05410d01a864a361a | 7f8d2288dc8d81275269bdb8e8f196339a52d30d | /code/1010_solution.py | c14133019520efb5a27564644e2a7e131773bfda | [] | no_license | ishaansharma/leetcode-3 | f9cab568c31322e2bf84768264f3c644182cd470 | 9081dd3ff86409d554b0298a8152ed40a6befa96 | refs/heads/master | 2023-03-25T15:36:04.235650 | 2021-03-30T20:15:45 | 2021-03-30T20:15:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 229 | py | class Solution:
def numPairsDivisibleBy60(self, time: List[int]) -> int:
count = 0
seen = [0] * 60
for t in time:
count += seen[-t % 60]
seen[t % 60] += 1
return count
| [
"noreply@github.com"
] | ishaansharma.noreply@github.com |
8ff2e48f09e238f0d2399ee7edab5a7d44af8ee2 | 142122fb03679fe84d54ccae65493416b925fe25 | /code/main_classification.py | 1104890bc6c75b9d8b475ab8f2a841ee4d69ff00 | [] | no_license | mshaikh2/IPMI2021 | dce84bf37204a1518b776f1dc50e0b4078e3744f | cb4eb39f707995f5cfc2b54d54ed5d763f508e1e | refs/heads/master | 2023-01-28T16:55:06.184132 | 2020-12-15T02:18:32 | 2020-12-15T02:18:32 | 316,624,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,152 | py | from __future__ import print_function
from misc.config import Config
from dataset_classification import IUDataset, build_dataset
from trainer_classification import JoImTeR as trainer
import os
# import os
import sys
import time
import random
import pprint
import datetime
import dateutil.tz
import argparse
import numpy as np
import pandas as pd
import torch
import torchvision.transforms as transforms
import pickle
dir_path = (os.path.abspath(os.path.join(os.path.realpath(__file__), './.')))
sys.path.append(dir_path)
cfg = Config()
def parse_args():
    """Parse command-line options for the training script.

    Returns an argparse namespace with:
      * gpu_id   -- int from ``--gpu`` (default -1, meaning "no GPU")
      * data_dir -- str from ``--data_dir`` (default '', meaning "use config")
    """
    cli = argparse.ArgumentParser(description='')
    cli.add_argument('--gpu', dest='gpu_id', type=int, default=-1)
    cli.add_argument('--data_dir', dest='data_dir', type=str, default='')
    return cli.parse_args()
if __name__ == "__main__":
    args = parse_args()
    # --gpu of -1 means "no GPU": disable CUDA and stay on CPU.
    if args.gpu_id != -1:
        cfg.GPU_ID = args.gpu_id
    else:
        cfg.CUDA = False
    if args.data_dir != '':
        cfg.DATA_DIR = args.data_dir
    # Seed CPU (and, when CUDA is on, all devices) for reproducibility.
    torch.manual_seed(cfg.seed)
    if cfg.CUDA:
        torch.cuda.manual_seed_all(cfg.seed)
    ########################################
    # Timestamped output directory keeps successive runs from clobbering
    # each other's checkpoints and logs.
    now = datetime.datetime.now(dateutil.tz.tzlocal())
    timestamp = now.strftime('%Y_%m_%d_%H_%M_%S')
    # LAMBDA_FT,LAMBDA_FI,LAMBDA_DAMSM=01,50,10
    output_dir = '../output/%s_%s_%s'%(cfg.DATASET_NAME, cfg.CONFIG_NAME, timestamp)
    # Training loader: shuffled, drops the last partial batch.
    data_set = build_dataset('train', cfg)
    train_loader = torch.utils.data.DataLoader(
        data_set, batch_size=cfg.batch_size, drop_last=True,
        shuffle=True, num_workers=cfg.num_workers)
    # Validation loader: fixed order, keeps every sample.
    data_set = build_dataset('val', cfg)
    val_loader = torch.utils.data.DataLoader(
        data_set, batch_size=cfg.val_batch_size, drop_last=False,
        shuffle=False, num_workers=cfg.num_workers)
    # Define models and go to train/evaluate
    algo = trainer(output_dir, train_loader, val_loader)
    start_t = time.time()
    algo.train()
    end_t = time.time()
    print('Total time for training:', end_t - start_t)
| [
"mshaikh2@buffalo.edu"
] | mshaikh2@buffalo.edu |
3e637f3de409c402ebe0b44c9d3ce320ed721c64 | c7d39fd93d6c616cf6adc005bab9298947c305f6 | /library/sns_command.py | c8b876f261a3be3ad63b4cb3089ece8fd7da7cae | [
"Apache-2.0"
] | permissive | mareckis/SNS-tests | 810d80612ced7b0dabd7a907c8c5169ba24c0e72 | e939c5830067671a217bbf6d878b8ba3481bcd32 | refs/heads/master | 2023-01-22T04:49:05.179410 | 2020-12-02T19:55:22 | 2020-12-02T19:55:22 | 259,371,654 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,053 | py | #!/usr/bin/python
# Copyright: (c) 2018, Stormshield https://www.stormshield.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: sns_command
short_description: API client to configure Stormshield Network Security appliances
description:
This module executes configuration commands or scripts on the remote appliance.
Configuration API reference: https://documentation.stormshield.eu/SNS/v3/en/Content/CLI_Serverd_Commands_reference_Guide_v3/Introduction.htm
options:
script:
description:
- Configuration script to execute
  expect_disconnect:
    description:
      - Set to True if the script makes the remote server disconnect (i.e. install a firmware update)
  force_modify:
    description:
      - Set to true to disconnect other administrators already connected with the modify privilege.
appliance:
description:
- appliance connection's parameters (host, port, user, password, sslverifypeer, sslverifyhost, cabundle, usercert, proxy)
author:
- Remi Pauchet (@stormshield)
notes:
- This module requires python-SNS-API library
'''
EXAMPLES = '''
- name: Get appliance properties
sns_command:
script: "SYSTEM PROPERTY"
appliance:
host: myappliance.local
password: mypassword
delegate_to: localhost
- name: Update firmware with a local update file
sns_command:
script: |
SYSTEM UPDATE UPLOAD < /tmp/fwupd-SNS-3.7.1-amd64-M.maj
SYSTEM UPDATE ACTIVATE
expect_disconnect: True
appliance:
host: myappliance.local
password: mypassword
delegate_to: localhost
'''
RETURN = '''
ret:
description: last command return code
returned: changed
type: int
sample: 100
output:
description: script execution output
returned: changed
type: string
sample: |
> CONFIG NTP SERVER LIST
101 code=00a01000 msg="Begin" format="section_line"
[Result]
name=fr.pool.ntp.org keynum=none type=host
100 code=00a00100 msg="Ok"
> HELP
101 code=00a01000 msg="Begin" format="raw"
AUTH : User authentication
CHPWD : Return if it's necessary to update password or not
CONFIG : Firewall configuration functions
GLOBALADMIN : Global administration
HA : HA functions
HELP : Display available commands
LIST : Display the list of connected users, show user rights (Level) and rights for current session (SessionLevel).
LOG : Log related functions.Everywhere a timezone is needed, if not specified the command is treated with firewall timezone setting.
MODIFY : Get / lose the modify or the mon_write right
MONITOR : Monitor related functions
NOP : Do nothing but avoid disconnection from server.
PKI : show or update the pki
QUIT : Log off
REPORT : Handling of reports
SYSTEM : System commands
USER : User related functions
VERSION : Display server version
100 code=00a00100 msg="Ok"
result:
description: last command output
returned: changed
type: string
sample: |
101 code=00a01000 msg="Begin" format="section_line"
[Result]
name=ntp1.stormshieldcs.eu keynum=none type=host
name=ntp2.stormshieldcs.eu keynum=none type=host
100 code=00a00100 msg="Ok"
data:
description: last parsed command result
type: complex
sample: |
{'Result': [
{'name': 'ntp1.stormshieldcs.eu', 'keynum': 'none', 'type': 'host'},
{'name': 'ntp2.stormshieldcs.eu', 'keynum': 'none', 'type': 'host'}
]}
'''
import re
from stormshield.sns.sslclient import SSLClient
from ansible.module_utils.basic import AnsibleModule
def main():
    """Ansible entry point: run one command or a whole script on the appliance.

    Exits through module.exit_json on success, or module.fail_json on any
    argument, connection, privilege or command error.
    """
    module = AnsibleModule(
        argument_spec={
            "command": {"required": False, "type": "str"},
            "script": {"required": False, "type": "str"},
            "expect_disconnect": {"required": False, "type":"bool", "default":False},
            "force_modify": {"required": False, "type":"bool", "default":False},
            "appliance": {
                "required": True, "type": "dict",
                "options": {
                    "host": {"required": True, "type": "str"},
                    "ip": {"required": False, "type": "str"},
                    "port": {"required": False, "type": "int", "default": 443},
                    "user": {"required": False, "type": "str", "default": "admin"},
                    "password": {"required": False, "type": "str"},
                    "sslverifypeer": {"required": False, "type": "bool", "default": True},
                    "sslverifyhost": {"required": False, "type": "bool", "default": True},
                    "cabundle": {"required": False, "type": "str"},
                    "usercert": {"required": False, "type": "str"},
                    "proxy": {"required": False, "type": "str"},
                }
            }
        }
    )
    # Matches empty or whitespace-only script lines (these are skipped).
    EMPTY_RE = re.compile(r'^\s*$')
    command = module.params['command']
    script = module.params['script']
    expect_disconnect = module.params['expect_disconnect']
    force_modify = module.params['force_modify']
    # Exactly one of command/script must be supplied.
    if command is None and script is None:
        module.fail_json(msg="A command or a script is required")
    if command is not None and script is not None:
        module.fail_json(msg="Got both command and script")
    # Build the API client; autoconnect=False defers the connection so that
    # construction errors and connection errors are reported separately.
    try:
        client = SSLClient(
            host=module.params['appliance']['host'],
            ip=module.params['appliance']['ip'],
            port=module.params['appliance']['port'],
            user=module.params['appliance']['user'],
            password=module.params['appliance']['password'],
            sslverifypeer=module.params['appliance']['sslverifypeer'],
            sslverifyhost=module.params['appliance']['sslverifyhost'],
            cabundle=module.params['appliance']['cabundle'],
            usercert=module.params['appliance']['usercert'],
            proxy=module.params['appliance']['proxy'],
            autoconnect=False)
    except Exception as exception:
        module.fail_json(msg=str(exception))
    try:
        client.connect()
    except Exception as exception:
        module.fail_json(msg=str(exception))
    # Optionally steal the "modify" privilege from other admin sessions.
    if force_modify:
        try:
            response = client.send_command("MODIFY FORCE ON")
        except Exception as exception:
            client.disconnect()
            module.fail_json(msg="Can't take Modify privilege: {}".format(str(exception)))
        # Return codes >= 200 are server-side errors for this API.
        if response.ret >= 200:
            client.disconnect()
            module.fail_json(msg="Can't take Modify privilege", result=response.output,
                             data=response.parser.serialize_data(), ret=response.ret)
    if command is not None:
        # execute single command
        try:
            response = client.send_command(command)
        except Exception as exception:
            client.disconnect()
            module.fail_json(msg=str(exception))
        client.disconnect()
        module.exit_json(changed=True, result=response.output,
                         data=response.parser.serialize_data(), ret=response.ret)
    else:
        # execute script, line by line, accumulating a transcript in `output`
        output = ""
        success = True
        need_reboot = False
        for command in script.splitlines():
            command = command.strip('\r\n')
            output += command + "\n"
            # Skip comment and blank lines without sending them.
            if command.startswith('#'):
                continue
            if EMPTY_RE.match(command):
                continue
            try:
                response = client.send_command(command)
                output += response.output + "\n"
                if response.ret >= 200:
                    success = False
                elif response.ret == client.SRV_RET_MUSTREBOOT:
                    need_reboot = True
            except Exception as exception:
                # A disconnect is expected for e.g. firmware activation; any
                # other error aborts the whole script.
                if expect_disconnect and str(exception) == "Server disconnected":
                    break
                else:
                    client.disconnect()
                    module.fail_json(msg=str(exception), output=output, success=False, need_reboot = need_reboot)
        client.disconnect()
        if success:
            module.exit_json(changed=True, output=output, success=True, need_reboot = need_reboot)
        else:
            module.fail_json(msg="Errors during the script execution", output=output, success=False, need_reboot = need_reboot)
if __name__ == '__main__':
    main()
| [
"remi.pauchet@stormshield.eu"
] | remi.pauchet@stormshield.eu |
ed52f2978ed7fd109f6361a4a2a3228b37559c87 | 09f09b393b8c909a22bbb84a21a50e007e35556e | /control.py | d576303e30aa86b271f597b6d2cd13a0a71b4a5d | [] | no_license | Henry-Hwang/audio-tools | abd61a0cf865ebeba3c42c40493efc7d22e35ffe | b19aac94b94577ca1ef0951cbbd4f83ab08ab3d3 | refs/heads/master | 2020-03-22T21:58:54.831972 | 2018-08-19T15:48:47 | 2018-08-19T15:48:47 | 140,726,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 863 | py | import os
import sys
import commands
import time
import argparse
import tparser
import tinycmd
from decimal import Decimal
class Codec(object):
    """Inspect audio codecs and sound cards on an Android device via adb.

    NOTE: this class uses Python 2 print statements.
    """
    name = ""  # codec name placeholder; not used by the methods below
    def __init__(self):
        pass
    def get_codecs(self):
        # Dump the ASoC codec list from debugfs and print each entry.
        cmdstr = "adb shell cat /d/asoc/codecs"
        print cmdstr
        result = os.popen(cmdstr)
        ret = result.read()
        codecs = ret.split('\n')
        self.codecs = codecs
        #asoc_t = Asoc()
        for i in range(len(codecs)):
            print self.codecs[i]
            #result = os.popen("adb shell cat " + paths[i])
            #ret = result.read()
            #if (ret.strip() != "closed"):
            #    print ret
    def get_snd_cards(self):
        # Read /proc/asound/cards and record the first card entry.
        cmdstr = "adb shell cat /proc/asound/cards"
        print cmdstr
        result = os.popen(cmdstr)
        ret = result.read()
        snds = ret.split('\n')
        # NOTE(review): self.snd_cards is never initialized in this class, so
        # this append raises AttributeError unless a caller sets the attribute
        # first -- confirm where snd_cards is expected to be created.
        self.snd_cards.append(snds[1].strip())
        for i in range(len(self.snd_cards)):
            print self.snd_cards[i]
    def get_dais(self):
        # Placeholder: DAI enumeration not implemented yet.
        pass
| [
"henry.huang@cirrus.com"
] | henry.huang@cirrus.com |
17d4ac7a0625e913523b131d25040cd6fe5c1260 | 8b64dba83a0f1bedf713faa0dcd96a218c80af08 | /app/requirement/migrations/0001_initial.py | d8bd78c43fe4041130960411584382bf4142437c | [] | no_license | chrxr/studyplan-test | 818a669dd85e74dd92393c5d006e2446c16f83b1 | 791b9400302d61f65cc37e9f3912cbcc9d4c041f | refs/heads/main | 2023-02-27T02:45:55.565870 | 2021-01-29T13:35:21 | 2021-01-29T13:35:21 | 329,410,529 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 491 | py | # Generated by Django 3.1.5 on 2021-01-13 19:33
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema migration: creates the Requirement table."""
    # First migration of the app, so it depends on nothing.
    initial = True
    dependencies = [
    ]
    operations = [
        # Requirement: auto-increment primary key plus a name column.
        migrations.CreateModel(
            name='Requirement',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
        ),
    ]
| [
"chrxr@outlook.com"
] | chrxr@outlook.com |
6cf223258093e6753c9230a394a1b18bb4ec23de | 6f7474a0e586e5aba19fef5ad1d02ba35f596cc7 | /exam_16_26/p_know_19.py | 0edd5f5feb8290f12cac42036535c777ba7c2003 | [] | no_license | dainioska/python-opencv | 690b71bf843d9e486a3a7152fe733e97720e86ff | 12c2baa67a9b462448233a1569b9d43fef60e139 | refs/heads/main | 2023-04-01T16:23:27.588108 | 2021-04-15T15:54:03 | 2021-04-15T15:54:03 | 343,664,442 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | #matplotlib filtering
import cv2
import numpy as np
from matplotlib import pyplot as plt
img = cv2.imread('samples/smarties.png')
img = cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
titles = ['image']
images = [img]
for i in range(1):
plt.subplot(1, 1, i+1), plt.imshow(images[i], 'gray')
plt.title(titles[i])
plt.xticks([]), plt.yticks([])
plt.show()
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"dainioshka@gmail.com"
] | dainioshka@gmail.com |
b8d7b70e9e7650a1684d15ecd33a819b51bcfc93 | 9a425f153816cd206451876b8da570a9446d76c4 | /reservations/admin.py | 0a1968bae83ee5649193989b193a5268f3beb9cb | [] | no_license | salhi100/airbnb-clone-2 | cea837f8dd24ab4e634ff7e2f0ce474581313642 | 685477bedc6fed18e40fb53febf22e4393796f4f | refs/heads/master | 2023-01-31T11:31:37.190244 | 2020-01-09T04:53:35 | 2020-01-09T04:53:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | from django.contrib import admin
from . import models
@admin.register(models.Reservation)
class ReservationAdmin(admin.ModelAdmin):
    """ Reservation Admin Definition """
    # Columns shown on the reservation changelist page.
    list_display = (
        "room",
        "status",
        "check_in",
        "check_out",
        "guest",
        "in_progress",
        "is_finished",
    )
    # Sidebar filter on the reservation status.
    list_filter = ("status",)
@admin.register(models.BookedDay)
class BookedDayAdmin(admin.ModelAdmin):
    """ BookedDay Admin Definition """
    # Show each blocked-out day alongside the reservation that owns it.
    list_display = ('day', 'reservation')
| [
"53186618+hanulbom@users.noreply.github.com"
] | 53186618+hanulbom@users.noreply.github.com |
cc27af1c9bbe4c95be19d7ed9110de8cba144d7d | ad21a8962f2e375cffbfb5cd3e228357c145d95e | /isbn_calculator/test_isbn_calculator.py | 1593f0195b8330a811f9753947b3a4ec74792b33 | [] | no_license | sabrown89/python-practice-problems | 2301c93486ec1c04ded562c635090eb1f4705c94 | 678dcc04f8b1e481b8d4d8a5d4633a9a6d434ead | refs/heads/master | 2022-11-22T02:55:58.808825 | 2021-08-11T16:12:34 | 2021-08-11T16:12:34 | 117,746,775 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,059 | py | import hiker
# Unit tests for hiker.Hiker's ISBN parsing and validation.
# "douglas" is the Hiker instance under test in every case; tests that call
# underscore-prefixed methods exercise internal helpers directly.
def test_remove_hyphens_and_spaces():
    douglas = hiker.Hiker('978-0-13- 149505- 0')
    assert douglas.isbn == '9780131495050'
def test_isbn_is_thirteen_digits():
    douglas = hiker.Hiker('978-0-13- 149505- 0')
    assert douglas._isbn_is_correct_number_of_digits() is True
def test_isbn_is_correct_number_digits_is_false_if_not_all_digits():
    douglas = hiker.Hiker('978-0-1A- 149505- 0')
    assert douglas._isbn_is_correct_number_of_digits() is False
def test_isbn_is_correct_number_of_digits_is_false_if_not_length_thirteen():
    douglas = hiker.Hiker('97- 149505- 0')
    assert douglas._isbn_is_correct_number_of_digits() is False
def test_is_isbn_thirteen_returns_false():
    douglas = hiker.Hiker('97- 149505- 0')
    assert douglas.is_isbn() is False
# ISBN-13 checksum internals: weighted digit sum and derived check digit.
def test_calculate_sums():
    douglas = hiker.Hiker('978-0-13- 149505- 4')
    assert douglas._calculate_sums_for_isbn_thirteen() == 100
def test_check_digit():
    douglas = hiker.Hiker('978-0-13- 149505- 4')
    assert douglas._check_digit() == 4
def test_is_isbn_thirteen_true():
    douglas = hiker.Hiker('978-0-13- 149505- 0')
    assert douglas.is_isbn() is True
def test_is_isbn_thirteen_true_another_number():
    douglas = hiker.Hiker('978-0596809485')
    assert douglas.is_isbn() is True
def test_isbn_calculator():
    douglas = hiker.Hiker('978-0596809485')
    assert douglas._isbn_calculator() == 5
def test_isbn_calculator_another():
    douglas = hiker.Hiker('978-0-13- 149505- 0')
    assert douglas._isbn_calculator() == 0
# The second Hiker argument selects the ISBN standard (10 or 13).
def test_isbn_ten_is_valid():
    douglas = hiker.Hiker('0471958697', 10)
    assert douglas.is_isbn() is True
def test_valid_isbn_standard_invalid():
    douglas = hiker.Hiker('0471958697', 5)
    assert douglas._valid_isbn_standard() is False
def test_valid_isbn_standard_valid_10():
    douglas = hiker.Hiker('047195869')
    assert douglas._valid_isbn_standard() is True
def test_valid_isbn_standard_valid_13():
    douglas = hiker.Hiker('0471958697', 13)
    assert douglas._valid_isbn_standard() is True
| [
"scottabrown89@gmial.com"
] | scottabrown89@gmial.com |
4f188c997813865f09a503591890d02c54d164c7 | 3db0c85f582dafd3b6c16543275953a2e22f1276 | /graph.py | b186c22bf45d1d61126a1859b4493c139b526f10 | [
"MIT"
] | permissive | BartMassey/nb-misc | 005ee943f962a05a6c340f8109792dd00b1de077 | ec8f6fdba200fcb4816e170c1517899f1c03db04 | refs/heads/master | 2021-01-01T16:40:34.234512 | 2014-09-02T18:46:19 | 2014-09-02T18:46:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 487 | py | # Copyright © 2014 Bart Massey
# Markable graph node class.
class Node(object):
    """A graph node carrying a label and a boolean visit mark.

    Nodes start unmarked; set_mark/clear_mark toggle the flag and
    is_marked reports it (useful for traversal bookkeeping).
    """

    def __init__(self, label):
        self.label = label
        self.marked = False

    def set_mark(self):
        """Flag this node as visited."""
        self.marked = True

    def clear_mark(self):
        """Reset the visit flag."""
        self.marked = False

    def is_marked(self):
        """Return True when the node is currently marked."""
        return self.marked
class Graph(object):
    """Plain container bundling a graph's nodes, edges and edge weights."""

    def __init__(self, nodes, edges, weights):
        # Stored exactly as given; no copying or validation is performed.
        self.nodes, self.edges, self.weights = nodes, edges, weights
| [
"bart@cs.pdx.edu"
] | bart@cs.pdx.edu |
55d9f6399b04b0ce86ed81f5a3b8c384235039dd | 02be54b8a9ab6813274ae18feb428d0d1405b0b0 | /products/widgets.py | 85945a02257c7670ba7e55d443f81a90674423f4 | [] | no_license | paulloy/msp4-brazen-mma | 8ee98e2f2b12a4a24b14d30c1456233b02457dcd | 33a430d6c81bb44525469bcee1beaee7c92b3f58 | refs/heads/master | 2023-03-27T13:29:12.825997 | 2021-03-31T03:30:46 | 2021-03-31T03:30:46 | 341,317,874 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 362 | py | from django.forms.widgets import ClearableFileInput
from django.utils.translation import gettext_lazy as _
class CustomClearableFileInput(ClearableFileInput):
    """File-upload widget with custom labels and a project-specific template."""
    # Label next to the clear checkbox.
    clear_checkbox_label = ('Remove')
    # Caption shown before the currently stored image.
    initial_text = ('Current Image')
    # Empty string hides the default "Change:" prefix before the file input.
    input_text = _('')
    template_name = 'products/custom_widget_templates' + \
        '/custom_clearable_file_input.html'
| [
"paulloy020896@gmail.com"
] | paulloy020896@gmail.com |
3862e819cf78b3797b247c2104e6c3624d5586fa | d1c4ea86a7148ceb4068c27ba6b51cedd3abd46f | /Natural_Language_Process/Lab1/src/utils/pre-treat.py | f7a50c81777b0241b55c07249f9ac23ecc78e9cc | [] | no_license | zirui-HIT/HIT_Lab | dfbf756e7951e4bf65ad96b561f6fb0c5d88dc59 | e74550450d7b7362bd873a613f28208401a26c45 | refs/heads/main | 2023-06-05T14:00:09.423997 | 2021-07-02T06:34:38 | 2021-07-02T06:34:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,247 | py | from copy import deepcopy
def simplify_data(data_path: str, save_path: str):
    """Strip POS tags from a segmented corpus.

    Each input line holds tokens of the form ``word/TAG`` (possibly wrapped in
    ``[``/``]`` phrase brackets).  The bare words are written to *save_path*,
    each followed by a space, one output line per input line.

    :param data_path: path of the tagged corpus to read
    :param save_path: path of the plain-word file to write
    """
    # Both handles are context-managed so the output file is flushed and
    # closed even if reading fails (the original never closed `target`).
    with open(data_path) as source, open(save_path, 'w') as target:
        for line in source:
            for token in line.split():
                # "word/TAG" -> "word"; drop phrase brackets around the word.
                target.write(token.split('/')[0].strip('[').strip(']') + ' ')
            target.write('\n')
def check_not_mark(word: str) -> bool:
marks = ["。", ",", "《", "》", "“", "”", "、", "!",
"?", "‘", "’", "(", ")", "[", "]", ";", ":"]
if word in marks:
return False
return True
def get_dictionary(data_path: str, dictionary_path: str):
    """Build unigram and bigram dictionaries (with counts) from a tagged corpus.

    Reads ``word/TAG`` tokens from *data_path*, strips tags and phrase
    brackets, then writes to *dictionary_path* one entry per line:
    ``1 <count> <word>`` for unigrams followed by ``2 <count> <w1> <w2>``
    for bigrams, each group sorted lexicographically.
    """
    single_dictionary = []
    double_dictionary = []
    single_cnt = {}
    double_cnt = {}
    with open(data_path, encoding='utf-8') as f:
        for line in f:
            words = line.split()
            for i in range(len(words)):
                # Strip "/TAG" and any surrounding phrase brackets.
                single_dictionary.append(
                    (words[i].split('/'))[0].strip('[').strip(']'))
                last = len(single_dictionary) - 1
                # i != 0 skips the first token of each line, so bigrams never
                # cross line boundaries.
                if i != 0:
                    # NOTE(review): deepcopy of a str is a no-op in Python;
                    # these copies look redundant -- confirm before removing.
                    pre_word = deepcopy(single_dictionary[last - 1])
                    current_word = deepcopy(single_dictionary[last])
                    # Only keep bigrams where neither side is punctuation.
                    if check_not_mark(pre_word) and check_not_mark(
                            current_word):
                        double_dictionary.append(pre_word + ' ' + current_word)
    # Tally occurrences before deduplicating the entry lists.
    for w in single_dictionary:
        single_cnt[w] = single_cnt.get(w, 0) + 1
    for w in double_dictionary:
        double_cnt[w] = double_cnt.get(w, 0) + 1
    single_dictionary = list(set(single_dictionary))
    single_dictionary = sorted(single_dictionary)
    double_dictionary = list(set(double_dictionary))
    double_dictionary = sorted(double_dictionary)
    # NOTE(review): the output file is opened without encoding='utf-8', so
    # the result depends on the platform default encoding -- confirm.
    with open(dictionary_path, 'w') as f:
        for w in single_dictionary:
            f.write('1' + ' ' + str(single_cnt[w]) + ' ' + w + '\n')
        for w in double_dictionary:
            f.write('2' + ' ' + str(double_cnt[w]) + ' ' + w + '\n')
if __name__ == '__main__':
    # Build the unigram/bigram dictionary from the tagged 199801 corpus.
    # simplify_data('../data/199801_seg&pos.txt', '../result/simplified.txt')
    get_dictionary('../data/199801_seg&pos.txt', '../result/dic.txt')
| [
"WDZRMPCBIT@163.com"
] | WDZRMPCBIT@163.com |
d510b163bd7b37d3020df5d1bb6e1dca90b0d1ce | a879480e8ecff79be624df9d6f67d49c90448672 | /src/webjob/log_history_manager.py | 9f707cf56282b0a3f5e56c856d3672d1066cd71b | [] | no_license | fortesinformatica/SIGA | 49299696e5de14a1e4fa8e31ef5d8529e2711ded | 2b2473c7880becf4f47ea36925f09660379e5eb8 | refs/heads/master | 2022-07-27T16:56:10.499679 | 2019-06-17T13:22:18 | 2019-06-17T13:22:18 | 192,342,983 | 1 | 1 | null | 2021-03-25T22:41:08 | 2019-06-17T12:30:18 | Python | UTF-8 | Python | false | false | 815 | py | """
LogHistoryManager
=====
Provides
    1. Management of how many log files are kept in the history
    2. Removal of the oldest log files
How to use
    Add this script as a Triggered WebJob in the Azure App Service "siga-api" with the following CRON expression:
    0 0 0 * * *
"""
import os
import sys
sys.path.append(os.getenv("APP_ROOT"))
sys.path.append(os.getenv("APP_SITEPACKAGES"))
# Retention limit (number of files) and the directory holding the logs.
# NOTE(review): int() raises if PERIOD_LOG_IN_DAYS is unset -- presumably the
# WebJob environment always defines it; confirm.
days_ago = int(os.getenv("PERIOD_LOG_IN_DAYS"))
log_directory = os.getenv("APP_LOG")
if os.path.exists(log_directory):
    # Assumes file names sort chronologically (oldest first) -- TODO confirm
    # the log naming scheme; also assumes APP_LOG ends with a path separator.
    files = os.listdir(log_directory)
    files.sort()
    number_of_files = len(files)
    if number_of_files > days_ago:
        # Delete just enough of the oldest files to get back to the limit.
        excess = number_of_files - days_ago
        to_remove = files[:excess]
        for file in to_remove:
            os.remove(log_directory + file)
    print(files)
| [
"ronaldox2@gmail.com"
] | ronaldox2@gmail.com |
88919fdc89c292d3a1afd47cf1b67122a3121bdf | 4957dd5d3583552f1ebc06f8125c10753383e81c | /protocols/Plate_Cells_Trough/Plate_Cells_Trough_2017-10-25.ot1.py | 70480af1f758312124889a18e41a44ccae577152 | [] | no_license | glebkuznetsov/Protocols | 4072eb4bb1bf18e9324fa3bada953b0655db2f4d | 0e3e9b03f1616c695f9e8e843dc5f9e3a59d1619 | refs/heads/master | 2020-03-27T05:43:11.285730 | 2018-05-23T21:33:58 | 2018-05-23T21:33:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,618 | py | from opentrons import containers, instruments
trough = containers.load('trough-12row', 'A1')
trash = containers.load('trash-box', 'B1')
tiprack = containers.load('tiprack-200ul', 'C1')
plate1 = containers.load('96-deep-well', 'D1')
plate2 = containers.load('96-deep-well', 'E1')
plate3 = containers.load('96-deep-well', 'A2')
plate4 = containers.load('96-deep-well', 'B2')
plate5 = containers.load('96-deep-well', 'C2')
plate6 = containers.load('96-deep-well', 'D2')
plate7 = containers.load('96-deep-well', 'E2')
all_plates = [plate1, plate2, plate3, plate4, plate5, plate6, plate7]
p50multi = instruments.Pipette(
axis='a',
name='p50',
max_volume=50,
min_volume=5,
channels=8,
tip_racks=[tiprack],
trash_container=trash)
media = trough.wells(0, length=6)
cells = trough.wells('A8')
def run_custom_protocol(number_of_plates: int=7):
plates = all_plates[0:number_of_plates]
tube_vol = 20000
media_vol = 80
media_vol_per_plate = 96*media_vol
media_tube = 0
cell_vol = 20
p50multi.pick_up_tip()
for plate in plates:
tube_vol = tube_vol - 7680
if tube_vol <= media_vol_per_plate:
tube_vol = 20000
media_tube += 1
p50multi.distribute(
media_vol,
media[media_tube],
plate.rows(),
new_tip='never')
p50multi.drop_tip()
p50multi.pick_up_tip()
p50multi.mix(5, p50multi.max_volume)
for plate in plates:
p50multi.distribute(
cell_vol,
cells,
plate.rows(), new_tip='never')
p50multi.drop_tip()
| [
"laura@opentrons.com"
] | laura@opentrons.com |
f576f6b14129bdf18f73c27796621f6de5c53ec6 | 9613fbaea6fab62623b3c8f11d205a28a959c2c3 | /25-刘杰-北京/第七周/ransac_manual.py | c57ea23fe2f2d42e96a83457a4947a7ca551a7d6 | [] | no_license | strongerfly/badou-Turing | 4eeae21a5ebf1fdd4df1ffe1156958f3fa939473 | 3262f01ccc64e4cbf66be2fd43ec437eb80c8663 | refs/heads/main | 2023-07-25T21:09:38.696925 | 2021-09-06T14:09:53 | 2021-09-06T14:09:53 | 378,666,594 | 1 | 0 | null | 2021-06-20T14:34:54 | 2021-06-20T14:34:54 | null | UTF-8 | Python | false | false | 2,572 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@Project :badou-Turing
@File :ransac_manual.py
@Author :luigi
@Date :2021/8/2 下午4:02
'''
import numpy as np
import matplotlib.pyplot as plt
class liner():
    """Simple y = m*x + c model fitted by ordinary least squares."""

    def fit(self, data):
        """Fit slope and intercept, where data[0] is x and data[1] is y.

        Stores the result on the instance and returns the (m, c) pair.
        """
        xs, ys = data[0], data[1]
        # Design matrix: one column of x values, one constant column for c.
        design = np.stack([xs, np.ones(len(xs))], axis=1)
        solution = np.linalg.lstsq(design, ys, rcond=None)[0]
        self.m, self.c = solution
        return self.m, self.c

    def predict(self, X):
        """Evaluate the fitted line at X (scalar or ndarray)."""
        return self.m * X + self.c
def ransac(data, model, sample_number, epoch, threshold):
    """RANSAC robust model fitting.

    :param data: data set, one (x, y) sample per row
    :type data: np.ndarray
    :param model: model object exposing fit(data) and predict(x)
    :type model: object
    :param sample_number: number of randomly sampled points per hypothesis
    :type sample_number: int
    :param epoch: number of random-sampling iterations
    :type epoch: int
    :param threshold: maximum absolute residual for a point to count as an inlier
    :type threshold: int
    :return: a snapshot of the model with the largest inlier count, or None
             when no iteration produced a single inlier
    :rtype: object
    """
    from copy import deepcopy  # local import: used to snapshot the best model

    best_count = 0  # renamed from `max`, which shadowed the builtin
    target = None
    for i in range(epoch):
        # Pick `sample_number` random row indices as the tentative inliers.
        # NOTE(review): np.random.choice samples WITH replacement here, so a
        # hypothesis may contain duplicated points -- confirm this is intended.
        dataIndex = np.arange(data.shape[0])
        dataIndexRandomk = np.random.choice(dataIndex, sample_number)
        dataRandomK = data[dataIndexRandomk]
        # Every point outside the sample is used to score the hypothesis.
        mask = np.ones(data.shape[0], dtype=bool)
        mask[dataIndexRandomk] = False
        dataRandomExcept = data[mask]
        valX = dataRandomExcept[:, 0]
        valY = dataRandomExcept[:, 1]
        # Fit on the sample, then predict the held-out points.
        model.fit(dataRandomK)
        predictY = model.predict(valX)
        # Absolute residuals; points within `threshold` count as inliers.
        cost = np.absolute(valY - predictY)
        count = np.sum(cost <= threshold)
        if count > best_count:
            best_count = count
            print('max is:{}'.format(count))
            # BUGFIX: the original stored a reference to the live model, so
            # the "best" parameters were overwritten by every later (possibly
            # worse) refit.  Snapshot the fitted state instead.
            target = deepcopy(model)
    return target
def main():
    # Demo: run RANSAC over uniformly random points and plot the fitted line.
    model = liner()
    data = np.random.randint(1, 100, (100, 2))
    sample_k = 5
    epoch = 10000
    threshold = 5
    ransac(data, model, sample_k, epoch, threshold)
    x = data[:, 0]
    y = data[:, 1]
    plt.plot(x, y, 'o', label='original data', markersize=10)
    # NOTE(review): this plots the parameters left on `model` by the last
    # RANSAC iteration, not the best hypothesis returned by ransac() --
    # confirm whether the return value should be used here instead.
    plt.plot(x, model.m * x + model.c, 'r', label='fitter line')
    plt.show()
if __name__ == '__main__':
    main()
| [
"86213076+luigide2020@users.noreply.github.com"
] | 86213076+luigide2020@users.noreply.github.com |
6346b5baf6d9b590f6b07edcd2f1fef680b4fe6f | 32263e1e50b90ce7049661f85f515ae8b3f11120 | /answer4.py | 1ee21b24c5a4b68915a24f9f4b3a7c5b16233a2a | [] | no_license | harshitahluwalia7895/Thread_and_Processes | 7e926f211c75d5d700be28b29da77fb5e1b48182 | 3e7ad17b7ea2056b124e5bfd082f2ab39ce09552 | refs/heads/master | 2020-03-19T06:44:48.014423 | 2018-06-04T16:18:27 | 2018-06-04T16:18:27 | 136,051,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | from threading import *
import time
import math
class abc(Thread):
    """Worker thread: prompts for an integer and prints its factorial."""
    def run(self):
        # Blocks on stdin for the number; math.factorial does the computation.
        m = int(input('Enter the Number to be Factorial:'))
        print('The factorial of {} is '.format(m),math.factorial(m))
# Spawn the worker immediately at import time.
t=abc()
t.start() | [
"harshit.ahluwalia7895889924@gmail.com"
] | harshit.ahluwalia7895889924@gmail.com |
c383a0ab8b68f0818c9f72c933f6d128dad4b8a6 | 3de707e3e7f3fcbf46700e1bf8d6c394a71410a2 | /augpathlib/remotes.py | bc67fd0ba4611b6910d60347b5944246e289d464 | [
"MIT"
] | permissive | tmsincomb/augpathlib | 984f1c8418e7e8eaa5675a3c209cbd745cdee3e7 | ed9c0edff540741fca866780a3d043a3b7644f08 | refs/heads/master | 2022-11-17T12:03:13.852433 | 2020-06-29T10:10:23 | 2020-06-29T10:10:23 | 276,260,552 | 0 | 0 | MIT | 2020-07-01T02:47:31 | 2020-07-01T02:47:30 | null | UTF-8 | Python | false | false | 33,457 | py | import os
import sys
import atexit
import pathlib
import warnings
import subprocess
from augpathlib import exceptions as exc
from augpathlib.meta import PathMeta
from augpathlib import caches, LocalPath
from augpathlib.utils import _bind_sysid_, StatResult, cypher_command_lookup, log
if os.name != 'nt':
# pexpect on windows does not support pxssh
# because it is missing spawn
from pexpect import pxssh
class RemotePath:
""" Remote data about a remote object. """
_cache_class = None
_debug = False
# ugh this is such a bad implementation, let the remote paths exists
# and init, and then just check if they exist, a path is not an object
# that always dereferences ... what the heck was I thinking when I did this ...
# we use a PurePath becuase we still want to key off this being local path
# but we don't want any of the local file system operations to work by accident
# so for example self.stat should return the remote value not the local value
# which is what would happen if we used a PosixPath as the base class
# need a way to pass the session information in independent of the actual path
# abstractly having remote.data(global_id_for_local, self)
# should be more than enough, the path object shouldn't need
# to know that it has a remote id, the remote manager should
# know that
@classmethod
def _new(cls, local_class, cache_class):
""" when constructing a new remote using _new you MUST
call init afterward to bind the remote api """
# FIXME 1:1ness issue from local -> cache
# probably best to force the type of the cache
# to switch if there are multiple remote mappings
# since there can be only 1 local file with the same
# path, a composite cache or a multi-remote cache
# seems a bit saner, or require explicit switching of
# the active remote if one-at-a-time semantics are desired
newcls = type(cls.__name__,
(cls,),
dict(_local_class=local_class,
_cache_class=cache_class))
local_class._remote_class = newcls
local_class._cache_class = cache_class
cache_class._remote_class = newcls
cache_class._local_class = local_class
newcls.weighAnchor()
cache_class.weighAnchor()
return newcls
@classmethod
def init(cls, identifier):
""" initialize the api from an identifier and bind the root """
if not hasattr(cls, '_api'):
cls._api = cls._api_class(identifier)
cls.root = cls._api.root
else:
raise ValueError(f'{cls} already bound an api to {cls._api}')
@classmethod
def anchorToCache(cls, cache_anchor, init=True):
# FIXME need to check for anchor after init and init after anchor
if not hasattr(cls, '_cache_anchor'):
if init:
if not hasattr(cls, '_api'):
cls.init(cache_anchor.id)
if hasattr(cls, 'root') and cls.root != cache_anchor.id:
raise ValueError('root and anchor ids do not match! '
f'{cls.root} != {cache_anchor.id}')
cls._cache_anchor = cache_anchor
return cls._cache_anchor
else:
raise ValueError(f'already anchored to {cls._cache_anchor}')
@classmethod
def anchorTo(cls, path, create=False):
""" You already know the rock you want and
you want the anchor stuck to it. """
# FIXME should we fail on create=True and exists?
if isinstance(path, caches.CachePath):
# FIXME the non-existence problem rears its head again
return cls.anchorToCache(path)
elif isinstance(path, LocalPath):
# FIXME the non-existence problem rears its head again
if path.cache:
return cls.anchorToCache(path.cache)
else:
root = cls.root if isinstance(cls.root, cls) else cls(cls.root)
if path.name != root.name:
# unlike git you cannot clone to a folder with a different
# name (for now ... maybe can figure out how in the future)
raise ValueError('Path name and root name do not match.'
f'{path.name} != {cls.root.name}')
if create:
return cls.dropAnchor(path.parent) # existing folder dealt with in dropAnchor
else:
raise ValueError(f'not creating {path} since create=False')
else:
raise TypeError(f"Don't know how to anchor to a {type(path)} {path}")
@classmethod
def _get_local_root_path(cls, parent_path=None):
if parent_path is None:
parent_path = cls._local_class.cwd()
else:
parent_path = cls._local_class(parent_path)
root = cls(cls.root) # FIXME formalize the use of root
path = parent_path / root.name
return root, path
@classmethod
def smartAnchor(cls, parent_path=None):
# work around the suspect logic
# in the implementation below
try:
return cls.dropAnchor(parent_path=parent_path)
except exc.RemoteAlreadyAnchoredError as e:
root, path = cls._get_local_root_path(parent_path)
if cls._cache_anchor == path.cache:
return cls._cache_anchor
else:
raise e # possibly check if the anchor is the same?
except exc.CacheExistsError as e:
root, path = cls._get_local_root_path(parent_path)
cls._cache_anchor = path.cache
return cls._cache_anchor
except exc.DirectoryNotEmptyError as e:
root, path = cls._get_local_root_path(parent_path)
if path.cache:
cls._cache_anchor = path.cache
return cls._cache_anchor
else:
raise e
@classmethod
def dropAnchor(cls, parent_path=None):
""" If a _cache_anchor does not exist then create it,
otherwise raise an error. If a local anchor already
exists do not use this method.
You know that the ship (path) is more or less in the right
place but you don't know for sure exactly which rock the
anchor will catch on (you don't know the name of the remote).
"""
if not hasattr(cls, '_cache_anchor'):
root, path = cls._get_local_root_path(parent_path)
if not path.exists():
if root.is_file():
raise NotImplementedError(
'Have not implemented mapping for individual files yet.')
elif root.is_dir():
path.mkdir()
else:
raise NotImplementedError(f'What\'s a {root}?!')
elif list(path.children):
raise exc.DirectoryNotEmptyError(f'has children {path}')
cls._cache_anchor = path.cache_init(root.id, anchor=True)
# we explicitly do not handle the possible CacheExistsError here
# so that there is a path where anchoring can fail loudly
# we may not need that at the end of the day, but we will see
return cls._cache_anchor
else:
raise exc.RemoteAlreadyAnchoredError(f'{cls} already anchored to '
f'{cls._cache_anchor}')
@classmethod
def weighAnchor(cls):
# TODO determine whether the current behavior is correct
# calling this will not cause the cache class to weigh anchor
# but there is a small chance that it should
# TODO is _abstract_class needed here? or do we not need it
# because remote paths don't have the crazy hierarchy that
# pathlib derived paths do? and will this change when we fix
# everything ...
if hasattr(cls, '_cache_anchor'):
delattr(cls, '_cache_anchor')
@classmethod
def setup(cls, local_class, cache_class):
""" call this once to bind everything together """
cn = self.__class__.__name__
warnings.warn(f'{cn}.setup is deprecated please switch to RemotePath._new',
DeprecationWarning,
stacklevel=2)
cache_class.setup(local_class, cls)
def bootstrap(self, recursive=False, only=tuple(), skip=tuple(), sparse=tuple()):
#self.cache.remote = self # duh
# if you forget to tell the cache you exist of course it will go to
# the internet to look for you, it isn't quite smart enough and
# we're trying not to throw dicts around willy nilly here ...
return self.cache.bootstrap(self.meta, recursive=recursive, only=only, skip=skip, sparse=sparse)
def __init__(self, thing_with_id, cache=None):
if isinstance(thing_with_id, str):
id = thing_with_id
elif isinstance(thing_with_id, PathMeta):
id = thing_with_id.id
elif isinstance(thing_with_id, RemotePath):
id = thing_with_id.id
else:
raise TypeError(f'Don\'t know how to initialize a remote from {thing_with_id}')
self._id = id
if cache is not None:
self._cache = cache
self.cache._remote = self
self._errors = []
@property
def id(self):
return self._id
@property
def errors(self):
raise NotImplementedError
@property
def cache(self):
if hasattr(self, '_cache_anchor') and self._cache_anchor is not None:
return self._cache
else:
# cache is not real
class NullCache:
@property
def local(self, remote=self):
raise TypeError(f'No cache for {remote}')
@property
def _are_we_there_yet(self, remote=self):
# this is useless since these classes are ephemoral
if hasattr(remote, '_cache_anchor') and remote._cache_anchor is not None:
remote.cache_init()
def __rtruediv__(self, other):
return None
def __truediv__(self, other):
return None
return NullCache()
def cache_init(self, parents=False):
try:
return self._cache_anchor / self
except FileNotFoundError:
if parents:
#parent, *rest = self.parent.cache_init(parents=parents)
#return (self.cache_init(), parent, *rest)
parent = self.parent
parent_cache = parent.cache_init(parents=parents)
parent_cache.local.cache_init(parent.meta) # FIXME hrm we shouldn't have to do this
# and it isn't working anyway ... the xattrs don't seem to be getting set
return self.cache_init()
else:
raise
@property
def _cache(self):
""" To catch a bad call to set ... """
if hasattr(self, '_c_cache'):
return self._c_cache
@_cache.setter
def _cache(self, cache):
if not isinstance(cache, caches.CachePath):
raise TypeError(f'cache is a {type(cache)} not a CachePath!')
#elif cache.meta is None: # useful for certain debugging situations
#raise ValueError(f'cache has no meta {cache}')
self._c_cache = cache
def _cache_setter(self, cache, update_meta=True):
cache._remote = self
# FIXME in principle
# setting cache needs to come before update_meta
# in the event that self.meta is missing file_id
# if meta updater fails we unset self._c_cache
self._cache = cache
if update_meta:
try:
cache._meta_updater(self.meta)
except BaseException as e:
self._c_cache = None
delattr(self, '_c_cache')
raise e
@property
def parent_id(self):
""" BEWARE if self.parent hits the network so will this.
In the event that it does, overwrite this method. """
return self.parent.id
def _parent_changed(self, cache):
return self.parent_id != cache.parent.id
def _on_cache_move_error(self, error, cache):
""" called after a failure to move a cached file to a new location """
raise error
    def update_cache(self, cache=None, fetch=True):
        """Synchronize a cache object with the metadata held by this remote.

        This differs from ``_cache_setter`` in that it runs the meta updater
        by default, handles many more edge cases, and checks for consistency.
        ``_cache_setter`` is usually invoked internally by a CachePath method
        that wants to register itself with a remote as an implementation
        detail.

        :param cache: explicit cache to update; only accepted when this
            remote is not already bound to a cache (``self.cache is None``).
        :param fetch: forwarded to ``cache._meta_updater``; controls whether
            new content may be retrieved while updating.
        :returns: the value of ``cache._meta_updater`` (truthy when the file
            content differs) — bound before any rename/move is attempted.
        :raises TypeError: if ``cache`` is supplied while ``self.cache`` is set.
        :raises ValueError: if the supplied cache's name or parent disagrees
            with this remote, i.e. a rename/move was conflated with a
            content update.
        """
        if cache is not None and self.cache is not None:
            # TODO see if there are any exceptions to this behavior
            raise TypeError('cannot accept cache kwarg when self.cache not None')
        elif cache is None:
            cache = self.cache
        # True when this remote's parent id differs from cache.parent.id.
        parent_changed = self._parent_changed(cache)
        if self.cache is None:
            # HACK test if cache is not None before it may have been reassigned
            # Guard against being handed a cache for a *different* file: a name
            # or parent mismatch means rename/move and content update were
            # requested at the same time, which is unsupported here.
            if cache.name != self.name:
                msg = ('Cannot update the name and content of a file at the '
                       'same time.\nAre you sure you have passed the right '
                       f'cache object?\n{cache.name} != {self.name}')
                raise ValueError(msg)
            elif parent_changed:
                msg = ('Cannot update the parent and content of a file at the '
                       'same time.\nAre you sure you have passed the right '
                       f'cache object?\n{cache.parent.id} != {self.parent_id}')
                raise ValueError(msg)
        log.debug(f'maybe updating cache for {self.name}')
        file_is_different = cache._meta_updater(self.meta, fetch=fetch)
        # update the cache first # FIXME this may be out of order ...
        # then move to the new name if relevant
        # prevents moving partial metadata onto existing files
        if cache.name != self.name or parent_changed: # this is locally correct
            # the issue is that move is now smarter
            # and will detect if a parent path has changed
            try:
                cache.move(remote=self)
            except exc.WhyDidntThisGetMovedBeforeError as e:
                # deal with the sadness that is non-unique filenames;
                # users almost certainly do not expect this behavior ...
                log.error(e)
                self._on_cache_move_error(e, cache)
        return file_is_different
@property
def local(self):
return self.cache.local # FIXME there are use cases for bypassing the cache ...
@property
def local_direct(self):
# kind of uninstrumeted ???
return self._local_class(self.as_path())
@property
def anchor(self):
""" the semantics of anchor for remote paths are a bit different
RemotePath code expects this function to return a RemotePath
NOT a string as is the case for core pathlib. """
raise NotImplementedError
@property
def _meta(self): # catch stragglers
raise NotImplementedError
def refresh(self):
""" Refresh the local in memory metadata for this remote.
Implement actual functionality in your subclass. """
raise NotImplementedError
# could be fetch or pull, but there are really multiple pulls as we know
# clear the cached value for _meta
if hasattr(self, '_meta'):
delattr(self, '_meta')
@property
def data(self):
raise NotImplementedError
self.cache.id
for chunk in chunks:
yield chunk
@property
def meta(self):
# on blackfynn this is the package id or object id
# this will error if there is no implementaiton if self.id
raise NotImplementedError
#return PathMeta(id=self.id)
def _meta_setter(self, value):
raise NotImplementedError
@property
def annotations(self):
# these are models etc in blackfynn
yield from []
raise NotImplementedError
def as_path(self):
""" returns the relative path construction for the child so that local can make use of it """
return pathlib.PurePath(*self.parts)
def _parts_relative_to(self, remote, cache_parent=None):
parent_names = [] # FIXME massive inefficient due to retreading subpaths :/
# have a look at how pathlib implements parents
parent = self.parent
if parent != remote:
parent_names.append(parent.name)
# FIXME can this go stale? if so how?
#log.debug(cache_parent)
if cache_parent is not None and parent.id == cache_parent.id:
for c_parent in cache_parent.parents:
if c_parent is None:
continue
elif c_parent.name == remote.name: # FIXME trick to avoid calling id
parent_names.append(c_parent.name) # since be compare one earlier we add here
break
else:
parent_names.append(c_parent.name)
else:
for parent in parent.parents:
if parent == remote:
break
elif parent is None:
continue # value error incoming
else:
parent_names.append(parent.name)
else:
self._errors += ['file-deleted']
msg = f'{remote} is not one of {self}\'s parents'
log.error(msg)
#raise ValueError()
args = (*reversed(parent_names), self.name)
elif self == parent:
args = ('',)
else:
args = self.name,
return args
@property
def parts(self):
if self == self.anchor:
return tuple()
if not hasattr(self, '_parts'):
if self.cache:
cache_parent = self.cache.parent
else:
cache_parent = None
self._parts = tuple(self._parts_relative_to(self.anchor, cache_parent))
return self._parts
@property
def parent(self):
""" The atomic parent operation as understood by the remote. """
raise NotImplementedError
@property
def parents(self):
parent = self.parent
while parent:
yield parent
parent = parent.parent
@property
def children(self):
# uniform interface for retrieving remote hierarchies decoupled from meta
raise NotImplementedError
@property
def rchildren(self):
# uniform interface for retrieving remote hierarchies decoupled from meta
yield from self._rchildren()
def _rchildren(self, create_cache=True, sparse=False):
raise NotImplementedError
def children_pull(self, existing):
# uniform interface for asking the remote to
# update children using its own implementation
raise NotImplementedError
def iterdir(self):
# I'm guessing most remotes don't support this
raise NotImplementedError
def glob(self, pattern):
raise NotImplementedError
def rglob(self, pattern):
raise NotImplementedError
def __eq__(self, other):
return self.id == other.id
def __ne__(self, other):
return not self == other
def __repr__(self):
return f'{self.__class__.__name__}({self.id!r})'
class SshRemote(RemotePath, pathlib.PurePath):
""" Testing. To be used with ssh-agent.
StuFiS The stupid file sync. """
_cache_class = None # set when calling __new__
encoding = 'utf-8'
_meta = None # override RemotePath dragnet
_meta_maker = LocalPath._meta_maker
sysid = None
_bind_sysid = classmethod(_bind_sysid_)
@classmethod
def _new(cls, local_class, cache_class):
newcls = super()._new(local_class, cache_class)
# must run before we can get the sysid, which is a bit odd
# given that we don't actually sandbox the filesystem
newcls._bind_flavours()
return newcls
@classmethod
def _bind_flavours(cls, pos_helpers=tuple(), win_helpers=tuple()):
pos, win = cls._get_flavours()
if pos is None:
pos = type(f'{cls.__name__}Posix',
(*pos_helpers, cls, pathlib.PurePosixPath), {})
if win is None:
win = type(f'{cls.__name__}Windows',
(*win_helpers, cls, pathlib.PureWindowsPath), {})
cls.__abstractpath = cls
cls.__posixpath = pos
cls.__windowspath = win
@classmethod
def _get_flavours(cls):
pos, win = None, None
for subcls in cls.__subclasses__(): # direct only
if subcls._flavour is pathlib._posix_flavour:
pos = subcls
elif subcls._flavour is pathlib._windows_flavour:
win = subcls
else:
raise TypeError(f'unknown flavour for {cls} {cls._flavour}')
return pos, win
def __new__(cls, *args, **kwargs):
if not hasattr(cls, '_flavour'):
cls = cls.__windowspath if os.name == 'nt' else cls.__posixpath
if isinstance(args[0], str) and args[0].startswith(cls.host + ':'):
# FIXME not great but allows less verbose where possible ...
# also possibly an opportunity to check if hostnames match?
# ugh unix everything is a stream of bytes is annoying here
_, *args = (args[0].split(':', 1), *args[1:])
_self = pathlib.PurePath.__new__(cls, *args) # no kwargs since the only kwargs are for init
_self.remote_platform = _self._remote_platform
return _self
# TODO this isn't quite working yet due to bootstrapping issues as usual
# it also isn't working because we want access to all paths in many cases
# the root remains and the calculation against anchor remains for any
# relative path that is provided, and the default behavior for absolute
# paths protects us from sillyness
if _self.id != cls.root: #_cache_anchor.id:
self = _self.relative_to(_self.anchor)
else:
self = pathlib.PurePath.__new__(cls, '.') # FIXME make sure this is interpreted correctly ...
self._errors = []
return self
@classmethod
def init(cls, host_path):
""" should only be invoked after _new has bound local and cache classes """
if not hasattr(cls, '_anchor'):
cls.root = host_path # I think this is right ...
host, path = host_path.split(':', 1)
if not hasattr(cls, '_flavour'):
cls = cls.__windowspath if os.name == 'nt' else cls.__posixpath
cls._anchor = pathlib.PurePath.__new__(cls, path)
session = pxssh.pxssh(options=dict(IdentityAgent=os.environ.get('SSH_AUTH_SOCK')))
session.login(host, ssh_config=LocalPath('~/.ssh/config').expanduser().as_posix())
cls._rows = 200
cls._cols = 200
session.setwinsize(cls._rows, cls._cols) # prevent linewraps of long commands
session.prompt()
atexit.register(lambda:(session.sendeof(), session.close()))
cls.host = host
cls.session = session
cls._uid, *cls._gids = [int(i) for i in (cls._ssh('echo $(id -u) $(id -G)')
.decode().split(' '))]
else:
raise ValueError(f'{cls} already bound an remote to {cls._anchor}')
@classmethod
def anchorToCache(cls, cache_anchor, init=True):
anchor = super().anchorToCache(cache_anchor=cache_anchor, init=init)
# _cache_anchor has to be bound for _bind_sysid to work
# that binding happens after init so we do this here
cls._bind_sysid()
return anchor
def __init__(self, thing_with_id, cache=None):
if isinstance(thing_with_id, pathlib.PurePath):
thing_with_id = thing_with_id.as_posix()
super().__init__(thing_with_id, cache=cache)
@property
def anchor(self):
return self._anchor
#return self._cache_anchor.remote
# FIXME warning on relative paths ...
# also ... might be convenient to allow
# setting non-/ anchors, but perhaps for another day
#return self.__class__('/', host=self.host)
@property
def id(self):
return f'{self.host}:{self.rpath}'
#return self.host + ':' + self.as_posix() # FIXME relative to anchor?
@property
def cache_key(self):
""" since some systems have compound ids ... """
raise NotImplementedError
@property
def rpath(self):
# FIXME relative paths when the anchor is set differently
# the anchor will have to be stored as well since there coulde
# be many possible anchors per host, thus, if an anchor relative
# identifier is supplied then we need to construct the full path
# conveniently in this case if self is a fully rooted path then
# it will overwrite the anchor path
# TODO make sure that the common path is the anchor ...
return (self.anchor / self).as_posix()
def _parts_relative_to(self, remote, cache_parent=None):
if remote == self.anchor:
# have to build from self.anchor._parts because it is the only
# place the keeps the original parts
remote = pathlib.PurePath(*self.anchor._parts)
return self.relative_to(remote).parts
def refresh(self):
# TODO probably not the best idea ...
raise NotImplementedError('This baby goes to the network every single time!')
    def access(self, mode):
        """Return True if the current ssh user may access this path.

        ``mode`` is 'read', 'write', or 'execute' (the matching ``os.R_OK``
        / ``os.W_OK`` / ``os.X_OK`` constants are also accepted).
        Inaccessible or missing paths count as no access.
        """
        try:
            st = self.stat()
        except (PermissionError, FileNotFoundError) as e:
            return False

        # Octal permission-bit masks: 0x124 == 0o444 (all read bits),
        # 0x92 == 0o222 (all write bits), 0x49 == 0o111 (all execute bits).
        r, w, x = 0x124, 0x92, 0x49
        # Each line selects the requested permission's bits and shifts them
        # onto the execute-bit (0o111) positions; the multiply-by-x trick
        # zeroes the term unless ``mode`` names that permission.
        read = ((r & st.st_mode) >> 2) & (mode == 'read' or mode == os.R_OK) * x
        write = ((w & st.st_mode) >> 1) & (mode == 'write' or mode == os.W_OK) * x
        execute = (x & st.st_mode) & (mode == 'execute' or mode == os.X_OK) * x
        current = read + write + execute
        # Owner/group/other selectors within the 0o111-aligned mask:
        # 0x40 == 0o100 (owner), 0x8 == 0o10 (group), 0x1 == 0o1 (other).
        u, g, e = 0x40, 0x8, 0x1
        # Grant if the owner bit applies and we are the owner, the group bit
        # applies and we belong to the group, or the world bit is set.
        return (u & current and st.st_uid == self._uid or
                g & current and st.st_gid in self._gids or
                e & current)
def open(self, mode='wt', buffering=-1, encoding=None,
errors=None, newline=None):
if mode not in ('wb', 'wt'):
raise TypeError('only w[bt] mode is supported') # TODO ...
#breakpoint()
return
class Hrm:
session = self.session
def write(self, value):
self.session
#cmd = ['ssh', self.host, f'"cat - > {self.rpath}"']
#self.session
#p = subprocess.Popen()
@property
def data(self):
cmd = ['scp', self.id, '/dev/stdout']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
while True:
data = p.stdout.read(4096) # TODO hinting
if not data:
break
yield data
p.communicate()
# reuse meta from local
# def meta (make it easier to search for this)
meta = LocalPath.meta # magic
#def _ssh(self, remote_cmd):
@classmethod
def _ssh(cls, remote_cmd):
#print(remote_cmd)
if len(remote_cmd) > cls._cols:
raise exc.CommandTooLongError
n_bytes = cls.session.sendline(remote_cmd)
cls.session.prompt()
raw = cls.session.before
out = raw[n_bytes + 1:].strip() # strip once here since we always will
#print(raw)
#print(out)
return out
@property
def _remote_platform(self):
remote_cmd = "uname -a | awk '{ print tolower($1) }'"
return self._ssh(remote_cmd).decode(self.encoding)
@property
def cypher_command(self):
# this one is a little backwards, because we can control
# whatever cypher we want, unlike in other cases
return cypher_command_lookup[self._cache_class.cypher]
def checksum(self):
remote_cmd = (f'{self.cypher_command} {self.rpath} | '
'awk \'{ print $1 }\';')
hex_ = self._ssh(remote_cmd).decode(self.encoding)
log.debug(hex_)
return bytes.fromhex(hex_)
def _stat_cmd(self, stat_format=StatResult.stat_format, path=None):
# TODO use _stat_format_darwin for cases where gstat is missing
cmd = 'gstat' if self.remote_platform == 'darwin' else 'stat'
if path is None:
path = self.rpath
if path == '':
_path = path
else:
_path = f' "{path}"'
return f'{cmd} -c {stat_format}{_path}'
def stat(self):
remote_cmd = self._stat_cmd()
out = self._ssh(remote_cmd)
try:
return StatResult(out)
except ValueError as e:
if out.endswith(b'Permission denied'):
raise PermissionError(out.decode())
elif out.endswith(b'No such file or directory'):
raise FileNotFoundError(out.decode())
else:
log.error(remote_cmd)
raise ValueError(out) from e
def exists(self):
try:
st = self.stat()
return bool(st) # FIXME
except FileNotFoundError: # FIXME there will be more types here ...
pass
@property
def __parent(self): # no longer needed since we inherit from path directly
# because the identifiers are paths if we move
# file.ext to another folder, we treat it as if it were another file
# at least for this SshRemote path, if we move a file on our end
# the we had best update our cache
# if someone else moves the file on the remote, well, then
# that file simply vanishes since we weren't notified about it
# if there is a remote transaction log we can replay if there isn't
# we have to assume the file was deleted or check all the names and
# hashes of new files to see if it has moved (and not been changed)
# a move and change without a sync will be bad for us
# If you have an unanchored path then resolve()
# always operates under the assumption that the
# current working directory which I think is incorrect
# as soon as you start passing unresolved paths around
# the remote system doesn't know what context you are in
# so we need to fail loudly
# basically force people to manually resolve their paths
return self.__class__(self.cache.parent) # FIXME not right ...
def is_dir(self):
remote_cmd = self._stat_cmd(stat_format="%F")
out = self._ssh(remote_cmd)
return out == b'directory'
def is_file(self):
remote_cmd = self._stat_cmd(stat_format="%F")
out = self._ssh(remote_cmd)
return out == b'regular file'
@property
def children(self):
# this is amusingly bad, also children_recursive ... drop the maxdepth
#("find ~/files/blackfynn_local/SPARC\ Consortium -maxdepth 1 "
#"-exec stat -c \"'%n' %o %s %W %X %Y %Z %g %u %f\" {} \;")
# chechsums when listing children? maybe ...
#\"'%n' %o %s %W %X %Y %Z %g %u %f\"
if self.is_dir():
# no children if it is a file sadly
remote_cmd = (f"cd {self.rpath};"
f"{self._stat_cmd(path='')} {{.,}}*;"
"echo '----';"
f"{self.cypher_command} {{.,}}*;" # FIXME fails on directories destroying alignment
'cd "${OLDPWD}"')
out = self._ssh(remote_cmd)
stats, checks = out.split(b'\r\n----\r\n')
#print(stats)
stats = {sr.name:sr for s in stats.split(b'\r\n')
for sr in (StatResult(s),)}
checks = {fn:bytes.fromhex(cs) for l in checks.split(b'\r\n')
if not b'Is a directory' in l
for cs, fn in (l.decode(self.encoding).split(' ', 1),)}
return stats, checks # TODO
def _mkdir_child(self, child_name):
raise NotImplementedError('implement in subclass and/or fix instantiation/existence issues')
def __repr__(self):
return f'{self.__class__.__name__}({self.rpath!r}, host={self.host!r})'
SshRemote._bind_flavours()
| [
"tgbugs@gmail.com"
] | tgbugs@gmail.com |
09c0fefdd010970f39b250148bf0b0160b5f65a1 | a00fdfc743262d3d9253bab1f2e8b10f99f013ee | /Bambu/bambuToNero.py | 88f058034181c1d5bdb4ff97c5bcf43358b2fc8b | [] | no_license | pdoming/NeroProducer | 2a97101002c626d7f23f3c80e1abfaacc5c81968 | 8082361fa0a05c83cc6c6aacb1bdd5de24f65115 | refs/heads/master | 2021-01-15T17:35:58.814592 | 2015-07-25T16:50:39 | 2015-07-25T16:50:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,400 | py | from MitAna.TreeMod.bambu import mithep, analysis
import os
mitdata = os.environ['MIT_DATA']
from MitPhysics.Mods.GoodPVFilterMod import goodPVFilterMod
from MitPhysics.Mods.JetCorrectionMod import jetCorrectionMod
from MitPhysics.Mods.JetIdMod import jetIdMod
from MitPhysics.Mods.MetCorrectionMod import metCorrectionMod
from MitPhysics.Mods.PFTauIdMod import pfTauIdMod
pfTauIdMod.AddCutDiscriminator(mithep.PFTau.kDiscriminationByRawCombinedIsolationDBSumPtCorr3Hits, 5., False)
from MitPhysics.Mods.ElectronIdMod import electronIdMod
from MitPhysics.Mods.MuonIdMod import muonIdMod
from MitPhysics.Mods.PhotonIdMod import photonIdMod
from MitPhysics.Mods.SeparatePileUpMod import separatePileUpMod
generatorMod = mithep.GeneratorMod(
IsData = False,
CopyArrays = False,
MCMETName = "GenMet"
)
electronTightId = electronIdMod.clone('ElectronTightId',
IsFilterMode = False,
InputName = electronIdMod.GetOutputName(),
OutputName = 'TightElectronId',
IdType = mithep.ElectronTools.kPhys14Tight,
IsoType = mithep.ElectronTools.kPhys14TightIso
)
muonTightId = muonIdMod.clone('MuonTightId',
IsFilterMode = False,
InputName = muonIdMod.GetOutputName(),
OutputName = 'TightMuonId',
IdType = mithep.MuonTools.kMuonPOG2012CutBasedIdTight,
IsoType = mithep.MuonTools.kPFIsoBetaPUCorrected
)
muonTightIdMask = mithep.MaskCollectionMod('TightMuons',
InputName = muonIdMod.GetOutputName(),
MaskName = muonTightId.GetOutputName(),
OutputName = 'TightMuons'
)
fatJetCorrectionMod = mithep.JetCorrectionMod('FatJetCorrection',
InputName = 'AKt8PFJetsCHS',
CorrectedJetsName = 'CorrectedFatJets',
RhoAlgo = mithep.PileupEnergyDensity.kFixedGridFastjetAll
)
if analysis.isRealData:
fatJetCorrectionMod.AddCorrectionFromFile(mitdata + "/74X_dataRun2_Prompt_v1_L1FastJet_AK8PFchs.txt")
fatJetCorrectionMod.AddCorrectionFromFile(mitdata + "/74X_dataRun2_Prompt_v1_L2Relative_AK8PFchs.txt")
fatJetCorrectionMod.AddCorrectionFromFile(mitdata + "/74X_dataRun2_Prompt_v1_L3Absolute_AK8PFchs.txt")
fatJetCorrectionMod.AddCorrectionFromFile(mitdata + "/74X_dataRun2_Prompt_v1_L2L3Residual_AK8PFchs.txt")
else:
fatJetCorrectionMod.AddCorrectionFromFile(mitdata + "/MCRUN2_74_V9_L1FastJet_AK8PFchs.txt")
fatJetCorrectionMod.AddCorrectionFromFile(mitdata + "/MCRUN2_74_V9_L2Relative_AK8PFchs.txt")
fatJetCorrectionMod.AddCorrectionFromFile(mitdata + "/MCRUN2_74_V9_L3Absolute_AK8PFchs.txt")
fatJetIdMod = jetIdMod.clone('FatJetId',
InputName = fatJetCorrectionMod.GetOutputName(),
OutputName = 'GoodFatJets',
MVATrainingSet = mithep.JetIDMVA.nMVATypes
)
photonMediumId = photonIdMod.clone('PhotonMediumId',
IsFilterMode = False,
InputName = photonIdMod.GetOutputName(),
OutputName = 'PhotonMediumId',
IdType = mithep.PhotonTools.kPhys14Medium,
IsoType = mithep.PhotonTools.kPhys14Medium
)
photonTightId = photonMediumId.clone('PhotonTightId',
OutputName = 'PhotonTightId',
IdType = mithep.PhotonTools.kPhys14Tight,
IsoType = mithep.PhotonTools.kPhys14Tight
)
head = 'HEAD'
tag = 'BAMBU_041'
fillers = []
fillers.append(mithep.nero.EventFiller(
RhoAlgo = mithep.PileupEnergyDensity.kFixedGridFastjetAll
))
fillers.append(mithep.nero.VertexFiller(
VerticesName = goodPVFilterMod.GetOutputName()
))
fillers.append(mithep.nero.JetsFiller(
JetsName = jetIdMod.GetOutputName(),
VerticesName = goodPVFilterMod.GetOutputName(),
JetIDMVA = jetIdMod.GetJetIDMVA()
))
fillers.append(mithep.nero.TausFiller(
TausName = pfTauIdMod.GetOutputName()
))
fillers.append(mithep.nero.LeptonsFiller(
ElectronsName = electronIdMod.GetOutputName(),
MuonsName = muonIdMod.GetOutputName(),
ElectronIdsName = electronTightId.GetOutputName(),
MuonIdsName = muonTightId.GetOutputName(),
VerticesName = goodPVFilterMod.GetOutputName(),
PFCandsName = mithep.Names.gkPFCandidatesBrn,
NoPUPFCandsName = separatePileUpMod.GetPFNoPileUpName(),
PUPFCandsName = separatePileUpMod.GetPFPileUpName()
))
fillers.append(mithep.nero.FatJetsFiller(
FatJetsName = fatJetIdMod.GetOutputName()
))
fillers.append(mithep.nero.MetFiller(
MetName = metCorrectionMod.GetOutputName(),
MuonsName = muonTightIdMask.GetOutputName(),
GenMetName = generatorMod.GetMCMETName()
))
fillers.append(mithep.nero.PhotonsFiller(
PhotonsName = photonIdMod.GetOutputName(),
MediumIdName = photonMediumId.GetOutputName(),
TightIdName = photonTightId.GetOutputName(),
VerticesName = goodPVFilterMod.GetOutputName()
))
fillers.append(mithep.nero.MonteCarloFiller())
fillers.append(mithep.nero.TriggerFiller())
fillers.append(mithep.nero.AllFiller())
# Output module: writes the Nero ntuple (nero.root), stamped with the
# bookkeeping head/tag strings defined above.
neroMod = mithep.NeroMod(
    Info = 'Nero',
    Head = head,
    Tag = tag,
    FileName = 'nero.root',
    PrintLevel = 0
)
# Register every configured filler (event, vertex, jets, ...) with the
# output module so each contributes its branches to the ntuple.
for filler in fillers:
    neroMod.AddFiller(filler)
# Assemble the module execution sequence.  The ``*`` / ``*=`` operators
# appear to chain Bambu modules in run order (presumably downstream modules
# consume upstream outputs — confirm against mithep docs).
sequence = goodPVFilterMod
# Generator-level information only exists in simulation, so the generator
# module is skipped when running on real data.
if not analysis.isRealData:
    sequence *= generatorMod
# ID/correction modules run first; the Nero writer (neroMod) comes last.
sequence *= separatePileUpMod * \
    jetCorrectionMod * \
    jetIdMod * \
    metCorrectionMod * \
    pfTauIdMod * \
    electronIdMod * \
    muonIdMod * \
    photonIdMod * \
    electronTightId * \
    muonTightId * \
    muonTightIdMask * \
    fatJetCorrectionMod * \
    fatJetIdMod * \
    photonMediumId * \
    photonTightId * \
    neroMod
# Allow inputs without an HLT tree, then install the sequence.
analysis.SetAllowNoHLTTree(True)
analysis.setSequence(sequence)
| [
"yiiyama@mit.edu"
] | yiiyama@mit.edu |
ecc82156cf415769d849cd147b8a00a30b5f94b0 | 28cb13c23d37ea87d57eae173d85692df8cf4a87 | /lab9_i_10/backend/todos/migrations/0001_initial.py | a8df615cf957eda7058e18ec8573d20d050894da | [] | no_license | jeremi420/aplikacje-internetowe-21686-185ic | e25ad1912a53242be5e7e7d46d6192bc59de9578 | a08c937fadcbe9986b4bb2c5f0a3a768ee6d7e96 | refs/heads/main | 2023-03-07T10:18:24.601142 | 2021-02-16T23:44:40 | 2021-02-16T23:44:40 | 311,286,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | # Generated by Django 3.1.5 on 2021-01-17 19:46
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema migration for the todos app: creates the Todo model."""

    # First migration of this app, so there is no prior migration state.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Todo',
            fields=[
                # Auto-incrementing surrogate primary key added by Django.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=120)),
                ('description', models.TextField()),
                # New todos start out not completed.
                ('completed', models.BooleanField(default=False)),
            ],
        ),
    ]
| [
"jerwier420@gmail.com"
] | jerwier420@gmail.com |
7d32ccc1b8c31ca35bf202a233e4f3f95ec0f4cc | 23239bbcca2fe6dc2b14688063b7f75abdc0e29d | /QuoteEngine/TextIngestor.py | 46fbd256e017acec8e388625f83aafe6be85321f | [] | no_license | luke-fs/udacity_meme_generator | 1d110aedbfbb203f9a071bc28daa51b576a64fc6 | 30c9cc5c9ea8fa5cb76a78dc66a651e2ef231712 | refs/heads/main | 2023-03-30T04:27:14.174334 | 2021-04-04T14:41:10 | 2021-04-04T14:41:10 | 353,123,922 | 0 | 0 | null | 2021-04-04T14:41:10 | 2021-03-30T19:51:19 | HTML | UTF-8 | Python | false | false | 872 | py |
from typing import List
import subprocess
import os
from .IngestorInterface import IngestorInterface
from .QuoteModel import QuoteModel
class TextIngestor(IngestorInterface):
    """Ingestor for plain-text quote files.

    Parses files in which each non-empty line holds one quote in the
    form ``<body> - <author>``.
    """

    # Only plain-text files are handled by this ingestor.
    allowed_extensions = ['txt']

    @classmethod
    def parse(cls, path: str) -> List[QuoteModel]:
        """Parse the text file at *path* into a list of QuoteModel objects.

        Args:
            path: Filesystem path to a ``.txt`` quote file.

        Returns:
            A list of QuoteModel instances, one per non-empty line.

        Raises:
            Exception: If the file extension is not supported.
        """
        if not cls.can_ingest(path):
            raise Exception('cannot ingest exception')

        quotes = []
        # 'with' guarantees the file is closed even if parsing raises,
        # unlike the previous explicit open()/close() pair.
        with open(path, "r") as file_ref:
            for line in file_ref:
                line = line.strip('\n\r').strip()
                if len(line) > 0:
                    parts = line.split(' - ')
                    quotes.append(QuoteModel(parts[0], parts[1]))
        return quotes
"luke.strauer1@googlemail.com"
] | luke.strauer1@googlemail.com |
e8b3288c3110b8995c15af8e6e5b8b9a674bc56d | 3d4d6dc268e605f81c280d94cdf16d7633313a15 | /app/app.py | 998ea2648ab00a7a7fb927656282f36feeed4e18 | [] | no_license | mcadhoc/Flask-AppBuilder-Skeleton | 6e0e4d78b0aee11c7632442e7f9dfd0a45d371f2 | e840fd3db6e065cf87e7c1333aea26292080f3fc | refs/heads/master | 2020-06-20T07:21:56.102240 | 2019-07-16T19:17:42 | 2019-07-16T19:17:42 | 197,040,763 | 0 | 0 | null | 2019-07-16T19:17:43 | 2019-07-15T17:09:54 | Python | UTF-8 | Python | false | false | 1,493 | py | from flask import Flask, render_template, redirect, url_for,request
from flask import make_response
app = Flask(__name__)
@app.route("/")
def home():
    """Root endpoint; returns a static plain-text greeting."""
    return "hi"
@app.route("/index")
def index():
    """Render the login page with an empty status message."""
    return render_template('login.html', message='')
url = 'https://www.foxnews.com/politics/aoc-squad-news-conference-trump-call-go-back-home'
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Score the module-level `url` for subjectivity and return it as JSON.

    Returns a response body of the form ``{"response": "<score>"}``.
    NOTE(review): the route also accepts POST, but the request body is
    never read; the score is computed for the hard-coded `url` on every
    call regardless of method — confirm this is intentional.
    """
    if request.method == 'GET':
        # Debug aid only: log the incoming request object.
        print(request)
    # Add Summarization and Bias code here.
    result = subjectivity(url)
    # Build the JSON payload by hand to keep the original wire format.
    resp = make_response('{"response": "' + str(result) + '"}')
    resp.headers['Content-Type'] = "application/json"
    print(resp)
    return resp
from textblob import TextBlob
from bs4 import BeautifulSoup
from urllib.request import urlopen
from lxml import etree
def subjectivity(url):
    """Return the TextBlob subjectivity score (0.0-1.0) of the page at *url*.

    Downloads the page, removes <script>/<style> elements, normalizes the
    remaining visible text, and runs TextBlob sentiment analysis on it.
    NOTE(review): performs network I/O on every call; urlopen errors
    propagate to the caller.
    """
    html = urlopen(url)
    soup = BeautifulSoup(html, "lxml")
    # https://stackoverflow.com/questions/22799990/beatifulsoup4-get-text-still-has-javascript
    # Drop script/style nodes so their contents don't pollute the text.
    for script in soup(["script", "style"]):
        script.decompose()
    text = soup.get_text()
    # Trim each line, split it into phrases, and rejoin only the
    # non-empty chunks, one per line.
    lines = (line.strip() for line in text.splitlines())
    chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
    text = '\n'.join(chunk for chunk in chunks if chunk)
    return TextBlob(text).sentiment.subjectivity
    # return text.encode("utf-8")
if __name__ == "__main__":
    # Development entry point: Flask's built-in server with the debugger on.
    app.run(debug = True)
| [
"yottzumm@gmail.com"
] | yottzumm@gmail.com |
3dc58c5a41c2665912c4b9f79f021b245d79b4cb | c76585f0e1a8cc35018bb11c6c01733fa2226df3 | /predictor-service/settings.py | b65c225a235d823d45160bfc596d9bff87c25056 | [
"MIT",
"Apache-2.0"
] | permissive | akvelon/Bitcoin-Transaction-Optimization | 9e8099b3ae64612bbaf95603e470763b22da9c5e | d5a9eb4322370b81721023daf8dd0eef0d3189a8 | refs/heads/master | 2023-06-08T18:28:03.464518 | 2023-05-29T16:10:41 | 2023-05-29T16:10:41 | 173,724,819 | 8 | 1 | Apache-2.0 | 2022-11-21T22:18:54 | 2019-03-04T10:35:27 | Jupyter Notebook | UTF-8 | Python | false | false | 949 | py | """
Copyright 2019 Akvelon Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
"""
import json
class Settings:
    """Read-only access to the predictor service's JSON configuration."""

    # Keys recognized in the configuration file.
    MODEL_PATH = 'model_path'
    SCALER_PATH = 'scaler_path'

    # Location of the JSON configuration file, relative to the working dir.
    CONFIG_PATH = 'config/config.json'

    def __init__(self):
        """Load and cache the configuration from CONFIG_PATH."""
        with open(Settings.CONFIG_PATH) as config_file:
            self.settings = json.load(config_file)

    def __getitem__(self, key):
        """Return the configured value for *key*, or '' when absent."""
        try:
            return self.settings[key]
        except KeyError:
            return ''
| [
"sergey.kubasov@akvelon.com"
] | sergey.kubasov@akvelon.com |
6e61a410a9a7f1971524fc55c3bfdf11aa95a064 | ceba110faf303886894c256739c759842d0f36e7 | /mymoney/core/tests/test_validators.py | b31f4512e412d79bf186b3732ec103b5037e1679 | [
"BSD-3-Clause"
] | permissive | dss2194/mymoney-server | 2ded0eec3a6fe31df0a25161d09002778ec76e47 | 40dc9fdd08b3561287a9153342b25c58de8ad8ce | refs/heads/master | 2022-03-06T13:21:06.482583 | 2018-09-06T12:23:56 | 2018-09-09T10:33:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,294 | py | from django.test import TestCase
from rest_framework import serializers
from ..validators import MinMaxValidator
class MinMaxValidatorTestCase(TestCase):
    """Exercise MinMaxValidator over missing, ordered, equal and inverted bounds."""

    @staticmethod
    def _validate(data):
        # Build a fresh validator wired to the 'min'/'max' keys and run it.
        MinMaxValidator(field_min='min', field_max='max')(data=data)

    def test_no_min(self):
        # No lower bound present: nothing to compare, validation passes.
        self._validate({'max': 10})

    def test_no_max(self):
        # No upper bound present: nothing to compare, validation passes.
        self._validate({'min': 10})

    def test_zero_lower(self):
        # Falsy-but-valid zero minimum must still be compared correctly.
        self._validate({'min': 0, 'max': 10})

    def test_zero_greater(self):
        # min above a zero max must be rejected.
        with self.assertRaises(serializers.ValidationError):
            self._validate({'min': 10, 'max': 0})

    def test_greater(self):
        # Inverted negative bounds must be rejected.
        with self.assertRaises(serializers.ValidationError):
            self._validate({'min': -10, 'max': -20})

    def test_equal(self):
        # Equal bounds are allowed.
        self._validate({'min': 5.5, 'max': 5.5})

    def test_lower(self):
        # Properly ordered float bounds are allowed.
        self._validate({'min': 5.4, 'max': 5.5})
| [
"yannick.chabbert@gmail.com"
] | yannick.chabbert@gmail.com |
29faaabb15279e3864e8a6f022d909da552617c5 | edab5b4487eb5619d717acd64a988930ba069b4a | /src/pool/colors.py | 8304ae2b4540b5142d4580d8f6ef6aa2acb40fe2 | [] | no_license | 18-500-b9/pool-simulator | e7a9313a567834797dfe5e0796796c202bc86650 | e2efd5a9b8d70cebf8cb670fc8a587851a909e14 | refs/heads/master | 2020-04-21T03:04:41.149379 | 2019-03-21T13:55:38 | 2019-03-21T13:55:38 | 169,274,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | """
RGB color values for PyGame
"""
# Standard RGB triples (red, green, blue; 0-255 per channel) for PyGame drawing.
AQUA = (0, 255, 255)
BLACK = (0, 0, 0)
BLUE = (0, 0, 255)
FUCHSIA = (255, 0, 255)
GRAY = (128, 128, 128)
GREEN = (0, 128, 0)
LIME = (0, 255, 0)
MAROON = (128, 0, 0)
NAVY_BLUE = (0, 0, 128)
OLIVE = (128, 128, 0)
PURPLE = (128, 0, 128)
RED = (255, 0, 0)
SILVER = (192, 192, 192)
TEAL = (0, 128, 128)
WHITE = (255, 255, 255)
YELLOW = (255, 255, 0)
ORANGE = (255, 165, 0)
| [
"samuelkim523@gmail.com"
] | samuelkim523@gmail.com |
c8ad214213403b0a248e5896f4ef5208e2cff18b | bb0ba7523f3ea5adf18acb5fe82bdfdc7423c9c9 | /Final_App/basic_calculation.py | c96bae878c1babe3fdb118535ca1a27c3ef8899b | [] | no_license | MairisLV/Final_Project | a899ea363301198088da7b8c90d9cca3c12b03aa | d94c6753650665764e518cd5196404206e90ee18 | refs/heads/main | 2023-04-28T07:21:12.423801 | 2021-05-11T20:09:12 | 2021-05-11T20:09:12 | 366,501,594 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | class Calculate_Loan():
    def __init__(self, loan_amount, term, interest_rate):
        """Store the loan parameters used by the interest calculation.

        Args:
            loan_amount: Principal of the loan.
            term: Number of compounding periods.
            interest_rate: Interest rate per period (e.g. 0.05 for 5%).
        """
        self.loan_amount = loan_amount
        self.term = term
        self.interest_rate = interest_rate
def interest(self):
# S=K(1+i)^n
for period in range(int(self.term)):
total = float(self.loan_amount) * float((1+float(self.interest_rate))**float(period+1))
total = total - int(self.loan_amount)
return total
| [
"mairis.baumanis@gmail.com"
] | mairis.baumanis@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.