Dataset schema (one record per source file; ranges are min-max, ⌀ = nullable):

| column | type |
|---|---|
| blob_id | string (length 40) |
| directory_id | string (length 40) |
| path | string (length 2-616) |
| content_id | string (length 40) |
| detected_licenses | list (0-69 items) |
| license_type | string (2 classes) |
| repo_name | string (length 5-118) |
| snapshot_id | string (length 40) |
| revision_id | string (length 40) |
| branch_name | string (length 4-63) |
| visit_date | timestamp[us] |
| revision_date | timestamp[us] |
| committer_date | timestamp[us] |
| github_id | int64 (2.91k-686M, ⌀) |
| star_events_count | int64 (0-209k) |
| fork_events_count | int64 (0-110k) |
| gha_license_id | string (23 classes) |
| gha_event_created_at | timestamp[us] |
| gha_created_at | timestamp[us] |
| gha_language | string (213 classes) |
| src_encoding | string (30 classes) |
| language | string (1 class) |
| is_vendor | bool (2 classes) |
| is_generated | bool (2 classes) |
| length_bytes | int64 (2-10.3M) |
| extension | string (246 classes) |
| content | string (length 2-10.3M) |
| authors | list (1 item) |
| author_id | string (length 0-212) |
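Each record below pairs repository and provenance metadata with the full source text of one Python file in the `content` column. A minimal sketch of how records with this schema could be inspected, assuming they have been stored as a Parquet file (the filename and the use of pandas are hypothetical):

import pandas as pd

# Hypothetical path; any Parquet container with the columns above would work.
df = pd.read_parquet("code_files.parquet")
# Scalar metadata columns from the schema above.
print(df[["repo_name", "path", "license_type", "length_bytes"]].head())
# The full source text of the first file lives in the `content` column.
print(df.iloc[0]["content"][:200])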
path: /modules/common/Utils.py | repo_name: opentargets/genetics-input-support | branch_name: refs/heads/main | detected_licenses: [] | license_type: no_license
blob_id: 9f8a99374525225c281f0b9898ed98c7f551646f | directory_id: 042a57026d63476c55e1c5f3ae25e017cc9a5f5e | content_id: a918e687e2f86f36281143c01d65f7822fa46355 | snapshot_id: 92ca81cbbc739fd5041be7957111bdf61735affa | revision_id: cfb9946e9dfa048a7193a6f0e6283866a7a7b657
visit_date: 2023-09-05T10:03:53.842214 | revision_date: 2021-11-19T16:22:31 | committer_date: 2021-11-19T16:22:31 | github_id: 429443143 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1452 | extension: py
content:
import logging
import shutil
import subprocess
from addict import Dict
import os
logger = logging.getLogger(__name__)
#print(os.environ["PATH"])
class Utils(object):
def __init__(self, config, outputs):
self.config = config
self.output_dir = outputs.prod_dir
@staticmethod
def check_path_command(cmd, yaml_cmd):
cmd_result = shutil.which(cmd)
if cmd_result is None:
logger.info(cmd + " not found. Using the path from config.yaml")
cmd_result = yaml_cmd
logger.debug(f"{cmd} path {cmd_result}")
return cmd_result
def gsutil_multi_copy_to(self, destination_bucket):
# print(os.environ["PATH"])
# cmd_result = shutil.which("gsutil")
# cmd = "gsutil -q -m cp -r " + self.yaml.output_dir + "/* gs://" + destination_bucket + "/"
# subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
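        # Copy everything under output_dir to the destination GCS bucket; -m enables parallel transfers.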
proc = subprocess.Popen(
["gsutil", "-m", "cp", "-r", self.output_dir + "/*", "gs://" + destination_bucket + "/"])
try:
outs, errs = proc.communicate()
        except Exception:
proc.kill()
outs, errs = proc.communicate()
@staticmethod
def resource_for_stage(resource):
resource_stage = Dict()
resource_stage.path = ""
resource_stage.uri = resource.uri
resource_stage.output_filename = os.path.basename(resource.uri)
return resource_stage
authors: ["cinzia.malangone@gmail.com"] | author_id: cinzia.malangone@gmail.com

path: /Jabbar/__init__.py | repo_name: vaporstack/Supybot-plugins | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 360ef2d9ed0c32fa420dfd9c5e8733d60b7c329e | directory_id: fea94f3477b15cb97b5496963e611ed9879b72f6 | content_id: e98ccc757116b1ecc96cc6f1ff546ecf4bc37aa8 | snapshot_id: 83835fe9c74a9918360a6e9d74395de3c2643ba5 | revision_id: bb1dea4f3448ff90e3ea43fecd08d95fd5d9d863
visit_date: 2020-03-11T21:44:17.254722 | revision_date: 2018-04-19T21:54:14 | committer_date: 2018-04-19T21:54:14 | github_id: 130273843 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 2717 | extension: py
content:
###
# Copyright (c) 2010, futurestack
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
Add a description of the plugin (to be presented to the user inside the wizard)
here. This should describe *what* the plugin does.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = ""
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('futurestack', 'st4tic software', 'info@futurestack.com')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = '' # 'http://supybot.com/Members/yourname/Jabbar/download'
import config
import plugin
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
authors: ["andrew@vaporstack.org"] | author_id: andrew@vaporstack.org

path: /affeBlog/affeBlog/urls.py | repo_name: imaffe/affeBlogDir | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 79c5d669fae418a9a451fad0c612337a2fb21735 | directory_id: e54cdaedad52619a52f10cc8ca8bf86e1c659788 | content_id: da94260d45e4d76813e8f8135d36e0391b563c8f | snapshot_id: e91a4f47cd4235f9e05756dfbe38ed1087dc8a84 | revision_id: 7facab5738be73d66ca7a029e5ad453be79404dd
visit_date: 2020-04-04T00:12:02.766437 | revision_date: 2018-11-17T06:23:52 | committer_date: 2018-11-17T06:23:52 | github_id: 155643767 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 858 | extension: py
content:
"""affeBlog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
from posts import views
urlpatterns = [
path('admin/', admin.site.urls),
path(r'posts/', include("posts.urls")), # appname.views.function_name
]
authors: ["affeisme@gmail.com"] | author_id: affeisme@gmail.com

path: /Feb-1/warming-up/SortInside_0202_LeeYoongyo.py | repo_name: NakyungIm/Algorithm-winter | branch_name: refs/heads/main | detected_licenses: [] | license_type: no_license
blob_id: 035b76f9df5d4c7f34d0bd0354f512060951a653 | directory_id: a95c3f8e8c511e07f13f5c84a8c2d809628bc00a | content_id: e86f44752fb11b826e57e46d4884ba2b1664a152 | snapshot_id: 5c0612869c11a31c497b4e736dddf3d6b7fbb329 | revision_id: 7795d7abf5d1416bbc5b748756764711e6d8f936
visit_date: 2023-03-10T20:55:37.671776 | revision_date: 2021-02-26T13:15:57 | committer_date: 2021-02-26T13:15:57 | github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 195 | extension: py
content:
# BOJ 1427 Sort Inside (Lee Yoongyo)
import sys
input = sys.stdin.readline
n = input().strip()  # strip the trailing newline so it is not sorted with the digits
result = sorted(n, reverse=True)
print(''.join(result))
authors: ["lykee2000@naver.com"] | author_id: lykee2000@naver.com

path: /models/tests/treatment_test.py | repo_name: NikTheGeek1/vet_management_app | branch_name: refs/heads/main | detected_licenses: [] | license_type: no_license
blob_id: 81710191d2ae3d96c953ee5a5c41de82ed7ecc38 | directory_id: 0875f97d2d24031f483c16090e80c90164134d6b | content_id: 4b8037d0c5089a25e263cbac2586fc67073e75b2 | snapshot_id: c74830d1ea518b1483f96af85e515837c47fc443 | revision_id: 27149e73c6e8895b08dbdaa5eb621ac4b33fc799
visit_date: 2023-01-21T18:03:35.586757 | revision_date: 2020-12-02T10:34:45 | committer_date: 2020-12-02T10:34:45 | github_id: 316482405 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 600 | extension: py
content:
import unittest
from models.treatment import Treatment
class TreatmentTest(unittest.TestCase):
def setUp(self):
self.treatment = Treatment(
'Anthelmintics',
'These are used to eliminate parasitic worms, which infest their systems and steal important nutrients.'
)
def test_title(self):
self.assertEqual('Anthelmintics', self.treatment.title)
def test_description(self):
self.assertEqual('These are used to eliminate parasitic worms, which infest their systems and steal important nutrients.', self.treatment.description)
authors: ["ntheodoropoulos@outlook.com"] | author_id: ntheodoropoulos@outlook.com

path: /recommender/mysql_reviews.py | repo_name: Catchi-Nichi/Catchinichi-AI | branch_name: refs/heads/main | detected_licenses: [] | license_type: no_license
blob_id: 600da661603eb4e4e159c375aff356090c18c74c | directory_id: 3b0682938d8a214c0ccd04e84920467a087c2569 | content_id: 6bb1a908da1d4867055ed6b080799d1b29ba2f3a | snapshot_id: 8eff987bcd68bd708752764189bbdb2e33ab561b | revision_id: 59429f0bd5d070a851ecc86be36cce0f39f36372
visit_date: 2023-05-31T12:14:09.954707 | revision_date: 2021-06-08T11:09:03 | committer_date: 2021-06-08T11:09:03 | github_id: 344741038 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 3378 | extension: py
content:
import pymysql
import numpy as np
import pandas as pd
import requests
import time
db = pymysql.connect(
user="username",
host="host url",
port=3306,
password="password",
database="database",
charset="utf8",
)
cursor = db.cursor(pymysql.cursors.DictCursor)
def get_cursor():
db = pymysql.connect(
user="username",
host="host url",
port=3306,
password="password",
database="database",
charset="utf8",
)
cursor = db.cursor(pymysql.cursors.DictCursor)
return cursor
def insert(
cursor,
brand=None,
comment=None,
en_name=None,
kr_brand=None,
kr_name=None,
longevity=None,
mood=None,
userNick=None,
category=0,
stars=None,
):
sql = "INSERT INTO reviews (brand, comment, en_name, kr_brand, kr_name, longevity, mood, userNick, stars, category) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
val = (brand, comment, en_name, kr_brand, kr_name, longevity, mood, userNick, stars)
cursor.execute(sql, val)
db.commit()
print("inserted row ID: ", cursor.lastrowid)
def delete(cursor, name):
sql = "DELETE FROM reviews WHERE name = %s"
value = (name,)
cursor.execute(sql, value)
db.commit()
print(cursor.rowcount, "record(s) deleted")
def get_reviews(cursor):
sql = "SELECT * FROM reviews"
cursor.execute(sql)
result = cursor.fetchall()
return result
def addUser(cursor, userNick, email):
sql = "INSERT INTO users (nick, email) VALUES (%s, %s)"
val = (userNick, email)
cursor.execute(sql, val)
db.commit()
def truncate(cursor):
sql = "TRUNCATE reviews"
cursor.execute(sql)
db.commit()
def upload_data(
brand=None,
comment=None,
en_name=None,
kr_brand=None,
kr_name=None,
longevity=None,
mood=None,
userNick=None,
category=0,
stars=None,
):
nichi_list = [
"diptyque",
"jomalone",
"santa maria novella",
"byredo",
"acqua di parma",
"creed",
"masion margiella",
]
if brand in nichi_list:
category = 1
url = "https://ziho-dev.com/review/addReview"
obj = {
"brand": brand,
"comment": comment,
"en_name": en_name,
"longevity": longevity,
"mood": mood,
"nick": userNick,
"category": category,
"stars": stars,
}
r = requests.post(url, obj)
print(r)
if __name__ == "__main__":
print(get_reviews(cursor))
# truncate(cursor)
# print("hi")
# for i in range(40):
# nick = "a" + str(i)
# email = nick + "@gmail.com"
# addUser(cursor, nick, email)
reviews = pd.read_csv("recommender/user_review_jiho.csv")
for index, row in reviews.iterrows():
userNick = str(row["userId"])
en_name = str(row["name"]).lower().replace(" ", "").strip()
brand = str(row["brand"]).lower().replace(" ", "").strip()
stars = int(row["rating"])
upload_data(en_name=en_name, userNick=userNick, brand=str(brand), stars=stars)
# print(en_name, brand, stars)
# time.sleep(0.5)
# v = [userNick, en_name, brand, stars]
# print(v)
# insert(cursor, userNick="a" + v[0], en_name=v[1], brand=v[2], stars=v[3])
df = pd.DataFrame(get_reviews(cursor))
print(df.head(10))
authors: ["rswfaf@gmail.com"] | author_id: rswfaf@gmail.com

path: /FastSim/CEPC/models/train_v2.py | repo_name: wenxingfang/FastSim_ML | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 82ff43b741b940189fc894e87ef4e5edfb124cb2 | directory_id: fa3e527114cd5799dddb0a25067da4923eae354e | content_id: d742eb6e71f86cd3c002779727f8150c0991ea9b | snapshot_id: e64c6b56ce2afd703d1ddda0ada2de6f65fde049 | revision_id: d2f1abbb2f6879313d5f4f137b64c4d8bf10fe83
visit_date: 2022-11-28T01:35:39.727895 | revision_date: 2020-08-03T15:47:37 | committer_date: 2020-08-03T15:47:37 | github_id: 284734310 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 19988 | extension: py
content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
file: train_v2.py
Add pre-trained regression net
description: main training script for [arXiv/1705.02355]
author: Luke de Oliveira (lukedeo@manifold.ai),
Michela Paganini (michela.paganini@yale.edu)
"""
from __future__ import print_function
import argparse
from collections import defaultdict
import logging
import h5py
import numpy as np
import os
#os.environ["CUDA_VISIBLE_DEVICES"]="-1" #do not use GPU
from six.moves import range
from sklearn.preprocessing import LabelEncoder
from sklearn.utils import shuffle
import sys
import yaml
from keras.models import model_from_json
from keras.models import model_from_yaml
from keras.models import load_model
import math
if __name__ == '__main__':
logger = logging.getLogger(
'%s.%s' % (
__package__, os.path.splitext(os.path.split(__file__)[-1])[0]
)
)
logger.setLevel(logging.INFO)
else:
logger = logging.getLogger(__name__)
def binary_crossentropy(target, output):
output = -target * np.log(output) - (1.0 - target) * np.log(1.0 - output)
return output
def bit_flip(x, prob=0.05):
""" flips a int array's values with some probability """
x = np.array(x)
selection = np.random.uniform(0, 1, x.shape) < prob
x[selection] = 1 * np.logical_not(x[selection])
return x
def get_parser():
parser = argparse.ArgumentParser(
description='Run CalGAN training. '
'Sensible defaults come from [arXiv/1511.06434]',
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument('--nb-epochs', action='store', type=int, default=50,
help='Number of epochs to train for.')
parser.add_argument('--batch-size', action='store', type=int, default=2,
help='batch size per update')
parser.add_argument('--latent-size', action='store', type=int, default=32,
help='size of random N(0, 1) latent space to sample')
parser.add_argument('--disc-lr', action='store', type=float, default=2e-5,
help='Adam learning rate for discriminator')
parser.add_argument('--gen-lr', action='store', type=float, default=2e-4,
help='Adam learning rate for generator')
parser.add_argument('--adam-beta', action='store', type=float, default=0.5,
help='Adam beta_1 parameter')
parser.add_argument('--prog-bar', action='store_true',
help='Whether or not to use a progress bar')
parser.add_argument('--no-attn', action='store_true',
help='Whether to turn off the layer to layer attn.')
parser.add_argument('--debug', action='store_true',
help='Whether to run debug level logging')
parser.add_argument('--d-pfx', action='store',
default='params_discriminator_epoch_',
help='Default prefix for discriminator network weights')
parser.add_argument('--g-pfx', action='store',
default='params_generator_epoch_',
help='Default prefix for generator network weights')
parser.add_argument('--dataset', action='store', type=str,
help='yaml file with particles and HDF5 paths (see '
'github.com/hep-lbdl/CaloGAN/blob/master/models/'
'particles.yaml)')
parser.add_argument('--reg-model-in', action='store',type=str,
default='',
help='input of trained reg model')
parser.add_argument('--reg-weight-in', action='store',type=str,
default='',
help='input of trained reg weight')
parser.add_argument('--gen-model-out', action='store',type=str,
default='',
help='output of trained gen model')
parser.add_argument('--gen-weight-out', action='store',type=str,
default='',
help='output of trained gen weight')
parser.add_argument('--dis-model-out', action='store',type=str,
default='',
help='output of trained dis model')
parser.add_argument('--dis-weight-out', action='store',type=str,
default='',
help='output of trained dis weight')
parser.add_argument('--comb-model-out', action='store',type=str,
default='',
help='output of trained combined model')
parser.add_argument('--comb-weight-out', action='store',type=str,
default='',
help='output of trained combined weight')
parser.add_argument('--gen-out', action='store',type=str,
default='',
help='output of trained gen model')
parser.add_argument('--comb-out', action='store',type=str,
default='',
help='output of trained combined model')
parser.add_argument('--dis-out', action='store',type=str,
default='',
help='output of dis model')
return parser
if __name__ == '__main__':
parser = get_parser()
parse_args = parser.parse_args()
# delay the imports so running train.py -h doesn't take 5,234,807 years
import keras.backend as K
import tensorflow as tf
session_conf = tf.ConfigProto()
session_conf.gpu_options.allow_growth = True
session = tf.Session(config=session_conf)
K.set_session(session)
from keras.layers import (Activation, AveragePooling2D, Dense, Embedding,
Flatten, Input, Lambda, UpSampling2D)
from keras.layers.merge import add, concatenate, multiply
from keras.models import Model
from keras.optimizers import Adam
from keras.utils.generic_utils import Progbar
#from keras.utils.vis_utils import plot_model
K.set_image_dim_ordering('tf')
from ops import (minibatch_discriminator, minibatch_output_shape, Dense3D,
calculate_energy, scale, inpainting_attention)
from architectures import build_generator_3D, build_discriminator_3D, build_regression, build_discriminator_3D_v1, build_discriminator_3D_v2 ,build_discriminator_3D_v3, build_generator_3D_v1
# batch, latent size, and whether or not to be verbose with a progress bar
if parse_args.debug:
logger.setLevel(logging.DEBUG)
# set up all the logging stuff
formatter = logging.Formatter(
'%(asctime)s - %(name)s'
'[%(levelname)s]: %(message)s'
)
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
nb_epochs = parse_args.nb_epochs
batch_size = parse_args.batch_size
latent_size = parse_args.latent_size
verbose = parse_args.prog_bar
no_attn = parse_args.no_attn
disc_lr = parse_args.disc_lr
gen_lr = parse_args.gen_lr
adam_beta_1 = parse_args.adam_beta
reg_model_in = parse_args.reg_model_in
reg_weight_in = parse_args.reg_weight_in
logger.debug('parameter configuration:')
logger.debug('number of epochs = {}'.format(nb_epochs))
logger.debug('batch size = {}'.format(batch_size))
logger.debug('latent size = {}'.format(latent_size))
logger.debug('progress bar enabled = {}'.format(verbose))
logger.debug('Using attention = {}'.format(no_attn == False))
logger.debug('discriminator learning rate = {}'.format(disc_lr))
logger.debug('generator learning rate = {}'.format(gen_lr))
    logger.debug(r'Adam $\beta_1$ parameter = {}'.format(adam_beta_1))
import h5py
d = h5py.File(parse_args.dataset, 'r')
first = np.expand_dims(d['Barrel_Hit'][:],-1)
mc_info = d['MC_info'][:]
d.close()
###### do normalization ##############
mc_info[:,0] = (mc_info[:,0]-90)/45
mc_info[:,1] = (mc_info[:,1])/10
mc_info[:,2] = (mc_info[:,2])/100
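    # Normalize the conditioning variables: theta is centered at 90 and scaled by 45,
    # phi is scaled by 10, and energy by 100, so all inputs are roughly O(1).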
first, mc_info = shuffle(first, mc_info, random_state=0)
sizes = [ first.shape[1], first.shape[2], first.shape[3], 1]
print("first:",first.shape,",mc info:", mc_info.shape)
print(sizes)
logger.info('Building discriminator')
calorimeter = Input(shape=sizes)
input_Info = Input(shape=(3, ))
#features =build_discriminator_3D(image=calorimeter, mbd=True, sparsity=False, sparsity_mbd=False)
#features =build_discriminator_3D(image=calorimeter, mbd=False, sparsity=False, sparsity_mbd=False)
#features =build_discriminator_3D_v1(image=calorimeter, mbd=False, sparsity=False, sparsity_mbd=False)
#features =build_discriminator_3D_v2(image=calorimeter, epsilon=0.001)
features =build_discriminator_3D_v3(image=calorimeter, info=input_Info, epsilon=0.001)
print('features:',features.shape)
#energies = calculate_energy(calorimeter)
#print('energies:',energies.shape)
## construct MBD on the raw energies
#nb_features = 10
#vspace_dim = 10
#minibatch_featurizer = Lambda(minibatch_discriminator,
# output_shape=minibatch_output_shape)
#K_energy = Dense3D(nb_features, vspace_dim)(energies)
## constrain w/ a tanh to dampen the unbounded nature of energy-space
#mbd_energy = Activation('tanh')(minibatch_featurizer(K_energy))
## absolute deviation away from input energy. Technically we can learn
## this, but since we want to get as close as possible to conservation of
## energy, just coding it in is better
#energy_well = Lambda(
# lambda x: K.abs(x[0] - x[1])
#)([total_energy, input_energy])
## binary y/n if it is over the input energy
#well_too_big = Lambda(lambda x: 10 * K.cast(x > 5, K.floatx()))(energy_well)
#p = features
#p = concatenate([features, energies])
'''
p = concatenate([
features,
scale(energies, 10),
scale(total_energy, 100),
energy_well,
well_too_big,
mbd_energy
])
'''
#print('features shape:', features.shape)
fake = Dense(1, activation='sigmoid', name='fakereal_output')(features)
#reg = build_regression(p)
#print('fake shape:', fake.shape)
#print('reg shape:', reg.shape)
#discriminator_outputs = [fake, total_energy]
discriminator_outputs = fake
#discriminator_outputs = [fake]+reg
#print('discriminator_outputs shape:', discriminator_outputs.shape)
discriminator_losses = 'binary_crossentropy'
# ACGAN case
'''
if nb_classes > 1:
logger.info('running in ACGAN for discriminator mode since found {} '
'classes'.format(nb_classes))
aux = Dense(1, activation='sigmoid', name='auxiliary_output')(p)
discriminator_outputs.append(aux)
# change the loss depending on how many outputs on the auxiliary task
if nb_classes > 2:
discriminator_losses.append('sparse_categorical_crossentropy')
else:
discriminator_losses.append('binary_crossentropy')
'''
#discriminator = Model(calorimeter, discriminator_outputs, name='discriminator')
discriminator = Model([calorimeter, input_Info] , discriminator_outputs, name='discriminator')
#print('discriminator check:', len(set(discriminator.inputs)), len(discriminator.inputs))
discriminator.compile(
optimizer=Adam(lr=disc_lr, beta_1=adam_beta_1),
loss=discriminator_losses
)
logger.info('Building generator')
latent = Input(shape=(latent_size, ), name='z')
input_info = Input(shape=(3, ), dtype='float32')
generator_inputs = [latent, input_info]
# ACGAN case
'''
if nb_classes > 1:
logger.info('running in ACGAN for generator mode since found {} '
'classes'.format(nb_classes))
# label of requested class
image_class = Input(shape=(1, ), dtype='int32')
lookup_table = Embedding(nb_classes, latent_size, input_length=1,
embeddings_initializer='glorot_normal')
emb = Flatten()(lookup_table(image_class))
# hadamard product between z-space and a class conditional embedding
hc = multiply([latent, emb])
# requested energy comes in GeV
h = Lambda(lambda x: x[0] * x[1])([hc, scale(input_energy, 100)])
generator_inputs.append(image_class)
else:
# requested energy comes in GeV
h = Lambda(lambda x: x[0] * x[1])([latent, scale(input_energy, 100)])
'''
h = concatenate([latent, input_info])
#img_layer = build_generator_3D(h, 30 , 30 , 29)
img_layer = build_generator_3D_v1(h, 30 , 30 , 29)
print('img_layer shape:',img_layer.shape)
'''
if not no_attn:
logger.info('using attentional mechanism')
# resizes from (3, 96) => (12, 12)
zero2one = AveragePooling2D(pool_size=(1, 8))(
UpSampling2D(size=(4, 1))(img_layer0))
img_layer1 = inpainting_attention(img_layer1, zero2one)
# resizes from (12, 12) => (12, 6)
one2two = AveragePooling2D(pool_size=(1, 2))(img_layer1)
img_layer2 = inpainting_attention(img_layer2, one2two)
'''
output_info = Lambda(lambda x: x)(input_info) # same as input
generator_outputs = [img_layer ,output_info]
generator = Model(generator_inputs, generator_outputs, name='generator')
generator.compile(
optimizer=Adam(lr=gen_lr, beta_1=adam_beta_1),
loss='binary_crossentropy'
)
print('h3')
######### regression part ##########################
reg_model = load_model(parse_args.reg_model_in, custom_objects={'tf': tf})
reg_model.trainable = False
reg_model.name = 'regression'
reg_model.compile(
optimizer=Adam(lr=gen_lr, beta_1=adam_beta_1),
loss='binary_crossentropy'
)
###################################
# build combined model
# we only want to be able to train generation for the combined model
discriminator.trainable = False
combined_outputs = [discriminator( generator(generator_inputs)), reg_model((generator(generator_inputs))[0]) ]
print('h31')
combined_losses = ['binary_crossentropy', 'mae']
combined = Model(generator_inputs, combined_outputs, name='combined_model')
print('h4')
combined.compile(
optimizer=Adam(lr=gen_lr, beta_1=adam_beta_1),
loss=combined_losses
)
logger.info('commencing training')
print('total sample:', first.shape[0])
disc_outputs_real = np.ones(batch_size)
disc_outputs_fake = np.zeros(batch_size)
loss_weights = np.ones(batch_size)
combined_loss_weights = [np.ones(batch_size), 1*np.ones(batch_size)]
for epoch in range(nb_epochs):
logger.info('Epoch {} of {}'.format(epoch + 1, nb_epochs))
nb_batches = int(first.shape[0] / batch_size)
if verbose:
progress_bar = Progbar(target=nb_batches)
epoch_gen_loss = []
epoch_disc_loss = []
for index in range(nb_batches):
if verbose:
progress_bar.update(index)
else:
if index % 100 == 0:
logger.info('processed {}/{} batches'.format(index + 1, nb_batches))
elif index % 10 == 0:
logger.debug('processed {}/{} batches'.format(index + 1, nb_batches))
# generate a new batch of noise
noise = np.random.normal(0, 1, (batch_size, latent_size))
# get a batch of real images
image_batch = first[index * batch_size:(index + 1) * batch_size]
info_batch = mc_info[index * batch_size:(index + 1) * batch_size]
sampled_energies = np.random.uniform(0.1, 1, (batch_size, 1))
sampled_theta = np.random.uniform(-1 , 1, (batch_size, 1))
sampled_phi = np.random.uniform(-1 , 1, (batch_size, 1))
sampled_info = np.concatenate((sampled_theta, sampled_phi, sampled_energies),axis=-1)
generator_inputs = [noise, sampled_info]
#generator_inputs = [noise]
generated_images = generator.predict(generator_inputs, verbose=0)
#disc_outputs_real = [np.ones(batch_size), info_batch]
# downweight the energy reconstruction loss ($\lambda_E$ in paper)
real_batch_loss = discriminator.train_on_batch(
[image_batch, info_batch],
disc_outputs_real,
loss_weights
)
#print('real_batch_loss=',real_batch_loss)
# note that a given batch should have either *only* real or *only* fake,
# as we have both minibatch discrimination and batch normalization, both
# of which rely on batch level stats
fake_batch_loss = discriminator.train_on_batch(
generated_images,
disc_outputs_fake,
loss_weights ##should we put reg here also?
)
'''
print('fake_batch_loss=',fake_batch_loss)
'''
if index == (nb_batches-1):
real_pred = discriminator.predict_on_batch([image_batch, info_batch])
fake_pred = discriminator.predict_on_batch(generated_images)
print('real_pred:\n',real_pred)
print('fake_pred:\n',fake_pred)
print('binary_crossentropy real\n:', binary_crossentropy(disc_outputs_real, real_pred))
print('binary_crossentropy fake\n:', binary_crossentropy(disc_outputs_fake, fake_pred))
epoch_disc_loss.append(
(np.array(fake_batch_loss) + np.array(real_batch_loss)) / 2)
            # we want to train the generator to trick the discriminator
            # For the generator, we want all the {fake, real} labels to say real
gen_losses = []
# we do this twice simply to match the number of batches per epoch used to
# train the discriminator
for _ in range(2):
noise = np.random.normal(0, 1, (batch_size, latent_size))
sampled_energies = np.random.uniform(0.1, 1, (batch_size, 1))
sampled_theta = np.random.uniform(-1 , 1, (batch_size, 1))
sampled_phi = np.random.uniform(-1 , 1, (batch_size, 1))
sampled_info = np.concatenate((sampled_theta, sampled_phi, sampled_energies),axis=-1)
combined_inputs = [noise, sampled_info]
combined_outputs = [np.ones(batch_size), sampled_info]
#combined_outputs = [trick]
gen_losses.append(combined.train_on_batch(
combined_inputs,
combined_outputs,
combined_loss_weights
))
epoch_gen_loss.append(np.mean(np.array(gen_losses), axis=0))
logger.info('Epoch {:3d} Generator loss: {}'.format(
epoch + 1, np.mean(epoch_gen_loss, axis=0)))
logger.info('Epoch {:3d} Discriminator loss: {}'.format(
epoch + 1, np.mean(epoch_disc_loss, axis=0)))
# save weights every epoch
'''
generator .save_weights(parse_args.gen_weight_out, overwrite=True)
discriminator.save_weights(parse_args.dis_weight_out, overwrite=True)
combined .save_weights(parse_args.comb_weight_out, overwrite=True)
gen_yaml_string = generator.to_yaml()
dis_yaml_string = discriminator.to_yaml()
comb_yaml_string = combined.to_yaml()
open(parse_args.gen_model_out, 'w').write(gen_yaml_string)
open(parse_args.dis_model_out, 'w').write(dis_yaml_string)
open(parse_args.comb_model_out, 'w').write(comb_yaml_string)
'''
generator.save(parse_args.gen_out)
discriminator.save(parse_args.dis_out)
combined .save(parse_args.comb_out)
print('done reg training')
authors: ["1473717798@qq.com"] | author_id: 1473717798@qq.com

path: /price_dimension/models/product_prices_table.py | repo_name: apesquero/manzano | branch_name: refs/heads/9.0 | detected_licenses: [] | license_type: no_license
blob_id: 2566d6f5b764a90a4a8e2da78cba0dd0d3ed944e | directory_id: bfb3dfe40bfc33465bdc140cfa1a948342de8374 | content_id: 2a7f929b89df795e4a95e3ae93032241d6de87dd | snapshot_id: 31e3d9b81925d6025b760f93f5b896e60b255cb5 | revision_id: e94d450e263974c35bea446fd3f85d95e957dabb
visit_date: 2021-01-15T08:42:13.549941 | revision_date: 2017-04-28T17:22:14 | committer_date: 2017-04-28T17:22:14 | github_id: 99569666 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: 2017-08-07T11:11:48 | gha_created_at: 2017-08-07T11:11:48 | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1652 | extension: py
content:
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2017 Solucións Aloxa S.L. <info@aloxa.eu>
# Alexandre Díaz <alex@aloxa.eu>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api
import openerp.addons.decimal_precision as dp
class product_prices_table(models.Model):
_name = 'product.prices_table'
pos_x = fields.Float(string="X", required=True)
pos_y = fields.Float(string="Y", required=True)
value = fields.Float(string="Value", digits=dp.get_precision('Product Price'))
sale_product_tmpl_id = fields.Many2one('product.template', 'Product Template')
# cost_product_tmpl_id = fields.Many2one('product.template', 'Product Template')
supplier_product_id = fields.Many2one('product.supplierinfo', 'Product Supplier Info')
authors: ["dev@redneboa.es"] | author_id: dev@redneboa.es

path: /day33/socketserver新模块/client_.py | repo_name: gaoi311/python_learning | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: b98dca501e5fa74b73d5c079eefb1e05f2d5cc33 | directory_id: 178eae9ae8182858e4ccc0921f684ddf50c7d2aa | content_id: e80f4817411199d4849e95da16b964ba046fd9e0 | snapshot_id: fa11e88b83ad8870ff357dd55e8318a35c7fd12e | revision_id: 44c822da0dba406772627c71d12f94bd287ea2a6
visit_date: 2021-03-01T02:29:50.284017 | revision_date: 2020-03-08T05:17:41 | committer_date: 2020-03-08T05:17:41 | github_id: 245747273 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 168 | extension: py
content:
import socket
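# Minimal TCP client: connect to 127.0.0.1:8080, send one line of input, print the reply.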
sk = socket.socket()
sk.connect(('127.0.0.1',8080))
msg_s = input('>>>')
sk.send(msg_s.encode('utf-8'))
print(sk.recv(1024).decode('utf-8'))
sk.close()
authors: ["13956965930@163.com"] | author_id: 13956965930@163.com

path: /network_ui_react_frontend/src/tools/fsm_diff/cli.py | repo_name: benthomasson/network-ui | branch_name: refs/heads/react | detected_licenses: ["Apache-2.0"] | license_type: permissive
blob_id: 011abc92879e004e3a386dd893625876fef766c5 | directory_id: 80453e94cfcda83b5796f17f5e3a5a8616c50288 | content_id: 856ef41ef1965258ac67d842260695d0492176fa | snapshot_id: eaca0ab0ed8a5ab3ba9d2e2aab81b1e38248b649 | revision_id: 8c3e114d331a57848acb4004ccf090148fd36e86
visit_date: 2023-01-13T07:56:38.914187 | revision_date: 2019-06-19T14:58:56 | committer_date: 2019-06-19T14:58:56 | github_id: 145854921 | star_events_count: 1 | fork_events_count: 1 | gha_license_id: Apache-2.0 | gha_event_created_at: 2023-01-06T01:08:15 | gha_created_at: 2018-08-23T13:07:43 | gha_language: HTML | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 2318 | extension: py
content:
"""
Usage:
fsm_diff [options] <a> <b> [<output>]
Options:
-h, --help Show this page
--debug Show debug logging
--verbose Show verbose logging
"""
from __future__ import print_function
from docopt import docopt
import logging
import sys
import yaml
logger = logging.getLogger('cli')
def fsm_diff(a_name, b_name, a, b, silent=True):
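    # Return the symmetric difference of two FSMs: the states and transitions present in only one of them.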
a_states = {x['label'] for x in a['states']}
b_states = {x['label'] for x in b['states']}
missing_in_a = b_states - a_states
missing_in_b = a_states - b_states
if (missing_in_b) and not silent:
print("Extra states in " + a_name + ":\n ", "\n ".join(list(missing_in_b)))
if (missing_in_a) and not silent:
print("Extra states in " + b_name + ":\n ", "\n ".join(list(missing_in_a)))
new_states = missing_in_b.union(missing_in_a)
a_transitions = {tuple(sorted(x.items())) for x in a['transitions']}
b_transitions = {tuple(sorted(x.items())) for x in b['transitions']}
missing_in_a = b_transitions - a_transitions
missing_in_b = a_transitions - b_transitions
if (missing_in_b) and not silent:
print("Extra transitions in " + a_name + ":\n ", "\n ".join(map(str, missing_in_b)))
if (missing_in_a) and not silent:
print("Extra transitions in " + b_name + ":\n ", "\n ".join(map(str, missing_in_a)))
new_transitions = missing_in_b.union(missing_in_a)
data = dict(states=[dict(label=x) for x in list(new_states)],
transitions=[dict(x) for x in list(new_transitions)])
return data
def main(args=None):
if args is None:
args = sys.argv[1:]
parsed_args = docopt(__doc__, args)
if parsed_args['--debug']:
logging.basicConfig(level=logging.DEBUG)
elif parsed_args['--verbose']:
logging.basicConfig(level=logging.INFO)
else:
logging.basicConfig(level=logging.WARNING)
    with open(parsed_args['<a>']) as f:
        a = yaml.safe_load(f)
    with open(parsed_args['<b>']) as f:
        b = yaml.safe_load(f)
data = fsm_diff(parsed_args['<a>'], parsed_args['<b>'], a, b, silent=False)
if parsed_args['<output>']:
with open(parsed_args['<output>'], 'w') as f:
f.write(yaml.dump(data, default_flow_style=False))
return 0
authors: ["ben.thomasson@gmail.com"] | author_id: ben.thomasson@gmail.com

path: /face_detect/sample/learning_opencv3_with_python_sample/Chapter 3_Code/contours.py | repo_name: minatuyang/RASP-ATTA | branch_name: refs/heads/master | detected_licenses: ["MIT"] | license_type: permissive
blob_id: 8f9aa4a342dfbfefc5c1d1b17f393ef4350ff355 | directory_id: a95075110b8d0dc3d486bb502e62f73ea50bd270 | content_id: fb2df781e4a0e9de36e93b45fffea2280f8b3152 | snapshot_id: 5e715377e7de0dc6f49ed0a3f98b9701a9798a83 | revision_id: e182248da2f9f131e4e1aca5a2198b6ae910424e
visit_date: 2020-04-03T09:47:39.903356 | revision_date: 2018-11-14T11:48:26 | committer_date: 2018-11-14T11:48:26 | github_id: 155176168 | star_events_count: 1 | fork_events_count: 0 | gha_license_id: MIT | gha_event_created_at: 2018-11-11T08:51:51 | gha_created_at: 2018-10-29T08:21:18 | gha_language: Python | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 413 | extension: py
content:
import cv2
import numpy as np
img = np.zeros((200, 200), dtype=np.uint8)
img[50:150, 50:150] = 255
ret, thresh = cv2.threshold(img, 127, 255, 0)
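# In OpenCV 3.x, findContours returns (image, contours, hierarchy); OpenCV 4.x drops the first element.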
image, contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
color = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
img = cv2.drawContours(color, contours, -1, (0,255,0), 2)
cv2.imshow("contours", color)
cv2.waitKey()
cv2.destroyAllWindows()
authors: ["noreply@github.com"] | author_id: minatuyang.noreply@github.com

path: /catalog/urls.py | repo_name: henrymbuguak/Library-Website-Created-Using-Django-Web-Framework | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 8c0b3e0914cf0ca877a1751732c662a29e0c9ea8 | directory_id: 9a0d89613acc1a5308bc88adec0a22e0179893ca | content_id: fbce3c8f006f5523a6aa735acd12cb83192a9c48 | snapshot_id: bddc20e7df070427fa8cc1d59981ada4474d9205 | revision_id: 3b5a7f27f91fed2400b2ff6579aed5672498a075
visit_date: 2021-08-16T00:37:41.404814 | revision_date: 2017-11-18T17:07:47 | committer_date: 2017-11-18T17:07:47 | github_id: 110505643 | star_events_count: 1 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 514 | extension: py
content:
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^books/$', views.BookListView.as_view(), name='books'),
url(r'^book/(?P<pk>\d+)$', views.BookDetailView.as_view(), name='book-detail'),
url(r'^authors/$', views.AuthorListView.as_view(), name='authors'),
url(r'^author/(?P<pk>\d+)$', views.AuthorDetailView.as_view(), name='author-detail'),
url(r'^mybooks/$', views.LoanedBooksByUserListView.as_view(), name='my-borrowed'),
]
authors: ["henrymbuguak@gmail.com"] | author_id: henrymbuguak@gmail.com

path: /apps/billing/migrations/0013_auto_20160419_1133.py | repo_name: SVArago/alexia | branch_name: refs/heads/master | detected_licenses: ["BSD-3-Clause", "LicenseRef-scancode-unknown-license-reference"] | license_type: permissive
blob_id: 08adccf03c06d1bda8e125df2d4f98d452bef1fc | directory_id: 8171b5d2f5f4ecd240288aa51a9d40fe17b6c9e9 | content_id: 7c8578251f614a9ffba448362a17a1d8111e3a66 | snapshot_id: c34fadd24100095b37ef21b3d4cf1bca01f2dfa6 | revision_id: 19b54ac1c6486de46666a41625c8c0d8fcdbc611
visit_date: 2023-01-12T06:36:17.127012 | revision_date: 2022-12-21T22:16:10 | committer_date: 2022-12-21T22:16:10 | github_id: 44772731 | star_events_count: 3 | fork_events_count: 0 | gha_license_id: NOASSERTION | gha_event_created_at: 2018-10-30T20:33:29 | gha_created_at: 2015-10-22T20:54:59 | gha_language: Python | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 688 | extension: py
content:
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-19 09:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('billing', '0012_auto_20160412_1420'),
]
operations = [
migrations.RemoveField(
model_name='permanentproduct',
name='stockproduct',
),
migrations.AlterField(
model_name='order',
name='synchronized',
field=models.BooleanField(default=False, help_text='Designates whether this transaction is imported by the organization.', verbose_name='synchronized'),
),
]
authors: ["cjaebbers@gmail.com"] | author_id: cjaebbers@gmail.com

path: /1551_MinimumOperationstoMakeArrayEqual.py | repo_name: anushavoloju/LeetCode-Practice | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 1d4f63c579309d3daaf3459e02a65087f4d5941b | directory_id: 89283ca90688d17a440258896135d978e12db268 | content_id: 244327f865227d945820c4bb8edf2e7e6090f033 | snapshot_id: 6433744d8bacc4d34d20536975c0bcc93b1ff3a0 | revision_id: 64d36a9a556ec1a262b7685edaceee10bf4fbd61
visit_date: 2022-12-04T21:52:40.355469 | revision_date: 2020-08-20T20:20:06 | committer_date: 2020-08-20T20:20:06 | github_id: 286263422 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: 2020-08-20T20:20:07 | gha_created_at: 2020-08-09T15:25:49 | gha_language: Python | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 359 | extension: py
content:
class Solution:
def minOperations(self, n: int) -> int:
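        # arr[i] = 2*i + 1 averages to n; sum the gaps (n - mid) over the lower half of the array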
mid = n
res = 0
if mid % 2 == 1:
mid = mid - 2
else:
mid = mid - 1
while mid > 0:
res = res + n - mid
mid = mid - 2
return res
s = Solution()
print(s.minOperations(3))
print(s.minOperations(6))
authors: ["anusha@Anushas-MacBook-Air.local"] | author_id: anusha@Anushas-MacBook-Air.local

path: /scripts/create_default_config.py | repo_name: wizzdev-pl/esp32_examples | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: b8df56163b91887f23a964adba1ab86e80928ced | directory_id: d68bde9e2c83fd9d57aa6d626f0c68efe74c29d6 | content_id: 1327703a4b5192a95159bdd2071f0d3d4e049223 | snapshot_id: 354b2af5483b76f7acc6c0fc381c42492ea1ba59 | revision_id: edfcc1496f50ed92a9c056f702a0468f16f4079a
visit_date: 2022-11-28T21:22:03.340696 | revision_date: 2020-08-14T07:21:54 | committer_date: 2020-08-14T07:21:54 | github_id: 268744637 | star_events_count: 0 | fork_events_count: 1 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 896 | extension: py
content:
import os
import json
PATH_TO_JSON = './src/config.json'
if os.path.exists(PATH_TO_JSON):
print(f"Config exists at: ", os.path.abspath(PATH_TO_JSON))
exit(1)
else:
config_dict = {
"ssid": "ssid",
"password": "password",
"local_endpoint": "",
"aws_endpoint": "",
'client_id': "default_id",
'topic': "default_topic",
'use_aws': False,
'data_aquisition_period_ms': 2000,
'data_publishing_period_s': 20,
'use_dht': False,
'dht_pin': 4,
'wifi_connection_timeout': 5000, # ms
'mqtt_port': 1883,
'mqtt_port_ssl': 8883,
'mqtt_timeout': 4000, # ms
"AP_config_done": False
}
with open(PATH_TO_JSON, 'w+') as f:
f.write(json.dumps(config_dict, indent=4))
print(f"Default config written to {os.path.abspath(PATH_TO_JSON)}")
exit(0)
authors: ["dmian.kurek@wizzdev.pl"] | author_id: dmian.kurek@wizzdev.pl

path: /contacts/views.py | repo_name: toku31/btre_project | branch_name: refs/heads/main | detected_licenses: [] | license_type: no_license
blob_id: 6e32cc1faea70c80e3b1a5752c3892127c2528d3 | directory_id: ab394ee9ac2e025d520cbe665cb0ec9d01fd3deb | content_id: 21fe8e7bbb5edab2df4444c4e8fbe38947ff2e73 | snapshot_id: 2f86d121f497bd9b8b131408b7bcea4e133c269d | revision_id: 2c97c655fa39f104d67961ac5ac98c967500376d
visit_date: 2023-02-18T10:36:38.078431 | revision_date: 2021-01-09T10:25:56 | committer_date: 2021-01-09T10:25:56 | github_id: 287742896 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1615 | extension: py
content:
from django.shortcuts import render, redirect
from django.contrib import messages
from django.core.mail import send_mail
from .models import Contact
def contact(request):
if request.method == 'POST':
print('HELLO')
# return
listing_id = request.POST['listing_id']
listing = request.POST['listing']
name = request.POST['name']
email = request.POST['email']
phone = request.POST['phone']
message = request.POST['message']
user_id = request.POST['user_id']
realtor_email = request.POST['realtor_email']
        # Check if the user has already made an inquiry for this listing
        if request.user.is_authenticated:
            user_id = request.user.id
            has_contracted = Contact.objects.all().filter(listing_id=listing_id, user_id=user_id)
            if has_contracted:
                messages.error(request, 'You have already made an inquiry for this listing')
                return redirect('/listings/'+listing_id)
contact = Contact(listing=listing ,listing_id=listing_id, name=name ,email=email, phone=phone, message=message, user_id=user_id)
contact.save()
        # Send email
        send_mail(
            'Property Listing Inquiry',
            'There has been an inquiry for ' + listing + '. Sign into the admin panel for more info', 'maple9877@gmail.com',
[realtor_email, 'palmtree8880@gmail.com'],
fail_silently=False
)
messages.success(request, 'Your request has been submitted, a realtor will get back to you soon')
return redirect('/listings/'+listing_id)
authors: ["garden627@yahoo.co.jp"] | author_id: garden627@yahoo.co.jp

path: /Django/Exam_Project_Rest/my_project/bears/views.py | repo_name: dbzahariev/Python-and-Django | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 3539eb7f599c5a37af6771ab7df88a316ddfc448 | directory_id: e292ea10855a50c8098ede4da1fd4d0896323e8c | content_id: c847446b54e3ec628139dc8062515c0ba2c21a8b | snapshot_id: f794212a21158d524bd1a7d9d5411cd58ba65f3e | revision_id: ba15758db3ee0726a7e5c80c96c2b106206ae66a
visit_date: 2020-05-18T18:44:35.030202 | revision_date: 2019-05-25T13:24:35 | committer_date: 2019-05-25T13:24:35 | github_id: 184572887 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 2500 | extension: py
content:
from rest_framework import exceptions
from rest_framework import generics
from rest_framework.permissions import IsAuthenticated
from .models import Bear
from .serializers import BearSerializer, BearCreateSerializer
# Create your views here.
# class BearList(generic.ListView):
# model = Bear
# template_name = 'bear_list.html'
# context_object_name = 'bears'
class MethodSerializerView(object):
'''
Utility class for get different serializer class by method.
For example:
method_serializer_classes = {
('GET', ): MyModelListViewSerializer,
('PUT', 'PATCH'): MyModelCreateUpdateSerializer
}
'''
method_serializer_classes = None
def get_serializer_class(self):
assert self.method_serializer_classes is not None, (
'Expected view %s should contain method_serializer_classes '
'to get right serializer class.' %
(self.__class__.__name__,)
)
for methods, serializer_cls in self.method_serializer_classes.items():
if self.request.method in methods:
return serializer_cls
raise exceptions.MethodNotAllowed(self.request.method)
class Bears(MethodSerializerView, generics.ListCreateAPIView):
queryset = Bear.objects.all()
serializer_class = BearSerializer
    # Keys must be tuples, as in the MethodSerializerView docstring; ('GET') is just the string 'GET'
    method_serializer_classes = {
        ('GET',): BearSerializer,
        ('POST',): BearCreateSerializer,
    }
permission_classes = [IsAuthenticated]
class BearDetail(MethodSerializerView, generics.RetrieveUpdateDestroyAPIView):
queryset = Bear.objects.all()
# serializer_class = BearSerializer
    method_serializer_classes = {
        ('GET',): BearSerializer,
        ('PUT', 'PATCH'): BearCreateSerializer,
    }
permission_classes = [IsAuthenticated]
# class BearDetails(LoginRequiredMixin, generic.DetailView):
# login_url = '/accounts/login/'
# model = Bear
# template_name = 'bear_detail.html'
# context_object_name = 'bear'
# class UserBears(LoginRequiredMixin, generic.ListView):
# login_url = '/accounts/login/'
# model = Bear
# template_name = 'bear_list.html'
# context_object_name = 'bears'
#
# def get_queryset(self):
# user_id = int(self.request.user.id)
# try:
# author = ProfileUser.objects.all().filter(user__pk=user_id)[0]
# bears = Bear.objects.all().filter(author=author.pk)
# return bears
# except:
# return []
authors: ["d.zahariev@devrealm.bg"] | author_id: d.zahariev@devrealm.bg

path: /sensor_http.py | repo_name: thewishy/clock | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 42916cafa5ff0a0d45c22ea8101061172aa42f27 | directory_id: 832fb0f946bcfe123f3ab749248f23fe566e5e60 | content_id: 639995cca3aef49b1df202b37882d5e18d0ad1fb | snapshot_id: 1d756e4ee983a0520241383ec5a2172836ca1235 | revision_id: 5ecfa268bffd8bf8eecc0893545956ab4c1b3a26
visit_date: 2022-10-06T17:09:51.036552 | revision_date: 2022-09-07T21:06:04 | committer_date: 2022-09-07T21:06:04 | github_id: 120080639 | star_events_count: 3 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1459 | extension: py
content:
"""
Very simple HTTP server in python.
"""
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import SocketServer
from multiprocessing import Queue
import time
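# Note: BaseHTTPServer, SocketServer and the bare print statements below are Python 2 APIs.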
class QueuingHTTPServer(HTTPServer):
def __init__(self, server_address, RequestHandlerClass, sonos_queue, light_queue, lux_queue, bind_and_activate=True):
HTTPServer.__init__(self, server_address, RequestHandlerClass, bind_and_activate)
self.sonos_queue = sonos_queue
self.light_queue = light_queue
self.lux_queue = lux_queue
class S(BaseHTTPRequestHandler):
def _set_headers(self):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
def do_GET(self):
#print self.path
if (self.path == "/light_toggle"):
print "Light Requested"
self.server.light_queue.put("Toggle");
if (self.path.startswith('/brightness/')):
self.server.lux_queue.put(int(self.path[12:]))
self._set_headers()
self.wfile.write("Request Processed\n")
def log_message(self, format, *args):
return
def run(sonos_queue,light_queue,lux_queue,server_class=QueuingHTTPServer, handler_class=S, port=8000):
server_address = ('', port)
httpd = server_class(server_address, handler_class, sonos_queue,light_queue,lux_queue)
print 'Starting httpd...'
httpd.serve_forever()
authors: ["steve_scott11@hotmail.com"] | author_id: steve_scott11@hotmail.com

path: /pyobs_weather/celery.py | repo_name: pyobs/pyobs-weather | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: a6bcf3961f6f428397c402c8eccff8c101d1631a | directory_id: 5757fc68b39418b1e0831143d87e6fa88a31adc9 | content_id: 4f88a1eb1b85cbf66855976f3f3756a9660049a2 | snapshot_id: 764c1c9fffdefbddf7e40dc9cb18bcf2821a7ed9 | revision_id: f375dc77878ab7c6ee306401c6501237d1521610
visit_date: 2023-08-17T09:12:44.930231 | revision_date: 2023-01-22T15:46:50 | committer_date: 2023-01-22T15:46:50 | github_id: 239447823 | star_events_count: 0 | fork_events_count: 1 | gha_license_id: null | gha_event_created_at: 2023-06-27T06:33:22 | gha_created_at: 2020-02-10T06:59:34 | gha_language: Python | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 312 | extension: py
content:
import os
from celery import Celery
from celery.schedules import crontab
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pyobs_weather.settings')
app = Celery('pyobs_weather')
app.config_from_object('django.conf:settings')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
authors: ["thusser@uni-goettingen.de"] | author_id: thusser@uni-goettingen.de

path: /memory.py | repo_name: deltanovember/interpy | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 0b71af583b8ea2bdb4028918a5495ccd1d50167b | directory_id: 16a65967d9da41aa414bd6a99bbbf72eabd5b3e2 | content_id: 3c325dcccd148cfef37fc27dac7e2588ec167453 | snapshot_id: 682c9937bec9abc697fea532ed21c33c435e86e7 | revision_id: 488311855e355e78392d9b96dda55f34f57eadbc
visit_date: 2021-01-10T20:17:05.728145 | revision_date: 2013-05-21T21:22:46 | committer_date: 2013-05-21T21:22:46 | github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 2731 | extension: py
content:
# implementation of card game - Memory
import simplegui
import random
import time
WIDTH = 817
# create frame and add a button and labels
frame = simplegui.create_frame("Memory", WIDTH, 100)
label = frame.add_label("Moves = 0")
# helper function to initialize globals
def init():
global deck, num_moves, state, exposed, WIDTH, num_moves, last_card, pause_cards, pause_time, num_moves, state
deck = range(0,9) + range(0,9)
exposed = []
for card in deck:
exposed.append(False)
random.shuffle(deck)
num_moves = 0
last_card = -1
pause_cards = []
pause_time = 0
num_moves = 0
state = 0
label.set_text("Moves = " + str(num_moves))
frame.add_button("Restart", init)
init()
# define event handlers
def mouseclick(pos):
global num_moves, state, last_card, pause_cards, pause_time
    print 'state ' + str(state)
if 0 == state:
state = 1
# add game state logic here
card = (int) (pos[0]/(WIDTH/16))
print card
if not exposed[card]:
exposed[card] = True
if 0 == state:
state = 1
if 1 == state:
state = 2
num_moves+=1
label.set_text("Moves = " + str(num_moves))
elif 2 == state:
state = 1
if deck[last_card] == deck[card]:
print 'match'
elif last_card >= 0:
exposed[last_card] = False
exposed[card] = False
pause_cards = [card, last_card]
pause_time = time.time()
last_card = card
pass
# cards are logically 50x100 pixels in size
def draw(canvas):
global pause_cards
spacing = 51.4
offset = 10
x = offset
counter = 0
for num in deck:
x1 = counter * spacing
card_width = spacing-7
if exposed[counter]:
canvas.draw_text(str(num), (x, 65), 60, "White")
else:
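            # A mismatched pair stays face-up for 0.5 s before being hidden again.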
if counter in pause_cards and time.time() - pause_time < 0.5:
canvas.draw_text(str(num), (x, 65), 60, "White")
else:
if counter in pause_cards:
pause_cards.remove(counter)
canvas.draw_polygon([(x1, 0), (x1 + card_width, 0), (x1 + card_width, 194), (x1, 194)], 6, "Brown", "Green")
counter+=1
x += spacing
pass
# initialize global variables
init()
# register event handlers
frame.set_mouseclick_handler(mouseclick)
frame.set_draw_handler(draw)
# get things rolling
frame.start()
# Always remember to review the grading rubric
authors: ["don@unswrc.com"] | author_id: don@unswrc.com

path: /products/models.py | repo_name: mamazinho/olist-ts-mini-django-tests | branch_name: refs/heads/main | detected_licenses: [] | license_type: no_license
blob_id: 3a9780d2beb7da0235154761627c60066467499c | directory_id: 10fce7584328e0354a096f21f3a7c8b0bf09e479 | content_id: 5ef2bcc497e9801969e46283af42a443b4ca9857 | snapshot_id: f23632d9f5ef2846a025bee3a33cc41d96c47a9e | revision_id: 062b439ecb519070d6594f1df7e817265f36b1d4
visit_date: 2023-06-08T08:57:04.995475 | revision_date: 2021-06-21T15:10:11 | committer_date: 2021-06-21T15:10:11 | github_id: 378972462 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 228 | extension: py
content:
from django.db import models
class Product(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=200)
price = models.FloatField()
class Meta:
managed = True
authors: ["matheus.tessaroli@olist.com"] | author_id: matheus.tessaroli@olist.com

path: /fuzz_pyretic_mesh_proactive_firewall_no_close_check_loop_mcs_with_max_replays_5/interreplay_24_l_2/openflow_replay_config.py | repo_name: Spencerx/experiments | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 5a4f0ad27d6061aac3a8de0fda690a203fd4a5f9 | directory_id: 077c91b9d5cb1a6a724da47067483c622ce64be6 | content_id: 9924a09d953b9cd6949a7fbc3362765d4b5fa087 | snapshot_id: 0edd16398725f6fd9365ddbb1b773942e4878369 | revision_id: aaa98b0f67b0d0c0c826b8a1565916bf97ae3179
visit_date: 2020-04-03T10:11:40.671606 | revision_date: 2014-06-11T23:55:11 | committer_date: 2014-06-11T23:55:11 | github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1022 | extension: py
content:
from config.experiment_config_lib import ControllerConfig
from sts.topology import *
from sts.control_flow import OpenFlowReplayer
from sts.simulation_state import SimulationConfig
from sts.input_traces.input_logger import InputLogger
simulation_config = SimulationConfig(controller_configs=[ControllerConfig(start_cmd='./pyretic.py -m p0 pyretic.examples.firewall_for_sts_no_close', label='c1', address='127.0.0.1', cwd='../pyretic', kill_cmd='ps aux | grep -e pox -e pyretic | grep -v simulator | cut -c 9-15 | xargs kill -9')],
topology_class=MeshTopology,
topology_params="num_switches=3",
patch_panel_class=BufferedPatchPanel,
multiplex_sockets=False,
kill_controllers_on_exit=True)
control_flow = OpenFlowReplayer(simulation_config, "experiments/fuzz_pyretic_mesh_proactive_firewall_no_close_check_loop_mcs/interreplay_24_l_2/events.trace")
# wait_on_deterministic_values=False
# delay_flow_mods=False
# Invariant check: 'None'
authors: ["cs@cs.berkeley.edu"] | author_id: cs@cs.berkeley.edu

path: /trees/trees.py | repo_name: aayc/kattis | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 5fa7064e2345bdb14079c8c4d3481ce2fa9583c0 | directory_id: a6889d0905012e61edaa105ff406d64c20a7a694 | content_id: d7cbacfba48f2edf56a89b201b23644120493cf2 | snapshot_id: 70e1fd3e0f4dd95a23d7234efd35d0f0b06b17f1 | revision_id: 7b5ab6792773a6fa4096e721db07de4200d5e52d
visit_date: 2020-08-02T03:40:12.016962 | revision_date: 2019-12-26T14:01:43 | committer_date: 2019-12-26T14:01:43 | github_id: 211223170 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 403 | extension: py
content:
from collections import Counter
trees = {}
n = 0
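# Read tree species names from stdin until EOF, counting occurrences of each,
# then print the percentage share of every species in sorted order.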
while True:
try:
m = input()
if m in trees:
trees[m] += 1
else:
trees[m] = 1
n += 1
    except EOFError:
        break
results = []
for k, v in trees.items():
results.append((k, v / n * 100))
results = sorted(results, key=lambda x: x[0])
for r in results:
print(r[0], "{0:.6f}".format(r[1]))
authors: ["aaron.y.chan64@gmail.com"] | author_id: aaron.y.chan64@gmail.com

path: /randomEncounter.py | repo_name: abottchen/dnd-skt | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: e0a6d9ae8ba4515ce6283ab246553d4460d23f65 | directory_id: 7f30dbec583671631926529dba973d35980c4ab2 | content_id: 1e4450a39ae05b871686086db5bb3d3c642fb052 | snapshot_id: 17d6861d8841f5ab2147638735a3e099bea42a9a | revision_id: 3428ead7b3153fb2ae42f035392d2a3cba04a5b5
visit_date: 2020-04-23T14:19:11.154116 | revision_date: 2019-06-29T22:01:28 | committer_date: 2019-06-29T22:01:28 | github_id: 171227560 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: 2019-04-11T18:03:03 | gha_created_at: 2019-02-18T06:30:21 | gha_language: Python | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1883 | extension: py
content:
#!/usr/local/bin/python3
import sys
import getopt
import json
import re
import random
from colorcodes import clr
from enctable import enctabledict
def rollDice(var):
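    # var is a regex match for a dice expression such as "2d6+1":
    # group(1) = number of dice, group(2) = sides, group(3) = optional modifier.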
n = int(var.group(1))
d = int(var.group(2))
neg = False
if var.group(3):
if var.group(3)[0:1] == "-":
neg = True
a = int(var.group(3))
else:
a = int(var.group(3)[1:])
else:
a = 0
# print("Rolling " + str(n) + "d" + str(d) + (" + ","")[neg] + str(a))
s = 0
for x in range(0, n):
s = s + random.randint(1,d)
s = s + a
return clr.YELLOW + clr.BOLD + str(s) + clr.ENDC + clr.GREEN
def printEnc(name):
f = open('randomEncounters.json')
encounters = json.load(f)
print(clr.BOLD + clr.LIGHTGRAY + clr.BG_BLUE + name + clr.ENDC)
for x in encounters["encounters"]:
if x["name"] == name:
text = x["text"]
text = text.replace("\\n", "\n\n")
            text = re.sub(r'(\d+)d(\d+)([+-]\d+)?', rollDice, text)
print(clr.GREEN + text + clr.ENDC)
print(clr.YELLOW + x["treasure"] + clr.ENDC)
for y in x["creatures"]:
print(clr.RED + y + clr.ENDC)
def main(argv):
terrain = ""
try:
opts, args = getopt.getopt(argv,"ht:",["terrain="])
except getopt.GetoptError:
print('randomEncounter.py -t <terrain>')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print(f'''
randomEncounter.py -t <terrain>'
Valid terrains:
Forest
Grassland
Hills
Mountains
Road
Sea
Tundra
''')
sys.exit()
elif opt in ("-t", "--terrain"):
terrain = arg
terrain = terrain.lower()
print('Terrain: ' + terrain)
tableroll = random.randint(1,100)
print("Roll:", tableroll)
for rng, enc in enctabledict.enctable[terrain].items():
low,high = rng.split("-")
if int(low) <= tableroll <= int(high):
printEnc(enc)
if __name__ == "__main__":
main(sys.argv[1:])
authors: ["adam.bottchen@puppet.com"] | author_id: adam.bottchen@puppet.com

path: /AtCoder_Virtual_Contest/macle_20230407/b/main.py | repo_name: KATO-Hiro/AtCoder | branch_name: refs/heads/master | detected_licenses: ["CC0-1.0", "LicenseRef-scancode-public-domain"] | license_type: permissive
blob_id: 100032dcdabbdafdf847e8133584eed2c21c0b5d | directory_id: f13acd0d707ea9ab0d2f2f010717b35adcee142f | content_id: 9e60f21474f8e6a8f81ec7e665156f2e7b528406 | snapshot_id: 126b9fe89fa3a7cffcbd1c29d42394e7d02fa7c7 | revision_id: bf43320bc1af606bfbd23c610b3432cddd1806b9
visit_date: 2023-08-18T20:06:42.876863 | revision_date: 2023-08-17T23:45:21 | committer_date: 2023-08-17T23:45:21 | github_id: 121067516 | star_events_count: 4 | fork_events_count: 0 | gha_license_id: CC0-1.0 | gha_event_created_at: 2023-09-14T21:59:38 | gha_created_at: 2018-02-11T00:32:45 | gha_language: Python | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 520 | extension: py
content:
# -*- coding: utf-8 -*-
def main():
import sys
input = sys.stdin.readline
n, p, q, r = map(int, input().split())
a = list(map(int, input().split()))
total = 0
s = set([0])
# 累積和を取る + setを使って事実上の二分探索
for ai in a:
total += ai
if (total - r) in s and (total - r - q) in s and (total - r - q - p) in s:
print("Yes")
exit()
s.add(total)
print("No")
if __name__ == "__main__":
main()
authors: ["k.hiro1818@gmail.com"] | author_id: k.hiro1818@gmail.com

path: /SQLaORM/migrations/0001_initial.py | repo_name: jrojasm1966/TareasDjango | branch_name: refs/heads/master | detected_licenses: [] | license_type: no_license
blob_id: 24ffcda20b6d4bd7f3a45f796d8b1cff7fda2807 | directory_id: 329dbaecc92242ef1ff9f28b68b42ef3c1e35c14 | content_id: 23531576508de11f967a2150fae6a29ac07530ee | snapshot_id: 4be9819c280f29f51ed2519022ed104805e04ec0 | revision_id: 236a7bdb247b9e973ea22399cae4d97b3f63acfc
visit_date: 2023-08-21T22:34:37.279012 | revision_date: 2021-10-12T03:43:33 | committer_date: 2021-10-12T03:43:33 | github_id: 403107322 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 656 | extension: py
content:
# Generated by Django 3.2.7 on 2021-10-08 22:59
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Wizard',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=45)),
('house', models.CharField(max_length=45)),
('pet', models.CharField(max_length=45)),
('qyear', models.IntegerField()),
],
),
]
|
[
"jarm1966@gmail.com"
] |
jarm1966@gmail.com
|
73a9ba5c2dcbeff3f29c12bb1de301c14a259a06
|
db97d32264bcd1754ba5f46cb583d462a7a2e384
|
/panel.py
|
4e343bab14e4c794568474eda804f9c6605454df
|
[] |
no_license
|
debainhit/PythonDataAnalysis
|
a27dfc61a4ccb294a5cc86d195dbc2f5e040ba89
|
44285d8a30c629ca2dfd10668523431917446cf9
|
refs/heads/master
| 2021-09-06T14:35:57.507181
| 2018-02-07T16:13:41
| 2018-02-07T16:13:41
| 88,264,439
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,269
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2018-02-05 15:47:32
# @Author : debainhit (18068920049@163.com)
# @Link : https://github.com/debainhit
# @Version : $Id$
# ---------------------------------------------------------------
# pandas.Panel(data, items, major_axis, minor_axis, dtype, copy)
# paras:
# data: Data takes various form like ndarray, series, map,
# list, dict, constants and another DataFrame
# items: axis = 0
# major_axis: axis = 1
# minor_axis: axis = 2
# dtype = Data type of each column
# copy = Copy data, Default, false
# ---------------------------------------------------------------
import pandas as pd
import numpy as np
print "Create Panel"
# from ndarrays, dict of DataFrames
p = pd.Panel()
print p
data = np.random.rand(2, 4, 5)
print data
p = pd.Panel(data)
print p
data = { 'Item1' : pd.DataFrame(np.random.randn(4, 3)),
'Item2' : pd.DataFrame(np.random.randn(4, 2))}
p = pd.Panel(data)
print p
print "Selecting the Data from Panel"
#items, Major_axis, Minor_axis
data = { 'Item1' : pd.DataFrame(np.random.randn(4, 3)),
'Item2' : pd.DataFrame(np.random.randn(4, 2))}
p = pd.Panel(data)
print p['Item1']
print p.major_xs(1)
print p.minor_xs(1)
|
[
"chengzhenghitwh@gmail.com"
] |
chengzhenghitwh@gmail.com
|
59f8f30713b022bcc53db159c2e0194300e12b1e
|
e575373d8ef5470966938946ab6bf0efb9c154c0
|
/setup.py
|
af1146b9eafc3eee14aba9c0e853a6429d08bb63
|
[
"MIT"
] |
permissive
|
gmacgilchrist/so_idealized
|
57cc7885c8f9916e291d79eaa5281552b3ea39a9
|
d8681d1405e52c51add927a06302b7c68b84cbd7
|
refs/heads/master
| 2022-12-27T01:48:06.208667
| 2020-09-21T21:01:34
| 2020-09-21T21:01:34
| 297,397,628
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 272
|
py
|
from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
description='Set-up and analysis of an idealized Southern Ocean channel model configuration',
author='Graeme MacGilchrist',
license='MIT',
)
|
[
"graemem@princeton.edu"
] |
graemem@princeton.edu
|
197ba5a6aab4dcc6a7d1ac30581f1c017feccd53
|
e605c792cb250269584e2cd4ba47a8e4101e2fa6
|
/demoapp/migrations/0003_books.py
|
97c4151b53e593e8f89e29b8321fd1b73c6fa7e9
|
[] |
no_license
|
krishkheloji/djangoproject
|
671a8f98340ad98e8ed399c5b2ddd3c2654bc4e3
|
ada4d942f9275360f636bd50353243dd6efb5f1c
|
refs/heads/master
| 2023-07-03T04:04:08.682941
| 2021-08-12T08:34:08
| 2021-08-12T08:34:08
| 395,248,550
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 636
|
py
|
# Generated by Django 3.2.5 on 2021-08-08 14:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('demoapp', '0002_student'),
]
operations = [
migrations.CreateModel(
name='Books',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('author', models.CharField(max_length=100)),
('pdf', models.FileField(upload_to='books/pdfs/')),
],
),
]
|
[
"khelojikrish@gmail.com"
] |
khelojikrish@gmail.com
|
35644d7f163503d3b612a6e1d0bb4eece09b47f3
|
81e99594f65a27e01f8cb57b719330f391583bc1
|
/inference_test/downloads.py
|
1b947154a775044d6fad8f24651dc10ce27f2e5e
|
[] |
no_license
|
kberci/Deep-Learning-based-Object-Detection
|
9376a57eb03f2ee525985e4dc5a8bc24b4f931e8
|
dfb3263bfe3ce42553a5a6e486061fef4b0727ff
|
refs/heads/master
| 2023-03-22T06:55:10.747070
| 2021-03-18T11:00:18
| 2021-03-18T11:00:18
| 348,910,179
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,080
|
py
|
from google_drive_downloader import GoogleDriveDownloader as gdd
from pathlib import Path
import pandas as pd
import requests
import os
def download_images(csv_path, save_path):
d = pd.read_csv(csv_path, squeeze=True)
dict_list = [{d.columns[i]: item[i] for i in range(len(d.columns))} for item in d.values]
for img in dict_list:
img_path = save_path / img['name']
if not img_path.is_file():
resp = requests.get(img['url'], allow_redirects=True)
print("Saving: ", img_path)
open(img_path, 'wb').write(resp.content)
else:
print("(Image " + img_path + " already exists, skip download.)")
def download_models(csv_path, save_path):
d = pd.read_csv(csv_path, squeeze=True)
dict_list = [{d.columns[i]: item[i] for i in range(len(d.columns))} for item in d.values]
for model in dict_list:
config_path = save_path / model['name']
if not config_path.is_file():
gdd.download_file_from_google_drive(file_id=model['id'].split('/')[-2], dest_path=config_path)
else:
print("Model was already downloaded: ", config_path)
PATH_TO_CONFIGS = Path('config')
PATH_TO_IMAGES = Path('images')
PATH_TO_MODELS = Path('models')
# Create directories if they do not exist and download images
if not PATH_TO_IMAGES.is_dir():
os.mkdir(PATH_TO_IMAGES)
download_images(PATH_TO_CONFIGS / 'images.csv', PATH_TO_IMAGES)
else:
print("Image folder already exists, skip downloading images.")
# Create directories if they do not exist and download models
model_folders = ['edgetpu', 'mobilenet', 'mobiledet', 'yolo4', 'yolo5', 'dnn_files']
if not PATH_TO_MODELS.is_dir():
os.mkdir(PATH_TO_MODELS)
else:
print("Model folder already exists.")
for model_folder in model_folders[:1]:
if not (PATH_TO_MODELS / model_folder).is_dir():
os.mkdir(PATH_TO_MODELS / model_folder)
else:
print("Sub-model folder already exists.")
download_models((PATH_TO_CONFIGS / model_folder).with_suffix('.csv'), PATH_TO_MODELS / model_folder)
|
[
"k.berci95@gmail.com"
] |
k.berci95@gmail.com
|
58f02912a1e6a3f85274b0a72a026837088ef5f8
|
75e9ccb4aeaa9ee5a108adb0e787415caa4a124c
|
/src/voice_example.py
|
80086e94a23adbaf0dd5be287a0c116418b32e99
|
[] |
no_license
|
hosod/ros_server
|
532a1aa086f0789455577f65233c9f57e0917f62
|
6f7ffbdc91c66c6287c09a2eab9a82ea169d62a8
|
refs/heads/master
| 2020-09-21T14:49:03.142771
| 2020-02-10T05:08:23
| 2020-02-10T05:08:23
| 224,821,913
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,369
|
py
|
# -*- coding: utf-8 -*-
#! /usr/bin/env python
import roslib
roslib.load_manifest('ros_class5_hosoda')
import rospy
import tf
import math
from geometry_msgs.msg import PoseStamped, Quaternion
from geometry_msgs.msg import Point, PointStamped
import sys
sys.path.append('/home/robovie/catkin_ws/src')
from layer2.msg import HTEntityList, HTEntity
from tf.transformations import quaternion_from_euler
import forecast
from std_msgs.msg import String
from tos_voice_gestures_tools import gesture, pause
def __lookup_ht():
ht_msg = rospy.wait_for_message('/ht', HTEntityList)
# print(type(ht_msg))
# listener = tf.TransformListener()
range_min = 2.0
listener.waitForTransform('/map', '/base_link', rospy.Time(), rospy.Duration(4.0))
for human in ht_msg.list:
pt_human = PointStamped()
pt_human.header.frame_id = human.header.frame_id
pt_human.point.x = human.x
pt_human.point.y = human.y
try:
now = rospy.Time.now()
pt_human.header.stamp = now
listener.waitForTransform('/map', '/base_link', now, rospy.Duration(3.0))
pt_bl = listener.transformPoint('/base_link', pt_human)
except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as e:
print('hoge', e)
continue
range_human = math.sqrt(pt_bl.point.x**2 + pt_bl.point.y**2)
print(range_human)
if range_human < range_min:
# print('found')
return (True, human, pt_bl)
else:
# print('not found')
continue
return (False, None, None)
def lookup_ht():
rate = rospy.Rate(10)
while not rospy.is_shutdown():
try:
flag, human, human_bl = __lookup_ht()
break
except:
continue
rate.sleep()
return (flag, human, human_bl)
# goal_vel = rospy.Publisher('/move_base_simple/goal', PoseStamped, queue_size=10)
# human: HTEntity
# human_bl: PointStamped human in base_link
def get_goal(human, human_bl):
x = human_bl.point.x
y = human_bl.point.y
angle = math.atan2(y, x)
goal = PoseStamped()
goal.header.frame_id = human_bl.header.frame_id
goal.header.stamp = rospy.Time.now()
q = quaternion_from_euler(0,0,angle)
goal.pose.orientation = Quaternion(q[0], q[1], q[2], q[3])
rospy.loginfo(goal)
goal_vel.publish(goal)
if __name__ == "__main__":
global goal_vel
goal_vel = rospy.Publisher('/move_base_simple/goal', PoseStamped, queue_size=10)
global pub_voice_gestures
pub_voice_gestures = rospy.Publisher("/tos_voice_gestures", String, latch=True, queue_size=10)
rospy.init_node('lookup_ht', anonymous=True)
global listener
listener = tf.TransformListener()
# kyoto = forecast.get_forecasts()
    today = '今日の天気は晴れ'  # "Today's weather: sunny"
    tomorrow = '明日の天気は晴時々曇'  # "Tomorrow's weather: sunny, occasionally cloudy"
rate = rospy.Rate(5)
while not rospy.is_shutdown():
        flag, human, human_bl = lookup_ht()
if flag:
get_goal(human, human_bl)
            print('こんにちは '+today + tomorrow)  # 'こんにちは' == "Hello"
pub_voice_gestures.publish(gesture("sds_wave", 'こんにちは!' + pause(1) + today + 'で'+ pause(1) + tomorrow + 'です'))
rospy.sleep(6)
break
# print(flag)
rate.sleep()
|
[
"hosoda.0406.1997@gmail.com"
] |
hosoda.0406.1997@gmail.com
|
d47abaa5e716b6c22d025f228fc71b2bbb7057d9
|
8ab61e98b8b4efa7378ad50ee12ea5ec81b8c310
|
/thredo/mixin.py
|
a46f1c9843c402b33c51e360d5947e7fae4763ba
|
[
"MIT"
] |
permissive
|
RalphWalters/thredo
|
5a791d0848067e2b028c3040874476d582509543
|
ea109c693036764dd192527f9b6bba18d3b18042
|
refs/heads/master
| 2020-04-05T05:18:56.942365
| 2018-11-08T23:25:54
| 2018-11-08T23:25:54
| 156,589,958
| 1
| 0
|
MIT
| 2018-11-07T18:21:27
| 2018-11-07T18:21:27
| null |
UTF-8
|
Python
| false
| false
| 1,447
|
py
|
# mixin.py
__all__ = ['ThredoMixIn']
from . import io
from . import core
class ThredoMixIn:
'''
Mixin class that can be used to make standard socketserver objects to
use thredo
'''
_threads = None
def server_activate(self):
super().server_activate()
self.socket = io.Socket(self.socket)
def serve_forever(self):
while True:
self.handle_request()
def handle_request(self):
try:
self._handle_request_noblock()
except core.ThreadCancelled:
threads = self._threads
self._threads = None
if threads:
for thread in threads:
thread.cancel()
raise
def process_request_thread(self, request, client_address):
try:
self.finish_request(request, client_address)
except Exception:
self.handle_error(request, client_address)
finally:
self.shutdown_request(request)
def process_request(self, request, client_address):
t = core.spawn(self.process_request_thread, request, client_address)
if self._threads is None:
self._threads = []
self._threads.append(t)
def server_close(self):
super().server_close()
threads = self._threads
self._threads = None
if threads:
for thread in threads:
thread.join()
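# Usage sketch (editor's note, not part of the original module): the docstring
# says this mixin makes standard socketserver objects use thredo, so it would
# be combined with a server class the same way socketserver.ThreadingMixIn is:
#
#   import socketserver
#   class EchoServer(ThredoMixIn, socketserver.TCPServer):
#       pass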
|
[
"dave@dabeaz.com"
] |
dave@dabeaz.com
|
2f7b19ed574c263b4b687fdd6ae77d6d253a9c37
|
da2f7050ba0f04bc1f1ff91fa5537139ddc07508
|
/tests/test_c_hardened_scales.py
|
70d79817ba895ed8d9b681b402681336691fece9
|
[] |
no_license
|
Draw2Cards/hardened_scales
|
04855f247ed23a6bf8056274dfb1a0ae4bad5c15
|
75097e45c502f1fe7980cf466b21059d68bdb206
|
refs/heads/main
| 2023-06-15T18:45:36.229750
| 2021-07-11T19:46:55
| 2021-07-11T19:46:55
| 385,033,384
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 860
|
py
|
import unittest
from cards.arcbound_worker import CArcboundWorker
from cards.hardened_scales import CHardenedScales
from game import Game
from player import Player
class TestHardenedScales(unittest.TestCase):
def test__add_counter(self):
game = Game()
player = Player(game)
p_aw = player.cast(CArcboundWorker(game))
player.cast(CHardenedScales(game))
game.counter_mgr.addCounters(p_aw, "+1/+1", 1)
self.assertEqual(3, p_aw.getCountersCount("+1/+1"))
def test__add_counter_after_hs_destroy(self):
game = Game()
player = Player(game)
p_aw = player.cast(CArcboundWorker(game))
p_hs = player.cast(CHardenedScales(game))
game.destroyCard(p_hs)
game.counter_mgr.addCounters(p_aw, "+1/+1", 1)
self.assertEqual(2, p_aw.getCountersCount("+1/+1"))
|
[
"drawkka@gmail.com"
] |
drawkka@gmail.com
|
f35e22e27c3c4b5d1b7d3f72b4c65f441b6973b0
|
8a517dfb19fcfc092228703ff264a133e9d7667d
|
/biblioteca/urls.py
|
8dde6bf9b79ce41ba56283c63eb141fa982420f1
|
[] |
no_license
|
rafalordi/projeto_integrador
|
e0dac9fa97202709dce58ad866913fb43eb8848c
|
c672b5f73804666cbd57b62d3b6fc5223de002d0
|
refs/heads/master
| 2023-09-03T12:22:42.606286
| 2021-11-07T02:32:59
| 2021-11-07T02:32:59
| 400,849,176
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 215
|
py
|
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('livros/', include('livros.urls')),
path('auth/', include('usuarios.urls'))
]
|
[
"rafa.lordi@hotmail.com"
] |
rafa.lordi@hotmail.com
|
6ac21dab1c7122e1d5743376f82d3444cceb3563
|
d906173f593a1d7a382a0a39beb894c510d7dd69
|
/Vina_merge.py
|
06e4903868ec5638ece9e7c159fde98f6aa56246
|
[] |
no_license
|
AndPdb/Vina_scripts
|
222c9eaa25d58ed32bc794e9e4a981138482a332
|
d5e3e1d036510d5e39b1dc0f4bc41d446682fd4c
|
refs/heads/master
| 2021-06-18T05:18:19.247423
| 2021-03-24T13:12:55
| 2021-03-24T13:12:55
| 185,991,822
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,800
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 18 11:26:39 2019
@author: Andrea Pasquadibisceglie
This script merges the output PDBQT and the rigid input PDBQT derived from Flexible Docking Simulation with AutoDock Vina, producing a PDB file called "merge.pdb" in the output directory.
"""
from io import StringIO
import subprocess
import argparse
import os
from os import path
###SOME PATHS###
#mgltools directory path
MGLTOOLSDIR = "/home/andrea/Programs/mgltools_x86_64Linux2_1.5.7rc1/"
#path for utilities and pythonsh of mgltools. In linux OS these should be the same
utilities24 = "{0}MGLToolsPckgs/AutoDockTools/Utilities24/".format(MGLTOOLSDIR)
pythonsh = "{0}bin/pythonsh".format(MGLTOOLSDIR)
#COMMAND LINE PARSING#
parser = argparse.ArgumentParser(prog='Vina_merge', usage='%(prog)s [options]')
Input = parser.add_argument_group('Input')
Input.add_argument('-w', '--worDir', action='store', help='working directory (default: current working directory)', metavar='')
Input.add_argument('-r', '--rigid', action='store', help='rigid PDBQT', required=True, metavar='')
Input.add_argument('-p', '--poses', action='store', help='vina output PDBQT', required=True, metavar='')
Input.add_argument('-n', '--number', action='store', help='rank of the docking pose to merge (default: 1)', metavar='')
Output = parser.add_argument_group('Output')
Output.add_argument('-o', '--outDir', action='store', help='output directory (default: current working directory)', metavar='')
args = parser.parse_args()
#REQUIRED VALUES#
rigid=args.rigid
poses=args.poses
nm_poses=path.splitext(path.basename(poses))[0]
#DEFAULT VALUES#
if args.worDir is None:
worDir=os.getcwd()
else:
worDir=args.worDir
if args.number is None:
number="1"
else:
number=args.number
if args.outDir is None:
outDir=os.getcwd()
else:
outDir=args.outDir
#SOME FUNCTIONS#
def pdbqt2pdb(pdbqt):
    # pass each flag and its value as separate argv elements so the called
    # script's option parser sees them correctly; pdbqt[:-2] drops "qt" to
    # produce the .pdb output name
    subprocess.run([pythonsh, utilities24 + "pdbqt_to_pdb.py", "-f", pdbqt, "-o", pdbqt[:-2]])
#########################################
###(1)EXTRACT THE MODEL YOU WANT MERGE###
with open (poses, "r") as fi:
lines = fi.readlines()
for i, l in enumerate(lines):
if l.split()[0] == "MODEL" and l.split()[1] == number:
model_n = l.split()[0] + l.split()[1]
j=i #remember starting position
#extract pose n
with open (path.join(worDir, ("{0}_{1}.pdbqt".format(nm_poses,model_n))), "w") as model:
for k in range(j, len(lines)):
if lines[k].startswith("ENDMDL") == False:
model.write(lines[k])
else:
model.write(lines[k])
break
###(2)CONVERT PDBQT TO PDB###
pdbqt2pdb(model.name) #io.TextIOWrapper
pdbqt2pdb(rigid)
#save path
modelpdb = (model.name)[:-2]
rigidpdb = (rigid)[:-2]
###(3)SPLIT LIGAND AND FLEX RESIDUES###
with open (modelpdb, "r") as fi:
flex = StringIO()
lig = StringIO()
for lines in fi.readlines():
if lines.startswith("ATOM"):
flex.write(lines)
elif lines.startswith("HETATM"):
lig.write(lines)
###(4)MAKE LIST OF FLEX RESIDUES###
reslist=[]
contflex = flex.getvalue()
listaflex = contflex.split("\n")
for line in listaflex:
if line != "":
reslist.append(line[17:26])
###(5)MERGE###
merge=StringIO()
with open (rigidpdb, "r") as fi:
for liner in fi.readlines():
merge.write(liner) #line of the rigid
if liner[17:26] in reslist:
for linef in listaflex:
if linef[17:26] == liner[17:26]:
merge.write(linef+"\n") #all lines of the flex residues matched
reslist.remove(liner[17:26]) #remove the flex from the list
#if liner.startswith("TER"):
# continue
merge.write(lig.getvalue()) #merge the ligand at the end
merge.write("END")
###(6)RENUMBERING###
atnum=1
remerge=StringIO()
for line in (merge.getvalue()).split("\n"):
if line.startswith("ATOM") and int(line.split()[1])==(atnum+1):
atnum = atnum+1
remerge.write(line+"\n")
#check if the atom number is correct
elif line.startswith("ATOM") and int(line.split()[1])!=(atnum+1):
#replace the substring using 5 position aligned on left
newline = line[:6] + "{0:>5}".format(str(atnum+1)) + line[11:]
# newline = line.replace("{0:>5}".format(line.split()[1]),
# "{0:>5}".format(str(atnum+1)))
remerge.write(newline+"\n")
atnum = atnum+1
else:
remerge.write(line+"\n")
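# Editor's note on the renumbering above: in the fixed-width PDB format the
# atom serial number occupies columns 7-11, which is why the replacement
# value is right-justified into a 5-character field with "{0:>5}".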
###(7)WRITE MERGED FILE###
with open (path.join(outDir, ("{0}_merge.pdbqt".format(nm_poses))), "w") as fo:
cont = remerge.getvalue()
fo.write(cont)
|
[
"and.pdb92@gmail.com"
] |
and.pdb92@gmail.com
|
ec0331a25eff92fae3203dd2a3ec3aef112fe052
|
de5cddc7e5df6afe35a71e37314de9ba64df29bf
|
/measurement/sensor_reader.py
|
9baf6211a002a5122b549cb0d428053aa30c879a
|
[] |
no_license
|
mandreascz/orangepi-thermometer
|
2a4ce53a012387ecc8e14f5a9b22b658ee753e34
|
e4c9f83d9c71a41940cf5a9c56c2334fcc8300be
|
refs/heads/master
| 2020-04-11T09:49:23.100213
| 2018-12-13T20:44:42
| 2018-12-13T20:44:42
| 161,693,301
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,593
|
py
|
from pyA20.gpio import gpio
import dht
import riprova
import sqlite3
import time
import multiprocessing
from collections import namedtuple
import random
import math
# initialize GPIO
# DHT22_PIN = port.PA13
class Measurer:
def __init__(self, pin):
self.sensor = self._init_sensor(pin)
self.db_conn = sqlite3.connect('measurement_db.db', isolation_level=None)
@riprova.retry(backoff=riprova.ConstantBackoff(interval=2, retries=5))
def _read(self):
result = self.sensor.read()
if not result.is_valid():
raise ValueError('Reading not valid !')
return result
def _save_to_db(self, temperature, humidity):
cur = self.db_conn.cursor()
cur.execute('INSERT INTO measurements(temperature, humidity) VALUES (?, ?);', (temperature, humidity))
cur.close()
def _init_sensor(self, pin):
gpio.init()
return dht.DHT22(pin=pin)
def do(self):
while True:
try:
result = self._read()
self._save_to_db(result.temperature, result.humidity)
except Exception as e:
print(e)
finally:
time.sleep(60)
def spawn_process(self):
process = multiprocessing.Process(target=self.do)
process.start()
class Mocker(Measurer):
result = namedtuple('result', ['temperature', 'humidity'])
def _init_sensor(self, pin):
return None
def _read(self):
return self.result(temperature=math.fabs(random.random()*40), humidity=random.randint(10, 100))
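# Usage sketch (editor's note, not in the original module): on real hardware
# something like the following would start the sampling loop in a child
# process; port.PA13 is the pin hinted at in the DHT22_PIN comment above.
#
#   from pyA20.gpio import port
#   Measurer(port.PA13).spawn_process()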
|
[
"andrej.mudroch@threatmark.com"
] |
andrej.mudroch@threatmark.com
|
4cb4ef1f26cb8416278e80e05d5406194163c3b8
|
f8f2536fa873afa43dafe0217faa9134e57c8a1e
|
/aliyun-python-sdk-sae/aliyunsdksae/request/v20190506/CreateNamespaceRequest.py
|
eb3f5a3f1d1c14f16ca02e9d70f2d9aebac1a158
|
[
"Apache-2.0"
] |
permissive
|
Sunnywillow/aliyun-openapi-python-sdk
|
40b1b17ca39467e9f8405cb2ca08a85b9befd533
|
6855864a1d46f818d73f5870da0efec2b820baf5
|
refs/heads/master
| 2022-12-04T02:22:27.550198
| 2020-08-20T04:11:34
| 2020-08-20T04:11:34
| 288,944,896
| 1
| 0
|
NOASSERTION
| 2020-08-20T08:04:01
| 2020-08-20T08:04:01
| null |
UTF-8
|
Python
| false
| false
| 1,912
|
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RoaRequest
from aliyunsdksae.endpoint import endpoint_data
class CreateNamespaceRequest(RoaRequest):
def __init__(self):
RoaRequest.__init__(self, 'sae', '2019-05-06', 'CreateNamespace')
self.set_uri_pattern('/pop/v1/paas/namespace')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_NamespaceName(self):
return self.get_query_params().get('NamespaceName')
def set_NamespaceName(self,NamespaceName):
self.add_query_param('NamespaceName',NamespaceName)
def get_NamespaceDescription(self):
return self.get_query_params().get('NamespaceDescription')
def set_NamespaceDescription(self,NamespaceDescription):
self.add_query_param('NamespaceDescription',NamespaceDescription)
def get_NamespaceId(self):
return self.get_query_params().get('NamespaceId')
def set_NamespaceId(self,NamespaceId):
self.add_query_param('NamespaceId',NamespaceId)
|
[
"sdk-team@alibabacloud.com"
] |
sdk-team@alibabacloud.com
|
867bc24c88ff7a451fb8c0e86c35f2cdd36b6a63
|
80e7eb64f6524c1088167f3960c83c5c87c46cf8
|
/codetest/codee12034.py
|
512472006b0437a5d2bf0ea4258d212bccb45fc3
|
[] |
no_license
|
kaisprites/python-lecture
|
fc95aaf37472036581ec188ed82d33fc0a26f6d8
|
ad58d540ef12fa5444fba3473bc439a67613c36e
|
refs/heads/master
| 2023-02-15T08:22:02.467693
| 2021-01-11T09:24:43
| 2021-01-11T09:24:43
| 327,744,573
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 85
|
py
|
target = input()
mult = 1
for i in range(1, int(target)+1):
mult *= i
print(mult)
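# Worked example (editor's note): for input "5" the loop computes
# 1*2*3*4*5 and prints 120.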
|
[
"revoneschetzaine@gmail.com"
] |
revoneschetzaine@gmail.com
|
669fd9c737ef104b59785644e983740fff849ae4
|
321a8e450f8332c6023b889dd0968cf83e7daf52
|
/Crash course/Classes/dog.py
|
588df0dd5b741ec4c0af4a302c0461789eb43c29
|
[] |
no_license
|
ejluciano/Python
|
250d4d3528b9328a4827dcb969d9a3d7f51b943d
|
9c3ca698753bd17c6784fbde68a3a845c4a30664
|
refs/heads/main
| 2023-04-20T09:55:47.863494
| 2021-05-05T15:09:07
| 2021-05-05T15:09:07
| 363,134,354
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 523
|
py
|
class Dog:
def __init__(self,name,age):
self.name = name
self.age = age
def sit(self):
print(f"{self.name} is now sitting.")
def roll_over(self):
print(f"{self.name} rolled over")
my_dog = Dog('Willie', 6)
your_dog = Dog("Swarley", 8)
print(f"My dog's name is {my_dog.name}")
print(f"My dog is {my_dog.age} years old.")
my_dog.sit()
my_dog.roll_over()
print(f"My dog's name is {my_dog.name}")
print(f"My dog is {my_dog.age} years old.")
your_dog.sit()
your_dog.roll_over()
|
[
"username@example.com"
] |
username@example.com
|
1f45c9f0c9f794040bece096caf6b9ba5dad03b3
|
bdd9b28844f8e6398f0582bed649dc611eddcdc1
|
/test/runner.py
|
b8378b37acdf66a3b4ad8f13335327ce4897a9b9
|
[
"Apache-2.0"
] |
permissive
|
LuizGsa21/p4-conference-central
|
e082b28fb75560ea8058b031907782a79a886d34
|
e27da59675229116aa5b8c7e7288d79692294f24
|
refs/heads/master
| 2016-09-06T19:34:55.500179
| 2015-09-09T21:23:06
| 2015-09-10T01:00:29
| 42,084,299
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,104
|
py
|
import sys
import os
import logging
# --- UPDATE PATHS
sys.path.insert(1, '/usr/local/google_appengine') # App Engine libraries
sys.path.insert(1, '/usr/local/google_appengine/lib/yaml/lib') # App Engine yaml
# If you are having trouble setting up the paths, checkout this guide.
# https://cloud.google.com/appengine/docs/python/tools/localunittesting?hl=en#Python_Writing_Datastore_and_memcache_tests
# --- END UPDATE PATHS
# add absolute path of parent directory so we can import from conference.py and models.py
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
# Ensure that the google.appengine.* packages are available
# in tests as well as all bundled third-party packages.
import dev_appserver
dev_appserver.fix_sys_path()
if '__main__' == __name__:
import unittest
# suppress warnings during test
logging.getLogger().setLevel(logging.ERROR)
# Discover and run tests.
suite = unittest.loader.TestLoader().discover(os.path.dirname(os.path.realpath(__file__)), pattern='test_*.py')
unittest.TextTestRunner(verbosity=2).run(suite)
|
[
"LArantesSa@icloud.com"
] |
LArantesSa@icloud.com
|
c628625980003bb812fffbf3adec67eb3a4b6b5b
|
c3179dc6c11770fe877d9b08bebd28448ee66ba8
|
/mtdnn/common/optimizer.py
|
fd7532fca02f7bc79157d15057c3b8d920e72623
|
[
"MIT"
] |
permissive
|
microsoft/MT-DNN
|
2a0f102916a1b092f25b4999834177bd38319c53
|
e5c3e07f3a8e55067433714ce261a6d28ba73d22
|
refs/heads/master
| 2023-06-29T23:57:42.108328
| 2020-07-02T02:22:06
| 2020-07-02T02:22:06
| 215,127,881
| 151
| 28
|
MIT
| 2023-06-12T21:28:37
| 2019-10-14T19:25:46
|
Python
|
UTF-8
|
Python
| false
| false
| 3,833
|
py
|
# coding=utf-8
# Copyright (c) Microsoft. All rights reserved.
from copy import deepcopy
from functools import wraps
import torch
from torch.nn import Parameter
class EMA:
def __init__(self, gamma, model):
super(EMA, self).__init__()
self.gamma = gamma
self.shadow = {}
self.model = model
self.setup()
def setup(self):
for name, para in self.model.named_parameters():
if para.requires_grad:
self.shadow[name] = para.clone()
def cuda(self):
for k, v in self.shadow.items():
self.shadow[k] = v.cuda()
def update(self):
for name, para in self.model.named_parameters():
if para.requires_grad:
self.shadow[name] = (1.0 - self.gamma) * para + self.gamma * self.shadow[name]
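    # Worked example (editor's note): with gamma=0.99, an existing shadow value
    # of 1.0 and a parameter that has moved to 2.0, the shadow updates to
    # 0.01*2.0 + 0.99*1.0 = 1.01 -- i.e. the shadow is a slow exponential
    # moving average of the weights.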
def swap_parameters(self):
for name, para in self.model.named_parameters():
if para.requires_grad:
temp_data = para.data
para.data = self.shadow[name].data
self.shadow[name].data = temp_data
def state_dict(self):
return self.shadow
# Adapted from
# https://github.com/pytorch/pytorch/blob/master/torch/nn/utils/weight_norm.py
# and https://github.com/salesforce/awd-lstm-lm/blob/master/weight_drop.py
def _norm(p, dim):
"""Computes the norm over all dimensions except dim"""
if dim is None:
return p.norm()
elif dim == 0:
output_size = (p.size(0),) + (1,) * (p.dim() - 1)
return p.contiguous().view(p.size(0), -1).norm(dim=1).view(*output_size)
elif dim == p.dim() - 1:
output_size = (1,) * (p.dim() - 1) + (p.size(-1),)
return p.contiguous().view(-1, p.size(-1)).norm(dim=0).view(*output_size)
else:
return _norm(p.transpose(0, dim), 0).transpose(0, dim)
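# Worked example (editor's sketch): for p of shape (4, 3) and dim=0, the
# contiguous view has shape (4, 3), norms are taken along dim=1, and the
# result is reshaped to (4, 1) -- one norm per row, broadcastable against p.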
def _dummy(*args, **kwargs):
# We need to replace flatten_parameters with a nothing function
return
class WeightNorm(torch.nn.Module):
def __init__(self, weights, dim):
super(WeightNorm, self).__init__()
self.weights = weights
self.dim = dim
def compute_weight(self, module, name):
g = getattr(module, name + "_g")
v = getattr(module, name + "_v")
return v * (g / _norm(v, self.dim))
@staticmethod
def apply(module, weights, dim):
# Terrible temporary solution to an issue regarding compacting weights
# re: CUDNN RNN
if issubclass(type(module), torch.nn.RNNBase):
module.flatten_parameters = _dummy
if weights is None: # do for all weight params
weights = [w for w in module._parameters.keys() if "weight" in w]
fn = WeightNorm(weights, dim)
for name in weights:
if hasattr(module, name):
print("Applying weight norm to {} - {}".format(str(module), name))
weight = getattr(module, name)
del module._parameters[name]
module.register_parameter(name + "_g", Parameter(_norm(weight, dim).data))
module.register_parameter(name + "_v", Parameter(weight.data))
setattr(module, name, fn.compute_weight(module, name))
module.register_forward_pre_hook(fn)
return fn
def remove(self, module):
for name in self.weights:
            weight = self.compute_weight(module, name)
delattr(module, name)
del module._parameters[name + "_g"]
del module._parameters[name + "_v"]
module.register_parameter(name, Parameter(weight.data))
def __call__(self, module, inputs):
for name in self.weights:
setattr(module, name, self.compute_weight(module, name))
def weight_norm(module, weights=None, dim=0):
WeightNorm.apply(module, weights, dim)
return module
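# Usage sketch (editor's note, analogous to torch.nn.utils.weight_norm):
#
#   m = weight_norm(torch.nn.Linear(20, 40), dim=0)
#
# after which m.weight is recomputed on every forward pass from m.weight_g
# and m.weight_v as v * (g / ||v||).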
|
[
"noreply@github.com"
] |
microsoft.noreply@github.com
|
6101e44287eed33ada178d8f0882b7bcc05b506b
|
b9dcba451ac9c1447a2307b1b959798020683970
|
/Final Project/BiasMFspark.py
|
04ed907db05981c9ab8cd2a81b1fca97940afd3d
|
[] |
no_license
|
KuanChihLee/Parallel_Analysis_in_Spark
|
2bd53dff33c1782563e69731e18ffd5e0a20439f
|
94e549ce0084338675ad8d0a424189b6b24dfd4e
|
refs/heads/master
| 2020-05-01T18:46:19.118240
| 2020-01-24T22:48:40
| 2020-01-24T22:48:40
| 177,631,434
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,492
|
py
|
# -*- coding: utf-8 -*-
import numpy as np
from time import time
import csv
import sys,argparse
from pyspark import SparkContext
from operator import add
from pyspark.mllib.random import RandomRDDs
def readRatings(file,sparkContext):
return sparkContext.textFile(file, use_unicode=False).map(eval).map(lambda (bid,stars,uid,city,name,state): (uid,bid,stars))
def swap((x,y)):
return (y,x)
def tuple_adding(pair1,pair2):
""" Helper function that adds each elememt individually
return a tuple
"""
val11, val12 = pair1
val21, val22 = pair2
return (val11+val21, val12+val22)
def globalMean(R):
''' return scalar: global mean- the average rating
'''
x, num = R.map(lambda (i,j,x): (x,1)).reduce(tuple_adding)
return 1.0 * x / num
def generateUserProfiles(R,d,seed,sparkContext,N):
''' Create User Profiles
i: user id
ui: dimemsion d, which is latent dimension
bu: the average rating given a particular user
return (i, (ui,bu))
'''
U = R.map(lambda (i,j,x): (i,(x,1))).reduceByKey(tuple_adding, numPartitions = N).map(lambda (i,(x,num)): (i, x/num))
numUsers = U.count()
randRDD = RandomRDDs.normalVectorRDD(sparkContext, numUsers, d,numPartitions=N, seed=seed)
U = U.zipWithIndex().map(swap).repartition(N)
randRDD = randRDD.zipWithIndex().map(swap).repartition(N)
return U.join(randRDD).map(lambda (idx, ((i,bu),ui)): (i, (ui,bu)))
def generateItemProfiles(R,d,seed,sparkContext,N):
''' Create Item Profiles
j: item id
vj: dimemsion d, which is latent dimension
bv: the average rating given a particular item
return (j, (vj,bv))
'''
V = R.map(lambda (i,j,x): (j,(x,1))).reduceByKey(tuple_adding, numPartitions = N).map(lambda (j,(x,num)): (j, x/num))
numItems = V.count()
randRDD = RandomRDDs.normalVectorRDD(sparkContext, numItems, d, numPartitions=N, seed=seed)
V = V.zipWithIndex().map(swap).repartition(N)
randRDD = randRDD.zipWithIndex().map(swap).repartition(N)
return V.join(randRDD).map(lambda (idx, ((j,bv),vj)): (j, (vj,bv)))
def pred_diff(r,u,v,bu,bv,mean):
return u.T.dot(v) + mean + (bu-mean) + (bv-mean) - r
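# Editor's note: the predicted rating here is u.T.dot(v) + mean + (bu - mean)
# + (bv - mean), i.e. the global mean plus user and item bias offsets plus the
# latent-factor interaction; pred_diff returns prediction minus the observed r.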
def joinAndPredictAll(R,U,V,Rmean,N):
''' return (i,j,ui,vj,bu,bv,rij,delta)
'''
return R.map(lambda (i,j,rij): (i, (rij,j))).join(U, numPartitions = N) \
.map(lambda (i, ((rij,j),(ui,bu))): (j, (i,ui,bu,rij))).join(V, numPartitions = N) \
.map(lambda (j, ((i,ui,bu,rij),(vj,bv))): (i,j,ui,vj,bu,bv,rij,pred_diff(rij,ui,vj,bu,bv,Rmean)))
def SE(joinedRDD):
''' return scalar: square error
'''
return joinedRDD.map(lambda x: x[-1]**2).reduce(add)
def normSq(profileRDD,param):
''' return scalar: norm square error
'''
return param * profileRDD.map(lambda (i, (ui,bu)): np.dot(ui.T,ui)+bu**2).reduce(add)
def custom_add_gradients(pair1,pair2):
""" Helper function that adds two gradients.
"""
ui1,bu1,grad_u1,grad_bu1 =pair1
ui2,bu2,grad_u2,grad_bu2 =pair2
return (ui1,bu1,grad_u1+grad_u2,grad_bu1+grad_bu2)
def gradient_u(delta,v):
''' return d-dimension vector: gradient of square error given u
'''
return 2*delta*v
def gradient_bu(delta):
''' return scalar: gradient of square error given bu
'''
return 2*delta
def adaptU(joinedRDD,gamma,lam,N):
''' return new U (updating)
'''
return joinedRDD.map(lambda (i,j,ui,vj,bu,bv,rij,delta): (i, (ui,bu,gradient_u(delta,vj),gradient_bu(delta)))) \
.reduceByKey(custom_add_gradients, numPartitions = N) \
.mapValues(lambda (ui,bu,grad_u,grad_bu): (ui-gamma*(grad_u+2*lam*ui),bu-gamma*(grad_bu+2*lam*bu)))
def gradient_v(delta,u):
''' return d-dimension vector: gradient of square error given v
'''
return 2*delta*u
def gradient_bv(delta):
''' return scalar: gradient of square error given bv
'''
return 2*delta
def adaptV(joinedRDD,gamma,mu,N):
''' return new V (updating)
'''
return joinedRDD.map(lambda (i,j,ui,vj,bu,bv,rij,delta): (j, (vj,bv,gradient_v(delta,ui),gradient_bv(delta)))) \
.reduceByKey(custom_add_gradients, numPartitions = N) \
.mapValues(lambda (vj,bv,grad_v,grad_bv): (vj-gamma*(grad_v+2*mu*vj),bv-gamma*(grad_bv+2*mu*bv)))
def train(args, folds, latent, lam, mu):
cross_val_rmses = []
time_list = []
for k in folds:
        train_folds = [folds[j] for j in folds if j != k]
if len(train_folds)>0:
train = train_folds[0]
for fold in train_folds[1:]:
train=train.union(fold)
train.repartition(args.N).cache()
test = folds[k].repartition(args.N).cache()
Mtrain=train.count()
Mtest=test.count()
print("Initiating fold %d with %d train samples and %d test samples" % (k,Mtrain,Mtest) )
else:
train = folds[k].repartition(args.N).cache()
test = train
Mtrain=train.count()
Mtest=test.count()
print("Running single training over training set with %d train samples. Test RMSE computes RMSE on training set" % Mtrain )
i = 0
change = 1.e99
obj = 1.e99
#Generate user profiles
U = generateUserProfiles(train,int(latent),args.seed,sc,args.N).repartition(args.N).cache()
V = generateItemProfiles(train,int(latent),args.seed,sc,args.N).repartition(args.N).cache()
Rmean = globalMean(train)
numUsers = U.count()
numItems = V.count()
print "Training set contains %d users and %d items" %(numUsers,numItems)
start = time()
gamma = args.gain
while i<args.maxiter:# and change > args.epsilon:
i += 1
joinedRDD = joinAndPredictAll(train,U,V,Rmean,args.N).cache()
oldObjective = obj
obj = SE(joinedRDD)
trainRMSE = np.sqrt(1.*obj/Mtrain)
#obj += normSq(U,lam) + normSq(V,mu)
#change = np.abs(obj-oldObjective)
testRMSE = np.sqrt(1.*SE(joinAndPredictAll(test,U,V,Rmean,args.N))/Mtest)
U.unpersist()
V.unpersist()
gamma = args.gain / i**args.power
U = adaptU(joinedRDD,gamma,lam,args.N).repartition(args.N).cache()
V = adaptV(joinedRDD,gamma,mu,args.N).repartition(args.N).cache()
now = time()-start
#if i % 10 == 0:
print "Iteration: %d\tTime: %f\tTranRMSE: %f\tTestRMSE: %f" % (i,now,trainRMSE,testRMSE)
joinedRDD.unpersist()
cross_val_rmses.append(testRMSE)
time_list.append(now)
train.unpersist()
test.unpersist()
return cross_val_rmses, time_list, U, V
def prediction(Rmean,ui,vj,bu,bv):
return Rmean + (bu-Rmean) + (bv-Rmean) + ui.T.dot(vj)
def test(sparkContext, args):
print "loading training data to estimate Global Mean..."
try:
population = readRatings(args.data, sparkContext)
Rmean = globalMean(population)
print "Success estimating Global Mean"
except:
print "Fail estimating Global Mean"
return None
print "loading model..."
try:
U = sparkContext.textFile(args.modelDir+"model_BiasU", use_unicode=False).map(eval)\
.map(lambda (i,(ui,bu)): (i, (np.array(ui),bu))).cache()
V = sparkContext.textFile(args.modelDir+"model_BiasV", use_unicode=False).map(eval)\
.map(lambda (j,(vj,bv)): (j, (np.array(vj),bv))).cache()
print "Success loading model"
except:
print "Fail loading model"
return None
if args.predict == 'user':
userprofile = U.map(lambda (i, (ui,bu)): (1, (i,ui,bu)) if i == args.id else None) \
.filter(lambda x: x is not None)
itemprofile = V.map(lambda (j, (vj,bv)): (1,(j,vj,bv))).repartition(args.N)
joinedprofile = itemprofile.join(userprofile)\
.map(lambda (c, ((j,vj,bv),(i,ui,bu))): (prediction(Rmean,ui,vj,bu,bv),j))\
.sortByKey(ascending=False)
print "Recommend user %s Top 10 items: " % args.id
print joinedprofile.take(10)
elif args.predict == 'item':
itemprofile = V.map(lambda (j, (vj,bv)): (1,(j,vj,bv)) if j == args.id else None)\
.filter(lambda x: x is not None)
userprofile = U.map(lambda (i, (ui,bu)): (1,(i,ui,bu))).repartition(args.N)
joinedprofile = userprofile.join(itemprofile)\
.map(lambda (c, ((i,ui,bu),(j,vj,bv))): (prediction(Rmean,ui,vj,bu,bv),i))\
.sortByKey(ascending=False)
print "Recommend item %s to Top 10 uers: " % args.id
print joinedprofile.take(10)
if __name__=="__main__":
    parser = argparse.ArgumentParser(description = 'Parallel Matrix Factorization.',formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('data',help = 'Directory containing folds. The folds should be named fold0, fold1, ..., foldK.')
parser.add_argument('folds',type = int,help = 'Number of folds')
    parser.add_argument('--mode',default='train',help ="Default is training mode; any other value runs test mode")
    parser.add_argument('--predict',default='user',help ="Default recommends items to a user; pass 'item' to recommend an item to users")
parser.add_argument('--id',default=1,help ="user or item id")
parser.add_argument('--gain',default=0.001,type=float,help ="Gain")
parser.add_argument('--power',default=0.2,type=float,help ="Gain Exponent")
parser.add_argument('--epsilon',default=1.e-99,type=float,help ="Desired objective accuracy")
parser.add_argument('--lam',default=1.0,type=float,help ="Regularization parameter for user features")
parser.add_argument('--mu',default=1.0,type=float,help ="Regularization parameter for item features")
parser.add_argument('--d',default=10,type=int,help ="Number of latent features")
parser.add_argument('--maxiter',default=20,type=int, help='Maximum number of iterations')
parser.add_argument('--N',default=40,type=int, help='Parallelization Level')
parser.add_argument('--seed',default=1234567,type=int, help='Seed used in random number generator')
parser.add_argument('--cv',default=None, help='Do cross validation. If is none, only do training')
parser.add_argument('--output',default=None, help='Output the best U and V')
parser.add_argument('--latents', type=float, nargs='+', help='Regularization parameter latent dimensions List')
parser.add_argument('--regul', type=float, nargs='+', help='Regularization parameters List, lambda and mu')
parser.add_argument('--plotCSV', default='plotCSV_bias', help='File that write latent dimensions corresponding CV RMSE')
parser.add_argument('--modelDir', default='./', help='Enter root folder containing saved parameters eg. ./model_BiasU')
verbosity_group = parser.add_mutually_exclusive_group(required=False)
verbosity_group.add_argument('--verbose', dest='verbose', action='store_true')
verbosity_group.add_argument('--silent', dest='verbose', action='store_false')
parser.set_defaults(verbose=False)
args = parser.parse_args()
sc = SparkContext(appName='Parallel MF with Bias SVD')
if not args.verbose :
sc.setLogLevel("ERROR")
folds = {}
if args.cv is not None:
for k in range(args.folds):
folds[k] = readRatings(args.data+"/part-0000"+str(k),sc)
else:
folds[0] = readRatings(args.data,sc)
if args.mode == 'train' or args.mode == 'Train':
csvData = list()
minrmse = 9999
bestlatent = -1
bestre = -1
for latent in args.latents:
for re in args.regul:
cv_rmses, time_list, U, V = train(args, folds, latent, re, re)
cur_kfold_rmse = np.mean(cv_rmses)
timecost = np.sum(time_list)
print "Latent %d, regularization %f, average error is: %f, total time cost %f" % (latent, re, cur_kfold_rmse, timecost)
if minrmse > cur_kfold_rmse:
minrmse = cur_kfold_rmse
best_U = U
best_V = V
bestlatent = latent
bestre = re
U.unpersist()
V.unpersist()
storeplot = [latent, re, cur_kfold_rmse, timecost]
csvData.append(storeplot)
print "Best Latent %d, Best Regul %d, RMSE %f" % (bestlatent, bestre, minrmse)
with open(args.plotCSV, 'w') as csvFile:
writer = csv.writer(csvFile)
writer.writerows(csvData)
csvFile.close()
if args.output is not None:
print "Saving U, V, bu and bv RDDs"
best_U = best_U.map(lambda (i, (ui,bu)): (i,(list(ui),bu)))
best_V = best_V.map(lambda (j, (vj,bv)): (j,(list(vj),bv)))
best_U.saveAsTextFile(args.output+'_BiasU')
best_V.saveAsTextFile(args.output+'_BiasV')
elif args.mode == 'test' or args.mode == 'Test':
test(sc, args)
else:
print "Pleas try --mode train or --mode test"
|
[
"noreply@github.com"
] |
KuanChihLee.noreply@github.com
|
619195669e27d0ab7477d8b2c38a6066e7b97bc6
|
d1e4b7ba4b2f356cffb5d9f8f0269cd1593ab3a6
|
/Multinomial_conversation.py
|
6edacca56aa2f60900f89754896b023e41a89a70
|
[] |
no_license
|
mikeliu8492/TextBayes
|
c3c5339c8bb57b40e2f3645c578c7ea1a4b42891
|
a9e0439c4f2998a6a7b42291d985c519a75a9b62
|
refs/heads/master
| 2020-07-27T15:31:38.726970
| 2016-11-10T22:44:35
| 2016-11-10T22:44:35
| 73,426,889
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,531
|
py
|
import math
from oddsProb import *
#dictionary of all unique vocabulary words
total_vocab = dict([])
#dictionay of unique words belonging to "positive" class (labeled +1)
positive_dict = dict([])
#total count of words, including duplicates, belonging to the positive dictionary
total_pos = 0
#dictionay of unique words belonging to "negative" class (labeled -1)
negative_dict = dict([])
#total count of words, including duplicates, belonging to the negative dictionary
total_neg = 0
#true positives
both_pos = 0
#false negatives
std_only_pos_test_neg = 0
#false positives
std_only_neg_test_pos = 0
#true negatives
both_neg = 0
#macros for the training file and testing file
TRAINING_FILE = 'fisher_train_2topic.txt'
TESTING_FILE = 'fisher_test_2topic.txt'
"""
Laplace smoothing function
num = frequency of a particular word in the particular dictionary
denom = total number of words in that same dictionary
len(total_vocab) = total number of unique words in the overall vocabulary
"""
def smooth(num, denom):
my_num = (float)(num+1)
my_denom = (float)(denom+len(total_vocab))
return my_num/my_denom
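# Worked example (editor's note): with a 10-word vocabulary and total_pos == 50,
# smooth(3, 50) returns (3 + 1) / (50 + 10) = 4/60, so words unseen in a class
# still get the non-zero probability 1/60 instead of zeroing out the product.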
#implement training
with open(TRAINING_FILE, "r") as train_file:
for line in train_file:
#split each line up
word_split = line.split()
index = 0
for word in word_split:
if index > 0:
#parse each individual word and its frequency
filtered = word.strip('\n')
split_position = filtered.find(":")
current_word = filtered[:split_position]
current_stat = (int)(filtered[split_position+1:])
#if not in universal dictionary, add it to the universal dictionary
if(total_vocab.get(current_word) == None):
total_vocab[current_word] = True
"""
if document is labeled positive, check each word to see if in positive dictionary
if not, then add it to the positive dictionary and hash the frequency to that word label
if it is, then increment the frequency that is existing by the amount parsed
"""
if ((int)(word_split[0]) == 1):
if positive_dict.get(current_word) == None:
positive_dict[current_word] = current_stat
else:
positive_dict[current_word] += current_stat
total_pos += current_stat
elif ((int)(word_split[0]) == -1):
"""
if document is labeled positive, check each word to see if in positive dictionary
if not, then add it to the positive dictionary and hash the frequency to that word label
if it is, then increment the frequency that is existing by the amount parsed
"""
if negative_dict.get(current_word) == None:
negative_dict[current_word] = current_stat
else:
negative_dict[current_word] += current_stat
total_neg += current_stat
index += 1
#now with the testing
with open(TESTING_FILE, "r") as test_file:
#parse each line by using the split() function
for line in test_file:
word_split = line.split()
index = 0
#take the first element and set it as your "correct" class label
correct_std = (int)(word_split[0])
#initialize your test assessment class label
guessed_std = 0
"""
set your priors for the class frequency,
since we have 440 positive documents, 438 negative documents, 878 total documents in training sets
the priors are calculated as follow:
"document of particular class"/total_documents
"""
pos_prob = math.log(float(440)/878, 2)
neg_prob = math.log(float(438)/878,2)
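        # Numeric check (editor's note): 440/878 is roughly 0.501, so
        # log2(440/878) is about -0.997; the two class priors are nearly
        # equal because the training split is almost balanced.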
for word in word_split:
if index > 0:
#parse the words and frequency from test document
filtered = word.strip('\n')
split_position = filtered.find(":")
current_word = filtered[:split_position]
current_stat = (int)(filtered[split_position+1:])
"""
if word is already in positive dictionary, get the frequency from the dictionary,
smooth it using the function, and update the probability
else, substitute 0 as the frequency, use the smooth function, and update the probability
"""
if(positive_dict.get(current_word) != None):
single_prob = smooth(positive_dict.get(current_word), total_pos)
pos_prob += math.log(math.pow(single_prob, current_stat), 2)
else:
pos_prob += math.log(smooth(0, total_pos), 2)
"""
if word is already in negative dictionary, get the frequency from the dictionary,
smooth it using the function, and update the probability
else, substitute 0 as the frequency, use the smooth function, and update the probability
"""
if(negative_dict.get(current_word) != None):
single_prob = smooth(negative_dict.get(current_word), total_neg)
neg_prob += math.log(math.pow(single_prob, current_stat), 2)
else:
neg_prob += math.log(smooth(0, total_neg), 2)
index += 1
"""
if the positive probability per Bayes' rule is greater than negative,
evaluate the document as "positive"
else, evaluate as negative
"""
if pos_prob > neg_prob:
guessed_std = 1
else:
guessed_std = -1
if(correct_std == 1 and guessed_std == 1):
#if standard is + and eval is +, increment "true positive"
both_pos += 1
elif(correct_std == -1 and guessed_std == -1):
#if standard is - and eval is -, increment "true negative"
both_neg += 1
elif(correct_std == 1 and guessed_std == -1):
#if standard is + and eval is -, increment "false negative"
std_only_pos_test_neg += 1
        elif(correct_std == -1 and guessed_std == 1):
            #if standard is - and eval is +, increment "false positive"
            std_only_neg_test_pos += 1
print "vocab size " + str(len(total_vocab))
print "pos length " + str(total_neg)
print("\n\n")
total = both_pos + both_neg + std_only_pos_test_neg + std_only_neg_test_pos
print "True Positive " + str(both_pos)
print "True Negative " + str(both_neg)
print "False Negative " + str(std_only_pos_test_neg)
print "False Positive " + str(std_only_neg_test_pos)
print "Positive classification rate: " + str(float(both_pos+std_only_neg_test_pos)/total)
print "negative classification rate: " + str(float(both_neg+std_only_pos_test_neg)/total)
print "Accurate Classification rate: " + str(float(both_neg+both_pos)/total)
my_array = []
"""
ODDS RATIO CALCULATIONS
"""
#iterate through positive dictionary, this is your target reference dictionary for now
for key in positive_dict:
num = 1
denom = 1
#for each existing word, get the frequency in current dictionary
num = smooth(positive_dict.get(key), total_pos)
"""
now look for the same word in the negative dictionary
if exists, use the frequency listed in dictionary there with the smooth function
if not exists, then use 0 as your frequency for your smoothing function
"""
if(negative_dict.get(key) == None):
denom = smooth(0, total_neg)
else:
denom = smooth(negative_dict.get(key), total_neg)
"""
place the odds calculation into an object that tracks it for a given word in a given dictionary
num = likelihood in current target dictionary
denom = likelihood in opposite dictionary
key = word you are looking for in reference dictionary
"""
temp = oddsProb(key, num/denom)
my_array.append(temp)
sorted_list = sorted(my_array, key=lambda x: x.my_prob, reverse=True)
print("\n\n\nPOSITIVE ODDS")
index = 0
for item in sorted_list:
print item.my_word, item.my_prob
index +=1
if index == 10:
break
#iterate through negative dictionary, this is your current reference dictionary
my_array = []
index = 0
for key in negative_dict:
num = 1
denom = 1
#for each existing word, get the frequency in current dictionary
num = smooth(negative_dict.get(key), total_neg)
"""
now look for the same word in the positive dictionary
if exists, use the frequency listed in dictionary there with the smooth function
if not exists, then use 0 as your frequency for your smoothing function
"""
if(positive_dict.get(key) == None):
denom = smooth(0, total_pos)
else:
denom = smooth(positive_dict.get(key), total_pos)
"""
place the odds calculation into an object that tracks it for a given word in a given dictionary
num = likelihood in current target dictionary
denom = likelihood in opposite dictionary
key = word you are looking for in reference dictionary
"""
temp = oddsProb(key, num/denom)
my_array.append(temp)
sorted_list = sorted(my_array, key=lambda x: x.my_prob, reverse=True)
print ("\n\nNEGATIVE ODDS")
for item in sorted_list:
print item.my_word, item.my_prob
index +=1
if index == 10:
break
"""
LIKELIHOOD CALCULATIONS
"""
#sort frequency of items in negative dictionary, since all items have the same denominator ignore the denominator
sort_x = sorted(negative_dict.iteritems(), key=lambda (k,v): (v,k), reverse = True)
#only print top 10
print("\n\n\nLIKELIHOOD POSITIVE")
index = 0
for item in sort_x:
print item[0]
index += 1
if index == 10:
break
#sort frequency of items in positive dictionary, since all items have the same denominator ignore the denominator
sort_x = sorted(positive_dict.iteritems(), key=lambda (k,v): (v,k), reverse = True)
#only print top 10
print("\n\n\nLIKELIHOOD NEGATIVE")
index = 0
for item in sort_x:
print item[0]
index += 1
if index == 10:
break
|
[
"mikeliu2@illinois.edu"
] |
mikeliu2@illinois.edu
|
154d998b5a9543245bfb166d690f58e66475779b
|
c7a5797548d400ce3f2044b6871ae9fcb7b18136
|
/code/dataset.py
|
1419dd0d03be5cacf69e325023bc9758eb6a1730
|
[] |
no_license
|
vincent861223/translator
|
19877a6b9d57b913e09217497eaa9c2432bd6c68
|
6fc0692e4f678e8ccbc7fc531ab56163e7ab0b60
|
refs/heads/master
| 2020-08-12T21:44:14.755962
| 2019-10-15T13:12:41
| 2019-10-15T13:12:41
| 214,848,207
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,946
|
py
|
from torch.utils.data import Dataset
from utils import read_datafile
from lang import Lang
import torch
import random
class SentencePairDataset(Dataset):
def __init__(self, pairs, lang1, lang2, max_len=10):
self.pairs = pairs
self.lang1 = lang1
self.lang2 = lang2
lang1_sents = [pair[0] for pair in self.pairs]
lang2_sents = [pair[1] for pair in self.pairs]
self.max_len_l1 = max(map(len, lang1_sents))
self.max_len_l2 = max(map(len, lang2_sents))
#self.max_len = max(self.max_len_l1, self.max_len_l2)
self.max_len = max_len
def __str__(self):
return 'lang1_name: {}\n'.format(self.lang1.language) + \
'lang2_name: {}\n'.format(self.lang2.language) + \
'max_len_l1: {}\n'.format(self.max_len_l1) + \
'max_len_l2: {}\n'.format(self.max_len_l2) + \
'max_len: {}\n'.format(self.max_len) + \
'n_pairs: {}\n'.format(self.__len__())
def __getitem__(self, index):
sent1, sent2 = self.pairs[index]
indexed_sent1 = [self.lang1.word2index[w] for w in sent1]
indexed_sent2 = [self.lang2.word2index[w] for w in sent2]
indexed_sent1 = self.truncate_or_pad(indexed_sent1, bos=True, eos=True)
indexed_sent2_bos = self.truncate_or_pad(indexed_sent2, bos=True)
indexed_sent2_eos = self.truncate_or_pad(indexed_sent2, eos=True)
return {'org': [sent1, sent2], 'indexed': [indexed_sent1, indexed_sent2_bos, indexed_sent2_eos]}
def __len__(self):
return len(self.pairs)
def truncate_or_pad(self, sent, bos=False, eos=False):
n_special = (1 if bos else 0) + (1 if eos else 0)
if len(sent) > self.max_len-n_special: return ([self.lang1.word2index['BOS']] if bos else []) + sent[:self.max_len-n_special] + ([self.lang1.word2index['EOS']] if eos else [])
else: return ([self.lang1.word2index['BOS']] if bos else []) + sent + ([self.lang1.word2index['EOS']] if eos else []) + [self.lang1.word2index['PAD']] * (self.max_len - n_special - len(sent))
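    # Worked example (editor's note): with max_len=5, sent=[7, 8], bos=eos=True,
    # the result is [BOS, 7, 8, EOS, PAD]. Note the method looks the special
    # tokens up in lang1 even for target sentences, which assumes BOS/EOS/PAD
    # share the same indices in both languages.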
def collate_fn(batch):
source = torch.tensor([b['indexed'][0] for b in batch])
target_bos = torch.tensor([b['indexed'][1] for b in batch])
target_eos = torch.tensor([b['indexed'][2] for b in batch])
return source, target_bos, target_eos
def create_datasets(data_file, lang1_name, lang2_name, max_len=10, percentage=0.1):
pairs = read_datafile(data_file)
lang1_sents = [pair[0] for pair in pairs]
lang2_sents = [pair[1] for pair in pairs]
lang1 = Lang(lang1_name, lang1_sents)
lang2 = Lang(lang2_name, lang2_sents)
choose = [1 if random.random() > percentage else 0 for i in range(len(pairs))]
train_idx = [i for i, k in enumerate(choose) if k == 1]
test_idx = [i for i, k in enumerate(choose) if k == 0]
train_pairs = [pair for i, pair in enumerate(pairs) if i in train_idx]
test_pairs = [pair for i, pair in enumerate(pairs) if i in test_idx]
train_dataset = SentencePairDataset(train_pairs, lang1, lang2, max_len)
test_dataset = SentencePairDataset(test_pairs, lang1, lang2, max_len)
return train_dataset, test_dataset
|
[
"B05505019@ntu.edu.tw"
] |
B05505019@ntu.edu.tw
|
964e6bd904ccf37690a358e3cf71979d034c192d
|
ee128cce3827e0536dddf338ee6df271569f18fb
|
/bioinfoweb/bioinfow/wsgi.py
|
59b99aef0195a1899c0bf4e525bfcca1a8a199f5
|
[] |
no_license
|
dbour-hologic/bioinfoweb
|
2790856afeb85cdae9c5088320dd3bef9bc4a198
|
0f2c0f8f4c518c4bfd50d1f5e5c1d29392bc8a11
|
refs/heads/master
| 2020-05-21T15:24:07.940342
| 2017-03-14T15:33:10
| 2017-03-14T15:33:10
| 54,579,042
| 0
| 0
| null | 2017-07-28T16:56:51
| 2016-03-23T17:17:10
|
HTML
|
UTF-8
|
Python
| false
| false
| 486
|
py
|
"""
WSGI config for bioinfoweb project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bioinfow.settings")
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
[
"david.bour@hologic.com"
] |
david.bour@hologic.com
|
f811119ee4e4e331645aef7887d8c800f2be2054
|
39004ba32d7696266827dd9f7a09a75a776bfc4a
|
/applied-python/daily-practice/Day1.py
|
612ec987ca779adab8d74bf914405e6ea4a1ac8f
|
[] |
no_license
|
himmat8074/265093-applied-python
|
b4f7c5469af007a84cac71ba0226836ab773b91b
|
3e492f80e872a2622c6426a0878a6f3ebb3beb83
|
refs/heads/main
| 2023-04-03T21:27:22.282581
| 2021-04-26T10:17:48
| 2021-04-26T10:17:48
| 358,822,356
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 580
|
py
|
print("------------------------\n")
# program to check the IDE compatibilty for python
print("Hello world")
print("------------------------\n")
#using print for simple operations
print(5-4+3)
print((5*2)/5)
print("------------------------\n")
#producing a float by operation on a float and a integer
print(2 * 6.1)
print(1+2+3+4.0+5)
print("------------------------\n")
#using exponential
print(5**2)
print(4**3**2)
print("------------------------\n")
#square root also can be determined using exponential
print(9**(1/2))
print(8**(1/3))
print("------------------------\n")
|
[
"himmat.8074@gmail.com"
] |
himmat.8074@gmail.com
|
4d5013f66a2660352bfcd3500081db926e71cc68
|
d7309934899d865545f485cdc4627d7b47387ef0
|
/multi/test/weight/tmp/03/process.py
|
ebbb9933ef1fd0bf5875c216f022d7842754e3de
|
[] |
no_license
|
johnpzh/mobile-node-tracking
|
ef3b0c5b6825157fb8f343c5159aa10a6c00bb96
|
de27ff4202f3bc3d3c911efe897eda64faa809a1
|
refs/heads/master
| 2021-05-30T15:39:06.361999
| 2016-03-09T00:15:44
| 2016-03-09T00:15:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,138
|
py
|
#! /usr/local/bin/python3
import datetime
import subprocess
import sys
# Do experiments many times
time = datetime.datetime.today()
time_fmt = '%Y%m%d-%H%M%S'
path = '20150316-221404'
# result files are in 'path/'
result_file = path + '-result'
# Process the results
rf = open(result_file, 'r')
vt_file = open('m_num-m-force-03_vs_vt', 'w')
ec_file = open('m_num-m-force-03_vs_ec', 'w')
vt = dict()
ec = dict()
count = dict()
for line in rf:
results = line.split()
var = int(results[0])
time = float(results[1])
energy = float(results[2])
if not (var in vt.keys()):
vt[var] = 0.0
ec[var] = 0.0
count[var] = 0
vt[var] += time
ec[var] += energy
count[var] += 1
vars = sorted(vt.keys())
for var in vars:
#print('{0} {1} {2}'.format(var, vt[var]/count, ec[var]/count))
vt_file.write('{0} {1:.2f}\n'.format(var, vt[var] / count[var]))
ec_file.write('{0} {1:.2f}\n'.format(var, ec[var] / count[var]))
vt_file.close()
ec_file.close()
rf.close()
subprocess.call(['mv', result_file, path])
subprocess.call(['mv', vt_file.name, path])
subprocess.call(['mv', ec_file.name, path])
|
[
"chuqixiaozhu@gmail.com"
] |
chuqixiaozhu@gmail.com
|
3dc785b42876b4704e691efd65dee8fea212944d
|
8a42e8ef22dd15a62cd407910de96b0873fe5252
|
/Faculty/daofaculty.py
|
d2ca416daf43f585649c79ac323b9425b5a3c0ca
|
[] |
no_license
|
Vini-S/Fintek_Project
|
9293300c798cb5e9f9b84d34972392b411849320
|
406b939832f4a3f03ff8645500502a98c4d7ca75
|
refs/heads/master
| 2020-06-19T11:31:37.413669
| 2019-07-13T08:16:41
| 2019-07-13T08:16:41
| 196,693,104
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,962
|
py
|
from schedule.models import Users
from django.db import connection
class Student:
def studentview(self):
cursor = connection.cursor()
query = "select schedule_student_view.s_id, schedule_student_view.s_f_name, schedule_student_view.s_l_name, schedule_student_view.s_Email_id, schedule_student_view.c_code_id,schedule_course.c_name from schedule_course INNER JOIN schedule_student_view ON schedule_student_view.c_code_id=schedule_course.c_code"
cursor.execute(query)
row = cursor.fetchall()
return row
def studentsearch(self, name):
cursor = connection.cursor()
query = "select s_id, s_f_name, s_l_name, s_Email_id, c_code_id from schedule_student_view where s_f_name=%s"
values = (name)
cursor.execute(query, values)
row = cursor.fetchall()
return row
def sviewleave(self):
cursor = connection.cursor()
query = "select s_email, s_date, e_date, l_reason, s_status from schedule_student_leave"
cursor.execute(query)
row = cursor.fetchall()
print(row)
return row
class Faculty:
def facultyview(self):
cursor = connection.cursor()
query = "select f_id, f_f_name, f_l_name, f_Email_id, f_phno from schedule_faculty_view "
cursor.execute(query)
row1 = cursor.fetchall()
return row1
def facultysearch(self, name):
cursor = connection.cursor()
query = "select f_id, f_f_name, f_l_name, f_Email_id, f_phno from schedule_faculty_view where f_f_name=%s"
values = (name)
cursor.execute(query, values)
row = cursor.fetchall()
return row
def addleave(self, emailid, s_date, e_date, reason):
cursor = connection.cursor()
query = "insert into schedule_faculty_leave(f_email, s_date, e_date, l_reason) values (%s,%s,%s,%s)"
values = (emailid, s_date, e_date, reason)
cursor.execute(query,values)
connection.commit()
return 1
def viewfleavebysession(self, emailid): #(Auto fill form)
cursor = connection.cursor()
query = "select f_Email_id from schedule_faculty_view where f_Email_id=%s"
values = (emailid)
cursor.execute(query,values)
row = cursor.fetchall()
return row
def viewleave(self):
cursor = connection.cursor()
query = "select f_email, s_date, e_date, l_reason, f_status from schedule_faculty_leave"
cursor.execute(query)
row = cursor.fetchall()
return row
class Course:
def courseview(self):
cursor = connection.cursor()
query = "select c_code,c_name from schedule_course"
cursor.execute(query)
row = cursor.fetchall()
return row
def coursesearch(self, name):
cursor = connection.cursor()
query = "select c_code,c_name from schedule_course where c_name=%s"
values = (name)
cursor.execute(query, values)
row = cursor.fetchall()
return row
class Module:
def moduleview(self):
cursor = connection.cursor()
query = "select m_id,m_name from schedule_chapters"
cursor.execute(query)
row = cursor.fetchall()
return row
|
[
"noreply@github.com"
] |
Vini-S.noreply@github.com
|
94ba0bf359287436cd70c65d0163697a08c085e2
|
04576fc68868ed37794065b59be1df5c3467494c
|
/python/config.py
|
8f42560d83e37804974f21aa0be3d75d8ff12843
|
[] |
no_license
|
SCandWZH/data_augment
|
f5cf1cb413ffb47c5cb1d98b4cab11797d80d5bd
|
cc01779618f969190f9397c70a60d22cb4e2ce76
|
refs/heads/master
| 2020-05-03T14:51:48.725196
| 2017-04-19T07:45:18
| 2017-04-19T07:45:18
| 178,689,876
| 1
| 0
| null | 2019-03-31T13:11:52
| 2019-03-31T13:11:52
| null |
UTF-8
|
Python
| false
| false
| 720
|
py
|
INPUT_DIR = '/path/to/data/similarity/original/'
OUTPUT_DIR = '/path/to/data/similarity/augmented'
IMAGE_LIST = '/path/to/data/similarity/augmented/image.list'
TRAIN_LIST = '/path/to/data/similarity/augmented/train.list'
VAL_LIST = '/path/to/data/similarity/augmented/val.list'
DEBUG = False
TARGET_WIDTH = 64
TARGET_HEIGHT = 64
RANDOM_ITER = 100
# resize
RANDOM_RESIZE_LB = 0.8
RANDOM_RESIZE_UB = 1.2
# crop
RANDOM_CROP_LB = 0.8
RANDOM_CROP_UB = 1.0
# rotate
RANDOM_ROTATE_LB = -10
RANDOM_ROTATE_UB = 10
# blur
RANDOM_BLUR_RADIUS_LB = 2
RANDOM_BLUR_RADIUS_UB = 4
# papper salt
RANDOM_PEPPER_SALT_LB = 0.01
RANDOM_PEPPER_SALT_UB = 0.04
# train/val
TRAIN_RATIO = 0.9
VAL_RATIO = 0.1
#
DEBUG_TEST_COUNT = 1
|
[
"handong1587@163.com"
] |
handong1587@163.com
|
aa91c7c66238b43831ac24ad4022e149574e68ba
|
3ab494cac87a9f3c5ba17c903ffdbba7e72c305f
|
/algorithm/day5/bit별의 암호화.py
|
8ecc738f387101f2aa40d4a65b72b236d2580ee5
|
[] |
no_license
|
sochic2/TIL
|
6036cae002ce4c4ba5e7d2175e668c664de209de
|
eb2709f5ac1a4b9c79dda0e647f14044c7a4fb6e
|
refs/heads/master
| 2023-01-10T03:51:14.057387
| 2022-12-21T01:27:38
| 2022-12-21T01:27:38
| 162,229,719
| 4
| 1
| null | 2023-01-09T11:56:04
| 2018-12-18T04:23:54
|
Python
|
UTF-8
|
Python
| false
| false
| 328
|
py
|
def Bbit_print(a):
for i in range(7, -1, -1):
if a & (1<<i):
print(1, end="")
else:
print(0, end="")
print()
a = 0x86
key = 0xAA
print("a ==>", end=" ")
Bbit_print(a)
print("a^=key ==>", end=" ")
a ^= key
Bbit_print(a)
print("a^=key ==>", end=" ")
a ^= key
Bbit_print(a)
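# XOR with the same key twice is the identity (a ^ k ^ k == a), which is why
# the second `a ^= key` above restores the original bit pattern.
# A minimal self-check with the values used in this demo:
assert (0x86 ^ 0xAA) ^ 0xAA == 0x86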
|
[
"netzzang12@gmail.com"
] |
netzzang12@gmail.com
|
81281a45d40ecf258744318e4370a85913d59ad1
|
368c66467b78adf62da04cb0b8cedd2ef37bb127
|
/SW expert/python/재미있는오셀로게임.py
|
068285f638a73b828278b044f56c144dd283c5a8
|
[] |
no_license
|
DJHyun/Algorithm
|
c8786ddcd8b5693fc9b3b4721fdf1eeda21611c5
|
fd6ae800886dac4ec5ff6cf2618bc2c839a76e7a
|
refs/heads/master
| 2020-07-30T16:32:49.344329
| 2020-02-25T07:59:34
| 2020-02-25T07:59:34
| 210,289,983
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,549
|
py
|
# The provided template code may be modified freely; just mind the I/O format.
# Refer to the standard input/output examples below if needed.
# Standard input examples
"""
a = int(input())                        read one integer
b, c = map(int, input().split())        read two integers
d = float(input())                      read one float
e, f, g = map(float, input().split())   read three floats
h = input()                             read one string
"""
# Standard output examples
"""
a, b = 6, 3
c, d, e = 1.0, 2.5, 3.4
f = "ABC"
print(a)              print one integer
print(b, end = " ")   print an integer followed by a space, without a newline
print(c, d, e)        print three floats
print(f)              print one string
"""
import sys
"""
The statement below opens input.txt read-only and redirects subsequent
standard input (keyboard) reads to come from the file instead.
For convenience while testing, save the input in a text file and use this
statement to read from the file rather than from the keyboard.
It requires `import sys`.
Before submitting the code for grading, this statement must be deleted or
commented out.
"""
sys.stdin = open("재미있는오셀로게임.txt", "r")
T = int(input())
# Multiple test cases are given; process each one.
for test_case in range(1, T + 1):
# ///////////////////////////////////////////////////////////////////////////////////
n, M = list(map(int, input().split()))
    result = [[0] * n for _ in range(n)]
    # initial 2x2 centre layout: diagonal cells get 2, off-diagonal cells get 1
    for i in range(n // 2 - 1, n // 2 + 1):
        for j in range(n // 2 - 1, n // 2 + 1):
            if i == j:
                result[i][j] = 2
            else:
                result[i][j] = 1
    # read each of the M moves exactly once (the original read them twice and
    # indexed into an empty list, which raised IndexError)
    for _ in range(M):
        test = list(map(int, input().split()))
        tx, ty, z = test[0] - 1, test[1] - 1, test[2]
        result[tx][ty] = z
for j in range(2, n):
for a in range(4):
x, y = [0, 0, j, -j], [j, -j, 0, 0]
if tx + x[a] < 0 or tx + x[a] >= n or ty + y[a] < 0 or ty + y[a] >= n:
continue
else:
if result[tx + x[a]][ty + y[a]] == z:
if x[a] == 0:
if y[a] > 0:
for aaa in range(ty, ty + y[a]):
if result[tx][aaa] != 0:
result[tx][aaa] = z
else:
for aaa in range(ty + y[a], ty):
if result[tx][aaa] != 0:
result[tx][aaa] = z
else:
if x[a] > 0:
for aaa in range(tx, tx + x[a]):
if result[aaa][ty] != 0:
result[aaa][ty] = z
else:
for aaa in range(tx + x[a], tx):
if result[aaa][ty] != 0:
result[aaa][ty] = z
for j in range(2, n):
for a in range(4):
ax, ay = [j, j, -j, -j], [-j, j, -j, j]
if tx + ax[a] < 0 or tx + ax[a] >= n or ty + ay[a] < 0 or ty + ay[a] >= n:
continue
else:
if result[tx + ax[a]][ty + ay[a]] == z:
if a == 0:
for aaa in zip(range(tx + ax[a], tx, -1), range(ty + ay[a], ty)):
if result[aaa[0]][aaa[1]] != 0:
result[aaa[0]][aaa[1]] = z
elif a == 1:
for aaa in zip(range(tx + ax[a], tx, -1), range(ty + ay[a], ty, -1)):
if result[aaa[0]][aaa[1]] != 0:
result[aaa[0]][aaa[1]] = z
elif a == 2:
for aaa in zip(range(tx + ax[a], tx), range(ty + ay[a], ty)):
if result[aaa[0]][aaa[1]] != 0:
result[aaa[0]][aaa[1]] = z
elif a == 3:
for aaa in zip(range(tx + ax[a], tx), range(ty + ay[a], ty, -1)):
if result[aaa[0]][aaa[1]] != 0:
result[aaa[0]][aaa[1]] = z
cnt1, cnt2 = 0, 0
for i in result:
for j in i:
if j == 1:
cnt1 += 1
elif j == 2:
cnt2 += 1
print(f'#{test_case} {cnt1} {cnt2}')
|
[
"djestiny4444@naver.com"
] |
djestiny4444@naver.com
|
3736ae89b67495f9dd446ec36bdd36df819643dd
|
e5a3cfa21f78259dfcf9d991f6f67f0840d9c93c
|
/jinYuan/课件/4_2-mygjviews/demo/views.py
|
d1ef943eef1673c36e885337d2d7145925ffc371
|
[] |
no_license
|
Glittering/pythonLearn
|
ac91ed73cb615e3604d1c4bfde3692cf4efef6e3
|
0097b8fd5fc587a69f6c1bad95b08fe42481bf7c
|
refs/heads/master
| 2021-01-13T05:46:24.974340
| 2017-04-17T12:58:59
| 2017-04-17T12:58:59
| 77,099,698
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 473
|
py
|
#coding:utf-8
from django.shortcuts import render_to_response
def student(request,name,age):
return render_to_response('student.html',locals())
def stud_info(request,gender,info):
return render_to_response('stud_info.html',locals())
from django.http import HttpResponseRedirect, Http404
def redirect(request):
    raise Http404
    #return HttpResponseRedirect('/man_info')  # redirect
# Create your views here.
|
[
"zhaoshichengzhao@sina.com"
] |
zhaoshichengzhao@sina.com
|
16ef309ebb8410726e9c8e3f19116655fffb59c4
|
ce04f462afded3035c7d695192d28b81f390ee0b
|
/yyan/middlewares.py
|
392d1591034d003f5edbbaf0d81f07155ca2b0b6
|
[] |
no_license
|
yangyu823/scrapy_try
|
20e58931d0221e0af6fef75bd8b35d03f354fcab
|
891340895e530beb67a3318a1dddbdaa2ed399cd
|
refs/heads/master
| 2020-05-25T02:46:26.180298
| 2019-05-29T07:25:35
| 2019-05-29T07:25:35
| 187,586,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,593
|
py
|
# -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class YyanSpiderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(self, response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(self, response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, dict or Item objects.
for i in result:
yield i
def process_spider_exception(self, response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Response, dict
# or Item objects.
pass
def process_start_requests(self, start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
class YyanDownloaderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the downloader middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_request(self, request, spider):
# Called for each request that goes through the downloader
# middleware.
# Must either:
# - return None: continue processing this request
# - or return a Response object
# - or return a Request object
# - or raise IgnoreRequest: process_exception() methods of
# installed downloader middleware will be called
return None
def process_response(self, request, response, spider):
# Called with the response returned from the downloader.
# Must either;
# - return a Response object
# - return a Request object
# - or raise IgnoreRequest
return response
def process_exception(self, request, exception, spider):
# Called when a download handler or a process_request()
# (from other downloader middleware) raises an exception.
# Must either:
# - return None: continue processing this exception
# - return a Response object: stops process_exception() chain
# - return a Request object: stops process_exception() chain
pass
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
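# Usage sketch (assumption — not part of this file): template middlewares like
# the two above are switched on in the project's settings.py, e.g.
#   SPIDER_MIDDLEWARES = {'yyan.middlewares.YyanSpiderMiddleware': 543}
#   DOWNLOADER_MIDDLEWARES = {'yyan.middlewares.YyanDownloaderMiddleware': 543}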
|
[
"yangyu823@gmail.com"
] |
yangyu823@gmail.com
|
358a6f35cc05686e94fcea1752ed9254f130e215
|
6adbb5d1cb124bb03d3e8b5455dd1f65d10fe35f
|
/examples/hand_pose_estimation/msra/test_msra_baseline.py
|
bc9af034bb6a0cfec9a063f94482c432794878be
|
[
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
xinghaochen/caffe-pose
|
7122b2dc0a375706d253f3b89af8aa87a4fc15ee
|
43e68699c0fdc5a6fdc14b8697bf7d6cd4d298f0
|
refs/heads/master
| 2023-04-15T10:43:41.823103
| 2023-03-18T13:15:25
| 2023-03-18T13:15:25
| 116,776,703
| 4
| 5
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,776
|
py
|
import numpy as np
import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(BASE_DIR, 'model_msra'))
sys.path.append(os.path.join(BASE_DIR, '../../../python'))
import caffe
print os.path.join(BASE_DIR, '../../../python')
from net_handpose_baseline_msra import make_net
def make_output_pose_command(output_pose_bin, model, weights, label_list, output_name, fx, fy, ux, uy, test_id):
command = '{0} \
--model={1} \
--gpu=0 \
--weights={2} \
--label_list={3} \
--output_name={4} \
--fx={5} \
--fy={6} \
--ux={7} \
--uy={8} \
2>&1 | tee logs/test_handpose_baseline_msra_{9}.txt'.format(output_pose_bin, model, weights, label_list, output_name, fx, fy, ux, uy, test_id)
return command
# make net
# make_net()
# init caffe
caffe.set_device(0)
caffe.set_mode_gpu()
# parameters
root_dir = '/home/workspace/Datasets/MSRA/cvpr15_MSRAHandGestureDB/'
output_pose_bin = '../../../build/tools/output_pose'
fx = 240.99
fy = 240.96
ux = 160
uy = 120
test_id = sys.argv[1]
print 'test_id: {}'.format(test_id)
# --------------------------------------------------------------------------
# test
# --------------------------------------------------------------------------
print 'start testing ...'
# prepare input files
model = 'model_msra/test_handpose_baseline_msra_{}.prototxt'.format(test_id)
weights = 'snapshot_msra/handpose_baseline_msra_{}_iter_80000.caffemodel'.format(test_id)
output_name = 'output/test_handpose_baseline_msra_{}.prototxt'.format(test_id)
label_list = root_dir + 'test_label_{}.txt'.format(test_id)
cmd = make_output_pose_command(output_pose_bin, model, weights, label_list, output_name, fx, fy, ux, uy, test_id)
print cmd
os.system(cmd)
print 'finish testing ...'
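# Invocation sketch (inferred from sys.argv[1] above): the script expects the
# MSRA leave-one-subject-out test id as its sole argument, e.g.
#   python test_msra_baseline.py 0
# Note the file is Python 2 (print statements), matching the Caffe-era stack.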
|
[
"chenxinghaothu@gmail.com"
] |
chenxinghaothu@gmail.com
|
584e8b0c312c39f77ca0773ac08a2e8300c41adb
|
6c213c60eb82742672818cd4acb30dcdd75e03df
|
/pong with AI/pong with AI.py
|
95e7865b027f202b77a8cc5a0e99bb070ac1a64a
|
[
"MIT"
] |
permissive
|
Stosan/Pygame
|
ade7b969cf1d69dc452704833f80759d293501f9
|
f47976f81a5964bfce90c4ef459e619dc76f712b
|
refs/heads/main
| 2023-01-06T19:47:46.788141
| 2020-11-05T21:44:54
| 2020-11-05T21:44:54
| 310,422,540
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,175
|
py
|
import pygame, sys, random
Cap_string = input('Enter Your name and press enter: ')
yournm = Cap_string
# General setup (mixer pre-init must run before pygame.init() to take effect)
pygame.mixer.pre_init(44100, -16, 2, 512)
pygame.init()
clock = pygame.time.Clock()
ffont = pygame.font.SysFont('Arial', 18, True)
basic_font2 = pygame.font.SysFont('Arial', 120, True)
nm = "y"
# Main Window
screen_width = 1280
screen_height = 650
screen = pygame.display.set_mode((screen_width, screen_height))
pygame.display.set_caption('Pong with AI')
# Global Variables with our sounds
bg_color = pygame.Color(255, 234, 244)
accent_color = (27, 35, 43)
blue = pygame.Color(59, 13, 211)
basic_font = pygame.font.SysFont('Garamond', 32)
plob_sound = pygame.mixer.Sound("sounds/pong.ogg")
score_sound = pygame.mixer.Sound("sounds/score.ogg")
middle_strip = pygame.Rect(screen_width / 2 - 2, 0, 4, screen_height)
itd ="AI: "
game_over = False
class Block(pygame.sprite.Sprite):
def __init__(self, path, x_pos, y_pos):
super().__init__()
self.image = pygame.image.load(path)
self.rect = self.image.get_rect(center=(x_pos, y_pos))
pat = "R"
class Player(Block):
def __init__(self, path, x_pos, y_pos, speed):
super().__init__(path, x_pos, y_pos)
self.speed = speed
self.movement = 0
def screen_constrain(self):
if self.rect.top <= 0:
self.rect.top = 0
if self.rect.bottom >= screen_height:
self.rect.bottom = screen_height
def update(self, ball_group):
self.rect.y += self.movement
self.screen_constrain()
class Ball(Block):
def __init__(self, path, x_pos, y_pos, speed_x, speed_y, paddles):
super().__init__(path, x_pos, y_pos)
self.speed_x = speed_x * random.choice((-1, 1))
self.speed_y = speed_y * random.choice((-1, 1))
self.paddles = paddles
self.active = False
self.score_time = 0
def update(self):
if self.active:
self.rect.x += self.speed_x
self.rect.y += self.speed_y
self.collisions()
else:
self.restart_counter()
def collisions(self):
if self.rect.top <= 0 or self.rect.bottom >= screen_height:
pygame.mixer.Sound.play(plob_sound)
self.speed_y *= -1
if pygame.sprite.spritecollide(self, self.paddles, False):
pygame.mixer.Sound.play(plob_sound)
collision_paddle = pygame.sprite.spritecollide(self, self.paddles, False)[0].rect
if abs(self.rect.right - collision_paddle.left) < 10 and self.speed_x > 0:
self.speed_x *= -1
if abs(self.rect.left - collision_paddle.right) < 10 and self.speed_x < 0:
self.speed_x *= -1
if abs(self.rect.top - collision_paddle.bottom) < 10 and self.speed_y < 0:
self.rect.top = collision_paddle.bottom
self.speed_y *= -1
if abs(self.rect.bottom - collision_paddle.top) < 10 and self.speed_y > 0:
self.rect.bottom = collision_paddle.top
self.speed_y *= -1
def reset_ball(self):
self.active = False
self.speed_x *= random.choice((-1, 1))
self.speed_y *= random.choice((-1, 1))
self.score_time = pygame.time.get_ticks()
self.rect.center = (screen_width / 2, screen_height / 2)
pygame.mixer.Sound.play(score_sound)
def restart_counter(self):
current_time = pygame.time.get_ticks()
countdown_number = 3
if current_time - self.score_time <= 700:
countdown_number = 3
if 700 < current_time - self.score_time <= 1400:
countdown_number = 2
if 1400 < current_time - self.score_time <= 2100:
countdown_number = 1
if 2100 < current_time - self.score_time <= 2800:
countdown_number = "go"
if current_time - self.score_time >= 2800:
self.active = True
time_counter = basic_font.render(str(countdown_number), True, accent_color)
time_counter_rect = time_counter.get_rect(center=(screen_width / 2, screen_height / 2 + 50))
pygame.draw.rect(screen, bg_color, time_counter_rect)
screen.blit(time_counter, time_counter_rect)
axt = "ah"
# design the AI opponent
class AI_Opponent(Block):
def __init__(self, path, x_pos, y_pos, speed):
super().__init__(path, x_pos, y_pos)
self.speed = speed
def update(self, ball_group):
if self.rect.top < ball_group.sprite.rect.y:
self.rect.y += self.speed
if self.rect.bottom > ball_group.sprite.rect.y:
self.rect.y -= self.speed
self.constrain()
def constrain(self):
        if self.rect.top <= 0:
            self.rect.top = 0
        if self.rect.bottom >= screen_height:
            self.rect.bottom = screen_height
class GameManager:
def __init__(self, ball_group, paddle_group):
self.player_score = 0
self.AI_opponent_score = 0
self.ball_group = ball_group
self.paddle_group = paddle_group
def run_game(self):
# Drawing the game objects
self.paddle_group.draw(screen)
self.ball_group.draw(screen)
# Updating the game objects
self.paddle_group.update(self.ball_group)
self.ball_group.update()
self.reset_ball()
self.draw_score()
def reset_ball(self):
if self.ball_group.sprite.rect.right >= screen_width:
self.AI_opponent_score += 1
self.ball_group.sprite.reset_ball()
if self.ball_group.sprite.rect.left <= 0:
self.player_score += 1
self.ball_group.sprite.reset_ball()
if self.AI_opponent_score == 10:
alert_AI = "AI wins!"
AI_alt = basic_font2.render(f'{alert_AI}', False, blue)
AI_rect_alt = AI_alt.get_rect(center=(screen_width / 2, screen_height / 2 + 50))
screen.blit(AI_alt, AI_rect_alt)
pygame.display.update()
self.reset_scores()
if self.player_score == 20:
alert_U = "YOU win!"
U_alt = basic_font2.render(f'{alert_U}', False, blue)
U_rect_alt = U_alt.get_rect(center=(screen_width / 2, screen_height / 2 + 50))
            screen.blit(U_alt, U_rect_alt)
            pygame.display.update()
            self.reset_scores()
def draw_score(self):
global axt, nm, pat, yournm, itd
U_name = ffont.render(f'{yournm}', False, blue)
U_rect = U_name.get_rect(midright=(screen_width / 2, screen_height / 2))
screen.blit(U_name, (1118, screen_height / 50))
player_score = basic_font.render(str(self.player_score), True, accent_color)
AI_opponent_score = basic_font.render(str(self.AI_opponent_score), True, accent_color)
player_score_rect = player_score.get_rect(midleft=(screen_width / 2 + 40, screen_height / 2))
AI_opponent_score_rect = AI_opponent_score.get_rect(midright=(screen_width / 2 - 40, screen_height / 2))
screen.blit(player_score, player_score_rect)
screen.blit(AI_opponent_score, AI_opponent_score_rect)
Rmm = itd+pat+nm+axt
AI_name = ffont.render(f'{Rmm}', False, blue)
AI_rect = AI_name.get_rect(center=(screen_width / 2, screen_height / 2))
screen.blit(AI_name, (screen_width / 50, screen_height / 50))
def reset_scores(self):
pygame.time.delay(2000)
self.player_score = 0
self.AI_opponent_score = 0
# Game objects with images
player = Player('imgs/Paddle.png', screen_width - 20, screen_height / 2, 5)
AI_opponent = AI_Opponent('imgs/Paddle.png', 20, screen_height / 2, 5)  # start centred vertically, like the player paddle
paddle_group = pygame.sprite.Group()
paddle_group.add(player)
paddle_group.add(AI_opponent)
ball = Ball('imgs/Ball.png', screen_width / 2, screen_height / 2, 4, 4, paddle_group)
ball_sprite = pygame.sprite.GroupSingle()
ball_sprite.add(ball)
game_manager = GameManager(ball_sprite, paddle_group)
def pong():
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_UP:
player.movement -= player.speed
if event.key == pygame.K_DOWN:
player.movement += player.speed
if event.type == pygame.KEYUP:
if event.key == pygame.K_UP:
player.movement += player.speed
if event.key == pygame.K_DOWN:
player.movement -= player.speed
# Background Stuff
screen.fill(bg_color)
pygame.draw.rect(screen, accent_color, middle_strip)
# Run the game
game_manager.run_game()
# Rendering
pygame.display.flip()
clock.tick(120)
pong()
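# Tuning note (observation, not original commentary): the AI paddle's
# difficulty is governed by the `speed` argument passed to AI_Opponent above —
# a smaller value (e.g. 4) lets the ball slip past more often, while the
# player's paddle speed is fixed at 5 by the Player constructor call.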
|
[
"noreply@github.com"
] |
Stosan.noreply@github.com
|
b0d5b4e95798bd3ee2f5fb7c143b15a519704fd5
|
89be33b0c7030f5112df35c6e9e5546a13b213d9
|
/pyccuracy/actions/select_has_selected_index_action.py
|
0216765aff930f9b276f95f5a510338a612f51f7
|
[] |
no_license
|
kenjiyamamoto/pyccuracy
|
6882910d468a503ce1b38f0c90276ac432de0b9c
|
602255dc529bf1db8cd4d8a81b5123e8c566df50
|
refs/heads/master
| 2021-01-16T22:40:58.418643
| 2009-05-18T21:07:24
| 2009-05-18T21:07:24
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,922
|
py
|
# -*- coding: utf-8 -*-
# Licensed under the Open Software License ("OSL") v. 3.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.opensource.org/licenses/osl-3.0.php
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0,os.path.abspath(__file__+"/../../../"))
from pyccuracy.page import Page
from pyccuracy.actions.action_base import ActionBase
from pyccuracy.actions.element_is_visible_base import *
class SelectHasSelectedIndexAction(ActionBase):
def __init__(self, browser_driver, language):
super(SelectHasSelectedIndexAction, self).__init__(browser_driver, language)
def matches(self, line):
reg = self.language["select_has_selected_index_regex"]
self.last_match = reg.search(line)
return self.last_match
def values_for(self, line):
return self.last_match and (self.last_match.groups()[1], int(self.last_match.groups()[2])) or tuple([])
def execute(self, values, context):
select_name = values[0]
index = values[1]
select = self.resolve_element_key(context, Page.Select, select_name)
        error_message = self.language["select_is_visible_failure"] % select_name
        self.assert_element_is_visible(select, error_message)
selected_index = self.browser_driver.get_selected_index(select)
if (selected_index != index):
self.raise_action_failed_error(self.language["select_has_selected_index_failure"] % (select_name, index, selected_index))
|
[
"heynemann@gmail.com"
] |
heynemann@gmail.com
|
075dacc89ba876127a7b12af477b6c83ea9ec03d
|
292d337576335b3877f553bab7e1885335e3beef
|
/backend/main.py
|
c6f3bbf111c8a90ef6ae5a69789e1acfeb514e40
|
[] |
no_license
|
enochxu/vivpro-assignment
|
5ba376261b4ec13e8a2f8378bba8ec5ac92c1240
|
f21c9b349110c49e02740323081dfeecb42cb440
|
refs/heads/main
| 2023-05-29T20:36:17.245395
| 2021-06-10T20:56:44
| 2021-06-10T20:56:44
| 375,790,870
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,695
|
py
|
import config
import json
from flask import Flask
from flask import request, jsonify
from flask_restful import Resource, Api
import pymongo
from pymongo import MongoClient
from flask_cors import CORS
app = Flask(__name__)
api = Api(app)
CORS(app, origins=['http://localhost:3000'])  # flask-cors expects 'origins'; an Origin header carries no trailing slash
client = MongoClient(config.dbconnection)
db = client['songs']
collection = db['songs']
class GetSongs(Resource):
def get(self):
documents = collection.find({}, {'_id': 0})
response = []
for document in documents:
response.append(document)
return response
# implementing pagination
class GetSongsByPage(Resource):
def get(self):
page = int(request.args.get('page', 1))
per_page = int(request.args.get('per_page', 10))
songs = collection.find({}).sort([('_id', pymongo.ASCENDING)]).skip((page-1)*per_page).limit(per_page)
response = []
for song in songs:
del song['_id']
response.append(song)
return response
class GetSong(Resource):
def get(self, name):
song = collection.find_one({'title': name}, {'_id': 0})
        if song is None:  # find_one returns None when no title matches
            return song, 400
return song
class Rate(Resource):
    def put(self, song_id):
        rating = int(request.args.get('rating'))
        collection.update_one({'id': song_id}, {'$set': {'rating': rating}})  # update_one requires an operator document such as '$set'
        return jsonify(success=True)
api.add_resource(GetSongs, '/api/GetSongs')
api.add_resource(GetSong, '/api/GetSong/<string:name>')
api.add_resource(GetSongsByPage, '/api/GetSongsByPage')
api.add_resource(Rate, '/api/Rate/<string:song_id>')
if __name__ == '__main__':
app.run(port=5000, debug=True)
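# Usage sketch (hypothetical requests; host/port taken from app.run above):
#   GET  http://localhost:5000/api/GetSongsByPage?page=2&per_page=10
#   GET  http://localhost:5000/api/GetSong/<title>
#   PUT  http://localhost:5000/api/Rate/<song_id>?rating=4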
|
[
"enoch.t.xu@gmail.com"
] |
enoch.t.xu@gmail.com
|
5821d7e828656fade04f98c5a720f731cf89a534
|
1dbf485a319405c9c96e83aacbb14c37dabc5823
|
/Day 3/main.py
|
0b92488ac00d052a434b706b11ff32baee11ba4e
|
[] |
no_license
|
AbhishekSV/100daysofcode
|
ff883a1501f5b24986486786feb86d7e39337728
|
99f66462fefb6c30ebc8a8dc7703f5e7abca16f3
|
refs/heads/main
| 2023-06-07T10:14:12.050236
| 2021-07-13T18:03:15
| 2021-07-13T18:03:15
| 365,815,050
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,935
|
py
|
# Day 3 Exercise 1
number = int(input("Which number do you want to check? "))
if number % 2 == 0:
print("This is an even number.")
else:
print("This is an odd number.")
# Day 3 Exercise 2
height = float(input("enter your height in m: "))
weight = float(input("enter your weight in kg: "))
bmi = round(weight / height ** 2,2)
bmi_print = int(bmi)
if bmi < 18.5:
print(f"Your BMI is {bmi_print}, you are underweight.")
elif bmi < 25:
print(f"Your BMI is {bmi_print}, you a normal weight.")
elif bmi < 30:
print(f"Your BMI is {bmi_print}, you slightly overweight.")
elif bmi < 35:
print(f"Your BMI is {bmi_print}, you obese.")
else:
print(f"Your BMI is {bmi_print}, you are clinically obese.")
# Day 3 Exercise 3
year = int(input("Which year do you want to check? "))
#isLeapYear = None
if year % 4 == 0:
if year % 100 == 0:
if year % 400 == 0:
print("Leap Year.")
else:
print("Not Leap Year.")
else:
print("Leap Year.")
else:
print("Not Leap Year.")
# Day 3 Exercise 4
print("Welcome to Python Pizza Deliveries!")
size = input("What size pizza do you want? S, M, or L ")
add_pepperoni = input("Do you want pepperoni? Y or N ")
extra_cheese = input("Do you want extra cheese? Y or N ")
bill = 0
if size == "S":
bill += 15
elif size == "M":
bill += 20
else:
bill += 25
if add_pepperoni == "Y":
if size == "S":
bill += 2
else:
bill += 3
if extra_cheese == "Y":
bill += 1
print(f"Your final bill is: ${bill}.")
# Day 3 Exercise 5
print("Welcome to the Love Calculator!")
name1 = input("What is your name? \n")
name2 = input("What is their name? \n")
name = name1 + name2
t = name.lower().count("t")
r = name.lower().count("r")
u = name.lower().count("u")
e = name.lower().count("e")
l = name.lower().count("l")
o = name.lower().count("o")
v = name.lower().count("v")
true_love_percent = (t+r+u+e) * 10 + (l+o+v+e)
if true_love_percent < 10 or true_love_percent > 90:
print(f"Your score is {true_love_percent}, you go together like coke and mentos.")
elif true_love_percent >=40 and true_love_percent <= 50:
print(f"Your score is {true_love_percent}, you are alright together.")
else:
print(f"Your score is {true_love_percent}.")
#Exercise App
print("Welcome to Treasure Island.")
print("Your mission is to find the treasure.")
choice1 = input('You\'re at a cross road. Where do you want to go? Type "left" or "right" \n').lower()
if choice1 == "left":
choice2 = input('You\'ve come to a lake. There is an island in the middle of the lake. Type "wait" to wait for a boat. Type "swim" to swim across. \n').lower()
if choice2 == "wait":
choice3 = input("You arrive at the island unharmed. There is a house with 3 doors. One red, one yellow and one blue. Which colour do you choose? \n").lower()
if choice3 == "red":
print("It's a room full of fire. Game Over.")
elif choice3 == "yellow":
print("You found the treasure! You Win!")
elif choice3 == "blue":
print("You enter a room of beasts. Game Over.")
else:
print("You chose a door that doesn't exist. Game Over.")
else:
print("You get attacked by an angry trout. Game Over.")
else:
print("You fell into a hole. Game Over.")
#Day 3 End
print("Welcome to the rollercoaster!")
height = int(input("What is your height in cm? "))
bill = 0
if height >= 120:
print("You can ride the rollercoaster!")
age = int(input("What is your age? "))
if age < 12:
bill = 5
print("Child tickets are $5.")
elif age <= 18:
bill = 7
print("Youth tickets are $7.")
elif age >= 45 and age <= 55:
print("Everything is going to be ok. Have a free ride on us!")
else:
bill = 12
print("Adult tickets are $12.")
wants_photo = input("Do you want a photo taken? Y or N. ")
if wants_photo == "Y":
bill += 3
print(f"Your final bill is ${bill}")
else:
print("Sorry, you have to grow taller before you can ride.")
|
[
"abhisabnives@gmail.com"
] |
abhisabnives@gmail.com
|
bbd162c69eacae485a35fd3ca6c6fc00a310fe79
|
07093294ee3b0218a687d34141052a0417d7292f
|
/venv/Scripts/pip-script.py
|
7aeea92865b0d0b2781cdb29d1b28baa0926b4ce
|
[] |
no_license
|
condoran/lab5-7
|
ebc409b79b9fa23a7ccd4daca405e8f989652e62
|
9b8e089acdec5818bb1a89fc040b50c9e7802037
|
refs/heads/master
| 2020-04-05T11:54:30.210998
| 2018-12-12T13:19:22
| 2018-12-12T13:19:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 406
|
py
|
#!C:\Users\Andrei\Desktop\FP\lab5-7\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
)
|
[
"anco990@gmail.com"
] |
anco990@gmail.com
|
e5a188e98f707bfbcbe34f7ec600b5ee808fd90c
|
c91775afdc25f8897c6839cf8294869f3e928083
|
/PythonFiles/snowmass_cfg_ttB_14TEV_900_1600_Conf4v2_15.py
|
e3ee664b191b7795db375e26d848ab120fdb79e5
|
[] |
no_license
|
Saptaparna/Miscellaneous
|
7e6df9cdfd10d4861e2e382b1837dbd4c26fb249
|
b954189d85e56a02fe257b5f5cbd779365719c00
|
refs/heads/master
| 2021-01-23T13:29:30.283308
| 2017-12-20T08:26:37
| 2017-12-20T08:26:37
| 42,525,018
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,285
|
py
|
import FWCore.ParameterSet.Config as cms
import FWCore.PythonUtilities.LumiList as LumiList
import FWCore.ParameterSet.Types as CfgTypes
#
# Parameters that can be set via command line
# when submitting Condor jobs
#
isMc_settable = True
isSignalMc_settable = False
def FindFile(name):
fname = 'file.txt'
return fname
process = cms.Process("LJMetCom")
##################################################################
#
# All input files needed for the job to run
# Specify them here, and they will automatically be correctly
# transferred to Condor when needed
# NOTE: you can define as many or as few entries as you wish,
# names are up to you
miscFiles = {}
miscFiles['jec_uncertainty'] = '../cond/Summer12_V2_DATA_AK5PF_UncertaintySources.txt'
miscFiles['btag_performance'] = '../cond/btag_performance_db062012.root'
miscFiles['json'] = '../data/json/Cert_190456-208686_8TeV_PromptReco_Collisions12_JSON.txt'
miscFiles['MCL1JetPar'] = '../data/START53_V7G_L1FastJet_AK5PFchs.txt'
miscFiles['MCL2JetPar'] = '../data/START53_V7G_L2Relative_AK5PFchs.txt'
miscFiles['MCL3JetPar'] = '../data/START53_V7G_L3Absolute_AK5PFchs.txt'
miscFiles['DataL1JetPar'] = '../data/FT_53_V10_AN3_L1FastJet_AK5PFchs.txt'
miscFiles['DataL2JetPar'] = '../data/FT_53_V10_AN3_L2Relative_AK5PFchs.txt'
miscFiles['DataL3JetPar'] = '../data/FT_53_V10_AN3_L3Absolute_AK5PFchs.txt'
miscFiles['DataResJetPar'] = '../data/FT_53_V10_AN3_L2L3Residual_AK5PFchs.txt'
#Arguments from condor submit script which are used more than once
condorIsMC = bool(True)
relBase = str('/uscms_data/d2/sapta/work/LJMetCode_fromGena/Dilepton_Feb25/CMSSW_5_3_7_patch4')
condorJSON = str('None')
# Dilepton calculator options
process.load('LJMet.Com.DileptonCalc_cfi')
process.DileptonCalc.isMc = condorIsMC
process.DileptonCalc.dataType = cms.string('None')
############################################################
#
# FWLite application options
#
process.ljmet = cms.PSet(
isMc = cms.bool(condorIsMC),
runs = cms.vint32([]),
verbosity = cms.int32(0)
)
#Exclude unnecessary calculators
process.ljmet.excluded_calculators = cms.vstring(
'WprimeCalc',
'LjetsTopoCalc',
'LjetsTopoCalcNew',
'StopCalc'
)
############################################################
#
# common calculator options
process.load('LJMet.Com.commonCalc_cfi')
process.CommonCalc.dummy_parameter = cms.string('Dummy parameter value')
############################################################
#
# pileup calculator options
process.load('LJMet.Com.pileupCalc_cfi')
process.PileUpCalc.verbosity = process.ljmet.verbosity
############################################################
#
# Event selector options
#
process.event_selector = cms.PSet(
selection = cms.string('DileptonSelector'),
isMc = cms.bool(condorIsMC),
# cuts
#HLT
trigger_cut = cms.bool(True),
dump_trigger = cms.bool(False),
#Can use same trigger paths for data and MC since MC is always one of the data versions
trigger_path_ee = cms.vstring('HLT_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v15',
'HLT_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v16',
'HLT_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v17',
'HLT_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v18',
'HLT_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v19'),
trigger_path_em = cms.vstring('HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v4', 'HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v5',
'HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v6', 'HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v7',
'HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v8', 'HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v9',
'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v4', 'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v5',
'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v6', 'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v7',
'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v8', 'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v9'),
trigger_path_mm = cms.vstring('HLT_Mu17_Mu8_v16', 'HLT_Mu17_Mu8_v17', 'HLT_Mu17_Mu8_v18',
'HLT_Mu17_Mu8_v19', 'HLT_Mu17_Mu8_v21', 'HLT_Mu17_Mu8_v22',
'HLT_Mu17_TkMu8_v9', 'HLT_Mu17_TkMu8_v10', 'HLT_Mu17_TkMu8_v11',
'HLT_Mu17_TkMu8_v12', 'HLT_Mu17_TkMu8_v13', 'HLT_Mu17_TkMu8_v14'),
pv_cut = cms.bool(False),
hbhe_cut = cms.bool(False),
jet_cuts = cms.bool(False),
jet_minpt = cms.double(20.0),
jet_maxeta = cms.double(5),
min_jet = cms.int32(0),
max_jet = cms.int32(4000),
muon_cuts = cms.bool(True),
min_muon = cms.int32(0),
muon_minpt = cms.double(10.0),
muon_maxeta = cms.double(4.0),
max_muon = cms.int32(20),
electron_cuts = cms.bool(True),
min_electron = cms.int32(0),
electron_minpt = cms.double(10.0),
electron_maxeta = cms.double(4.0),
max_electron = cms.int32(20),
min_lepton = cms.int32(2),
met_cuts = cms.bool(False),
min_met = cms.double(0.0),
btag_cuts = cms.bool(False),
btagOP = cms.string("CSVM"),
btag_1 = cms.bool(True),
btag_2 = cms.bool(True),
btag_3 = cms.bool(False),
trigger_collection = cms.InputTag('TriggerResults::HLT'),
pv_collection = cms.InputTag('goodOfflinePrimaryVertices'),
jet_collection = cms.InputTag('goodPatJetsPFlow'),
muon_collection = cms.InputTag('selectedPatMuonsPFlowLoose'),
electron_collection = cms.InputTag('selectedPatElectronsPFlowLoose'),
met_collection = cms.InputTag('patMETsPFlow'),
JEC_txtfile = cms.string(miscFiles['jec_uncertainty']),
JECup = cms.bool(False),
JECdown = cms.bool(False),
JERup = cms.bool(False),
JERdown = cms.bool(False),
BTagUncertUp = cms.bool(False),
BTagUncertDown = cms.bool(True),
do53xJEC = cms.bool(True),
MCL1JetPar = cms.string(miscFiles['MCL1JetPar']),
MCL2JetPar = cms.string(miscFiles['MCL2JetPar']),
MCL3JetPar = cms.string(miscFiles['MCL3JetPar']),
DataL1JetPar = cms.string(miscFiles['DataL1JetPar']),
DataL2JetPar = cms.string(miscFiles['DataL2JetPar']),
DataL3JetPar = cms.string(miscFiles['DataL3JetPar']),
DataResJetPar = cms.string(miscFiles['DataResJetPar']),
keepFullMChistory = cms.bool(True)
)
##################################################################
#
# Input files
#
# NOTE: keep your test inputs in the python files as in
# this example, and they will be correctly substituted with
# specified input events when you submit to Condor
# (
#
# nEvents and skipEvents are for interactive use, their
# values will be correctly reset when you submit Condor
#
input_module = 'LJMet.Com.ttB_14TEV_900_1600_Conf4v2_15'
process.load(input_module)
process.inputs.nEvents = cms.int32(-1)
process.inputs.skipEvents = cms.int32(0)
############################################################
#
# JSON
JsonFile = miscFiles['json']
myList = LumiList.LumiList(filename=JsonFile).getCMSSWString().split(',')
if not condorIsMC:
process.inputs.lumisToProcess.extend(myList)
#######################################################
#
# Output
#
process.outputs = cms.PSet (
outputName = cms.string('ttB_14TEV_900_1600_Conf4v2_15'),
treeName = cms.string('ljmet'),
)
#######################################################
#
# Object selector options
#
# Primary vertex
process.load('PhysicsTools.SelectorUtils.pvSelector_cfi')
process.pvSelector.pvSrc = cms.InputTag('goodOfflinePrimaryVertices')
process.pvSelector.minNdof = cms.double(4.0)
process.pvSelector.maxZ = cms.double(24.0)
process.pvSelector.maxRho = cms.double(2.0)
# jets
process.load('PhysicsTools.SelectorUtils.pfJetIDSelector_cfi')
process.pfJetIDSelector.version = cms.string('FIRSTDATA')
process.pfJetIDSelector.quality = cms.string('LOOSE')
|
[
"saptaparna@gmail.com"
] |
saptaparna@gmail.com
|
2587ad784ccf844c42813b9a6b23c51a76963619
|
9bce7ec9dc841744615549f67a9aadcb212ea958
|
/src/projects/migrations/0005_auto_20191019_2024.py
|
85cc115f2a29d5a98d3492b2ee5b05a165d5da25
|
[
"MIT"
] |
permissive
|
ERogalla/portfolio
|
9b3d817ca6d7032fc7685aae21c90abf66f17001
|
6c484807feddb4abfe50c1fd9b2f09ce955f0d7b
|
refs/heads/master
| 2022-12-03T15:02:50.458999
| 2020-07-03T19:19:18
| 2020-07-03T19:19:18
| 213,979,386
| 0
| 0
| null | 2022-11-22T06:09:06
| 2019-10-09T17:19:29
|
CSS
|
UTF-8
|
Python
| false
| false
| 435
|
py
|
# Generated by Django 2.2.6 on 2019-10-19 20:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0004_auto_20191019_2010'),
]
operations = [
migrations.AlterField(
model_name='project',
name='image',
field=models.ImageField(blank=True, upload_to='media', verbose_name='Uploaded Image'),
),
]
|
[
"erogalla@utexas.edu"
] |
erogalla@utexas.edu
|
0cbdc44a3aecfdfbc3789735359e62aeeca9bdfb
|
18e4588df588b431c110338754c4160c6e64157a
|
/src/ad/migrations/0020_auto_20200626_1525.py
|
f66f6797e89c61b81398bc24e195a590be28c722
|
[] |
no_license
|
mamee93/websilt-blog
|
60ae8aa8df737109b13af3a36e0abfd699357949
|
5d693c4f36ebb91b1bd5867db7feafd95bebf19c
|
refs/heads/master
| 2022-11-09T06:26:41.232243
| 2020-07-01T15:55:50
| 2020-07-01T15:55:50
| 263,414,708
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 794
|
py
|
# Generated by Django 3.0.6 on 2020-06-26 11:25
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ad', '0019_auto_20200626_1520'),
]
operations = [
migrations.AlterField(
model_name='ad',
name='category',
field=models.ForeignKey(limit_choices_to={'main_category': True}, on_delete=django.db.models.deletion.CASCADE, related_name='ad_category', to='ad.Category'),
),
migrations.AlterField(
model_name='category',
name='main_category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='maincategory', to='ad.Category'),
),
]
|
[
"mameeal@gmail.com"
] |
mameeal@gmail.com
|
98b6369757f30a3faa769111b9470dc7e0bddae8
|
4d36cccb6c17c708eb06dd8db56694dbdc452d28
|
/articles/migrations/0006_auto_20190912_1457.py
|
1f19b5227442ed9e0b07ea778c2bb2dcb9c94905
|
[] |
no_license
|
TheDevengers/welldone-BE
|
af583a4f9cfed63651324360ffe1fc46fb8b1846
|
c8a2e6bcd8b222c6a1fdb0336f45c4f1e66e05f5
|
refs/heads/develop
| 2023-04-30T08:47:39.380417
| 2021-03-23T08:02:43
| 2021-03-23T08:02:43
| 205,995,734
| 1
| 0
| null | 2023-04-21T20:37:07
| 2019-09-03T05:27:34
|
Python
|
UTF-8
|
Python
| false
| false
| 531
|
py
|
# Generated by Django 2.2.5 on 2019-09-12 12:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('articles', '0005_auto_20190912_1445'),
]
operations = [
migrations.AlterField(
model_name='article',
name='response_to',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='articles.Article', verbose_name='Response to'),
),
]
|
[
"vegekku@gmail.com"
] |
vegekku@gmail.com
|
19d3d6bf8cdea43d5b1639aa24aa44178d691d27
|
338defe1b32d25d4a048780836076e412de44a73
|
/Conditional/3.py
|
940dcdca2d55668b02ecf2ac4f05e8a084fe58d3
|
[] |
no_license
|
easyeah/githubtutorial
|
acaa45e20a0cadc9d817686399914c4fad260549
|
de9c744014294863a553f463674675569e11ac8c
|
refs/heads/master
| 2023-02-01T19:07:23.504698
| 2020-12-23T03:59:01
| 2020-12-23T03:59:01
| 321,349,761
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 91
|
py
|
input = 22
real = 11
if real == input :
print("Hello!")
else :
print("Who are you")
|
[
"easyeah@ijiyeui-MacBookAir.local"
] |
easyeah@ijiyeui-MacBookAir.local
|
59533c0199b661b3a1d1f58fdf59f4431c6b8008
|
65568de005fad42dbeafa9f2e3236651bf32ab7d
|
/sdocs/wsgi.py
|
8eabbfdc98b6bc3efb9705a6a1b1e7c7cbb3261e
|
[] |
no_license
|
xtess16/admin-back
|
b760f9a541e5c26823e0c1021b625050c7687b72
|
8d3fabe45cfb4cc46f3b5fad5884c20297ffa4c5
|
refs/heads/master
| 2022-04-03T10:53:04.868778
| 2020-01-13T09:45:38
| 2020-01-13T09:45:38
| 236,314,288
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 387
|
py
|
"""
WSGI config for sdocs project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sdocs.settings')
application = get_wsgi_application()
|
[
"intro333@ya.ru"
] |
intro333@ya.ru
|
a458def1da622eaef02e4a664937317cae80b6ed
|
2f15020ccc389d5a72b5cb2f55cd14ee7c0275f3
|
/find_libary/find_libary.py
|
28fd895328ad28e68ffd827a5107f97ad6aa21bd
|
[] |
no_license
|
guokai27/Tools
|
bfe5c5a680af6a7fa5d8e09ce9b77d85231b1925
|
bcd20c977daba7a88a95cdcaf2ff8f31fe42d3ba
|
refs/heads/master
| 2020-08-14T22:36:50.382826
| 2019-09-17T09:06:01
| 2019-09-17T09:06:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,300
|
py
|
# coding: utf-8
import os
def filter_libary(libary):
for l in libary.split('.'):
if l and l not in import_lst:
return l.strip()
return ''
def filter_libary2(string):
return (i.split('.')[0].strip().split('#')[0] for i in string.split(' as ')[0].split(','))
def walkdir(rootDir):
for filename in os.listdir(rootDir):
pathname = os.path.join(rootDir, filename)
if (os.path.isdir(pathname)):
walkdir(pathname)
elif pathname.endswith('.py'):
with open(pathname, 'r', encoding='utf-8') as f:
for line in f.readlines():
if 'import ' in line:
word_lst = line.split(' ')
if word_lst[0] == 'from':
import_lst.append(filter_libary(word_lst[1]))
elif word_lst[0] == 'import':
for item in filter_libary2(' '.join(word_lst[1:])):
if item not in import_lst:
import_lst.append(item.strip())
if __name__ == '__main__':
# rootDir = 'F:\\mitmproxy-master'
rootDir = ''
global import_lst
import_lst = []
walkdir(rootDir)
import_lst = sorted(list(set(import_lst)))[1:]
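# Usage sketch (hypothetical path): point rootDir at a project checkout to
# collect its top-level imports, e.g. rootDir = '/path/to/project' leaves a
# sorted list such as ['csv', 'numpy', 'os', ...] in import_lst.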
|
[
"wnma3mz@163.com"
] |
wnma3mz@163.com
|
1b62f125c1180fb8ba8b9a43a5b6ab561ed1ced0
|
beafb123b1475344ba15bb9dac4014d2c973cfa2
|
/api/v1/views/__init__.py
|
e7806ceccfa2ac661279dafdff480044fd509b69
|
[
"LicenseRef-scancode-public-domain"
] |
permissive
|
csoriano2832/AirBnB_clone_v3
|
ec1b6b0a0d9ff7370f5c43e09479b6951d6179e1
|
0aaad8fa40690960cd66f5ea1108858ad08bea88
|
refs/heads/master
| 2023-08-01T01:59:29.675599
| 2021-09-21T20:30:43
| 2021-09-21T20:30:43
| 407,196,301
| 0
| 1
| null | 2021-09-21T19:47:17
| 2021-09-16T14:27:57
|
Python
|
UTF-8
|
Python
| false
| false
| 373
|
py
|
#!/usr/bin/python3
""" Sets a new flask object based on app blueprint"""
from flask import Blueprint
app_views = Blueprint('app_views', __name__, url_prefix='/api/v1')
from api.v1.views.index import *
import api.v1.views.states
import api.v1.views.cities
import api.v1.views.amenities
import api.v1.views.users
import api.v1.views.places
import api.v1.views.places_reviews
|
[
"2832@holbertonschool.com"
] |
2832@holbertonschool.com
|
8bcee44498640e3e8e6b4ce9c36b104e65b57e20
|
3168148e3791b53dacb1ceb22531c71019b84f53
|
/stocktojs.py
|
84ce5f8e93956aa243d13a064fcc15ef94c45c67
|
[] |
no_license
|
tschwarz1/dogstocks
|
85d15375cb797afef7f2b5c4eea64ee7a01f21ab
|
745090cd381c1ca5bac410ff2e00db41e4b4e9ac
|
refs/heads/master
| 2023-08-11T03:46:53.370716
| 2021-09-26T12:38:50
| 2021-09-26T12:38:50
| 410,236,662
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 257
|
py
|
import json
import csv
f = open("large3.js", "w")
f.write('let WORDS = [\n')
with open('egg.csv', 'r') as file:
reader = csv.reader(file)
for row in reader:
f.write(' [\"'+row[0]+'\",\"'+row[1]+'\"],\n')
f.write('];')
f.close()
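# Alternative sketch (uses the json import above): json.dumps handles quoting
# and escaping automatically, e.g.
#   rows = list(csv.reader(open('egg.csv')))
#   open('large3.js', 'w').write('let WORDS = ' + json.dumps([[r[0], r[1]] for r in rows]) + ';')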
|
[
"88667871+tschwarz1@users.noreply.github.com"
] |
88667871+tschwarz1@users.noreply.github.com
|
33b8370cb5b7909991f8bc565713cbe209635289
|
e4926b6b7f2628bf8f118d3b3367dd68eea20c71
|
/actions/admin.py
|
df3b8184b1e5798e36c2b1f6207e03413b1bab68
|
[] |
no_license
|
jmshulett/django_web_development_assignment4
|
1ab2088481d295df6e8da0ad9a702125c90651e9
|
8eaafa9ef54bffe17e5828a0bce07243421efab1
|
refs/heads/main
| 2023-08-13T17:54:57.751715
| 2021-10-16T20:59:29
| 2021-10-16T20:59:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 248
|
py
|
from django.contrib import admin
from .models import Action
@admin.register(Action)
class ActionAdmin(admin.ModelAdmin):
list_display = ('user', 'verb', 'target', 'created')
list_filter = ('created',)
search_fields = ('verb',)
|
[
"noreply@github.com"
] |
jmshulett.noreply@github.com
|
57b9f07dfc61926523d65d0345e2667186ffd666
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/437/usersdata/311/96973/submittedfiles/funcoes1.py
|
bd4e4152074845b6783d9cb22737bbb554537481
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,058
|
py
|
# -*- coding: utf-8 -*-
def crescente(lista):
    # code of the crescente (increasing) function
    for i in range(0, len(lista) - 1):
        if lista[i] >= lista[i + 1]:
            return 'N'
    return 'S'
# the remaining functions
def decrescente(lista):
    for i in range(0, len(lista) - 1):
        if lista[i] <= lista[i + 1]:
            return 'N'
    return 'S'
def igualdade(lista):
    for i in range(0, len(lista) - 1):
        if lista[i] != lista[i + 1]:
            return 'N'
    return 'S'
# main program
a = []
b = []
c = []
n = int(input('digite a quantidade: '))
for i in range(0, n, 1):
    a.append(int(input('Digite o numero%d: ' % (i + 1))))
for i in range(0, n, 1):
    b.append(int(input('Digite o numero%d: ' % (i + 1))))
for i in range(0, n, 1):
    c.append(int(input('Digite o numero%d: ' % (i + 1))))
print (crescente(a))
print (decrescente(a))
print (igualdade(a))
print (crescente(b))
print (decrescente(b))
print (igualdade(b))
print (crescente(c))
print (decrescente(c))
print (igualdade(c))
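# Behaviour sketch for the fixed functions (values are illustrative):
#   crescente([1, 2, 3]) -> 'S'    decrescente([3, 2, 1]) -> 'S'
#   igualdade([5, 5, 5]) -> 'S'    crescente([1, 3, 2])   -> 'N'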
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
0af9c310fc5620d9e451a9cf2726eb14e3678232
|
9a4df86b4ee8a8dbaaa3e7dc9d85fc3d8ca3dd48
|
/amymeme/20210121.py
|
f6640fee6001bf4a28b272ecf5169fa718bec762
|
[] |
no_license
|
AMYMEME/algorithm-study
|
3ff2997ef1f50a2a6d34d46bac49b5fb3c14d7a4
|
f5bd767c46a6571d2d139a946bd3603c96877edb
|
refs/heads/main
| 2023-07-10T06:38:07.444611
| 2021-08-24T14:17:07
| 2021-08-24T14:17:07
| 324,687,765
| 0
| 1
| null | 2021-08-24T14:17:08
| 2020-12-27T04:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,058
|
py
|
# acmicpc.net/problem/2667
import sys
from collections import deque
def bfs(row_idx, col_idx):
count = 0
q = deque()
q.append((row_idx, col_idx))
visit.add((row_idx, col_idx))
while q:
row_idx, col_idx = q.popleft()
count += 1
for d in range(4):
next_row_idx, next_col_idx = row_idx + dx[d], col_idx + dy[d]
if -1 < next_row_idx < N and -1 < next_col_idx < N:
if house_map[next_row_idx][next_col_idx] == 1 and (next_row_idx, next_col_idx) not in visit:
q.append((next_row_idx, next_col_idx))
visit.add((next_row_idx, next_col_idx))
return count
N = int(sys.stdin.readline().strip())
house_map = [list(map(int, sys.stdin.readline().strip())) for _ in range(N)]
visit = set()
dx = [-1, 1, 0, 0]
dy = [0, 0, -1, 1]
result = []
for i in range(N):
for j in range(N):
if house_map[i][j] == 1 and (i, j) not in visit:
result.append(bfs(i, j))
result.sort()
print(len(result))
for i in result:
print(i)
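# This solves acmicpc.net/problem/2667: BFS labels each 4-connected block of
# 1s and records its size; the program prints the number of blocks, then the
# sizes in ascending order.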
|
[
"cgc8016@ewhain.net"
] |
cgc8016@ewhain.net
|
fffe3e5e4ec658187ec9b1b7bdf82b2340a23bc0
|
8c8a752bd3efd126900b04e4b004d666e9f81bc9
|
/playground.py
|
b2f7284446b74d2c95b79a8dd590505805a4b7a1
|
[] |
no_license
|
nara/webscrapper
|
59ff111edb6e12c05386693bfd3a4864f2ad47a6
|
b876c9edbc4e4d8018aafe225c6ad34c80f9f674
|
refs/heads/master
| 2022-07-18T14:53:57.892857
| 2020-05-17T21:40:18
| 2020-05-17T21:40:18
| 263,176,694
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,859
|
py
|
import gevent
from gevent.queue import Queue
import sys
from googutils import downloadPage, downloadDetailPage, getNavigationLinks, getSymbolData, getLinkSet
import re
import repository as repo
import urllib2
import HTMLParser
repo.SymbolRepo.writeToCsv()
sys.exit()
link = "http://www.google.com/finance?catid=TRBC%3A57&sort=PE_RATIO&ei=s03UWOm2GorNjAHb-I-YBA&start=40&num=20"
print re.sub("start=(\d*)", "start=" + str(1*20), link)
links = getLinkSet(link, 0, 10, 2)
#print links
sys.exit()
text = downloadPage("http://www.google.com/finance?catid=TRBC%3A57&sort=PE_RATIO&ei=s03UWOm2GorNjAHb-I-YBA&start=40&num=20")
m = re.search("<table id=main[\s\S]*?class=results>([\s\S]*?<\/table>)", text)
mainTableHtml = m.group(1)
m1 = re.search("(<tr>[\s]*?<td align=[\s\S]*?<\/table>)", mainTableHtml)
mainTableRowData = m1.group(1)
rows = re.findall("(<tr>[\s]*?<td align=[\s\S]*?)(?=(<tr>|<\/table>))", mainTableRowData)
data = []
for tuplerow in rows:
row = tuplerow[0]
if (row == "</table>"):
continue
print row
print "----------"
symbol = re.search("href=\"\/finance\?q=\w*:(\w*)&", row).group(1)
name = re.search("href=\"\/finance\?q=[\s\S]*?>(.*)<\/a>", row).group(1)
tds = re.findall("<td align=right class=\"[\s\S]*?(.*)[\s]*(?=(<td|$))", row)
data.append({ 'symbol' : symbol, 'name': name, 'price' : tds[0][0], 'marketcap' : tds[1][0],
'peratio' : tds[2][0], 'annrevenue' : tds[3][0], 'netincome' : tds[4][0], 'success': False })
print data
sys.exit()
class Actor(gevent.Greenlet):
def __init__(self):
self.inbox = Queue()
Greenlet.__init__(self)
def receive(self, message):
"""
Define in your subclass.
"""
raise NotImplementedError()
def _run(self):
self.running = True
while self.running:
message = self.inbox.get()
self.receive(message)
import gevent
from gevent.queue import Queue
from gevent import Greenlet
class Worker(Actor):
def __init__(self, boss, index):
self.boss = boss
self.index = index
Actor.__init__(self)
def receive(self, message):
print "w%d %s " %(self.index, message)
self.boss.inbox.put('done')
gevent.sleep(0)
class Manager(Actor):
def __init__(self):
self.workers = [Worker(self, i) for i in xrange(4)]
for worker in self.workers:
worker.start()
Actor.__init__(self)
def receive(self, message):
print(message)
for idx, worker in enumerate(self.workers):
worker.inbox.put('manager telling worker')
gevent.sleep(0)
man = Manager()
man.start()
man.inbox.put('start')
gevent.joinall([man])
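# Pattern note: each Actor serializes its message handling through an inbox
# Queue inside a single greenlet, and gevent.sleep(0) cooperatively yields to
# the hub so peer greenlets get scheduled between messages.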
|
[
"avnrao@gmail.com"
] |
avnrao@gmail.com
|
cdf5b2312c0a5cf620f94dccb54931b8d92aba5c
|
9be0071a0132d42c4253632d57927379b866d2bf
|
/horizon-caffe/tools/disturb_in_eye_pic.py
|
ba897c6e0ebb5b33f534e330ec02510fe6eeedc1
|
[] |
no_license
|
zhaoxuli/Eye_clsfy
|
5f1635a3766d61d1895e1cea037461fc662f1185
|
692b7c2061d34a4c5b5ce1af052ab179e77bfa50
|
refs/heads/master
| 2021-04-03T09:58:50.329785
| 2018-03-14T08:37:33
| 2018-03-14T10:55:09
| 125,232,398
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,330
|
py
|
# -*- coding: UTF-8 -*-
import os
import sys
import glob
import numpy as np
import cv2
import argparse
# if test_ratio = [1, 5], then:
# 1. idx % 10 == [0, 2, 3, 4, 6, 7, 8, 9] saves to 'train folders'
# 2. idx % 10 == [1, 5] saves to 'test folders'
test_ratio = [1, 5]
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-pic_dir', type=str)
parser.add_argument('-save_path', type=str)
parser.add_argument('-disturb_time', type=int, default=10, help='[o]')
args = parser.parse_args()
if args.save_path[-1] != os.sep:
args.save_path += os.sep
return args
def append_diff_part_path(path_src, path_dst):
src_list = str(path_src).split(os.sep)
dst_list = str(path_dst).split(os.sep)
for path_part in src_list:
if path_part not in dst_list:
dst_list.append(path_part)
return os.sep.join(dst_list)
class B:
def __init__(self, args, debug=0):
self.pic_dir = args.pic_dir
self.save_path = args.save_path
self.disturb_time = args.disturb_time
self.cut_scale = '1.8x1.8'
if debug == 1:
print 'args:', args
def get_eye_rect(self, m1, m2, n1, n2, width, height):
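        # Convert an eye crop captured at scale (m1, m2) into one at scale
        # (n1, n2): the rect is padded (or shrunk, when n < m) symmetrically,
        # so the returned coordinates can lie outside the original crop.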
w = width / float(m1)
ox = (n1 - m1) * w / 2.0
oy = (n2 - m2) * w / 2.0
return 0 - ox, 0 - oy, width + ox, height + oy
def generate_disturb_rect(self, rect, context, img_w, img_h):
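        # Randomly jitter the four corners of `rect` inwards by up to
        # `context` of its width, clamp them to the image bounds, and return
        # them as a 4x2 float32 array (fed to cv2.getPerspectiveTransform).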
import random
w = rect[2] - rect[0]
h = rect[3] - rect[1]
x0 = rect[0] + random.random() * w * context
x1 = rect[2] - random.random() * w * context
x2 = rect[2] - random.random() * w * context
x3 = rect[0] + random.random() * w * context
y0 = rect[1] + random.random() * w * context
y1 = rect[1] + random.random() * w * context
y2 = rect[3] - random.random() * w * context
y3 = rect[3] - random.random() * w * context
x0 = 0 if x0 < 0 else x0
x1 = img_w if x1 > img_w else x1
x2 = 0 if x2 < 0 else x2
x3 = img_w if x3 > img_w else x3
y0 = 0 if y0 < 0 else y0
y1 = 0 if y1 < 0 else y1
y2 = img_h if y2 > img_h else y2
y3 = img_h if y3 > img_h else y3
res = np.zeros((4, 2), dtype="float32")
res[:, 0] = [x0, x1, x2, x3]
res[:, 1] = [y0, y1, y2, y3]
return res
def process(self):
# for dir, algo_res_file, video in self.algofile_list:
        pic_dir = self.pic_dir
        print "[Processing]", pic_dir
        src_files = glob.glob(pic_dir + os.path.sep + '*')
img_idx = 0
for img_name in src_files:
img = cv2.imread(img_name)
height, width, channel = img.shape
left, top, right, bottom = self.get_eye_rect(2.0, 2.0, 1.8, 1.8, width, height)
eye_rect = [left, top, right, bottom]
dst_vertex = np.zeros((4, 2), dtype="float32")
dst_vertex[:, 0] = [left, right, right, left] # [left, right, right, left]
dst_vertex[:, 1] = [top, top, bottom, bottom] # [top, top, bottom, bottom]
disturb_num = 0
for k in range(0, self.disturb_time):
context = 0.05
smp_vertex = self.generate_disturb_rect(eye_rect, context, width, height)
M = cv2.getPerspectiveTransform(smp_vertex, dst_vertex)
wrap_img = cv2.warpPerspective(img, M, (width, height))
left, top, right, bottom = int(left), int(top), int(right), int(bottom)
wrap_img_roi = wrap_img[top:bottom + 1, left:right + 1]
# print img.shape, wrap_img_roi.shape, left, top, right, bottom
# fn = os.path.join(outimg_dir, "%06d.png" % k)
_img_path, _img_name = os.path.split(img_name)
if _img_path[0] == os.sep:
_img_path = _img_path[1:]
if (img_idx % 10) in test_ratio:
w_img_path = append_diff_part_path(_img_path, self.save_path + 'test')
else:
w_img_path = append_diff_part_path(_img_path, self.save_path + 'train')
_img_name = os.path.splitext(_img_name)[0]
fn = os.path.join('', w_img_path, _img_name + '_' + str(disturb_num) + '.png')
print fn
disturb_num += 1
# Check if need to create save dir
if img_idx == 0:
test_path = append_diff_part_path(_img_path, self.save_path + 'test')
train_path = append_diff_part_path(_img_path, self.save_path + 'train')
if not os.path.exists(test_path):
os.makedirs(test_path)
if not os.path.exists(train_path):
os.makedirs(train_path)
if not os.path.exists(w_img_path):
os.makedirs(w_img_path)
cv2.imwrite(fn, wrap_img_roi)
img_idx += 1
if __name__ == '__main__':
args = parse_args()
if not os.path.exists(args.save_path):
if args.save_path[-1] == os.sep:
args.save_path = args.save_path[:-1]
print args.save_path, "not exists"
raw_input('press any key to create it')
b = B(args, 0)
b.process()
|
[
"zhaoxu.li@hobot.cc"
] |
zhaoxu.li@hobot.cc
|
46f1830187ea8e97f6eab98b1ee66dcdcdfab9ce
|
ea0969f670c1aab1e16c5ebec9c9fe9b2d6782e0
|
/src/grille.py
|
73362e1e5297634d6c0d7d86ffe9c3c8987c19c4
|
[] |
no_license
|
franzx5/Monte-Carlo-based-Protein-Folding-Algorithm-Implementation
|
abcc7504bfcad21fef696aaf18c45557a835e47f
|
a65a8aa0dc98fa1325a281c1b74aba913885ec3b
|
refs/heads/master
| 2020-03-28T04:43:32.373595
| 2018-10-09T20:54:52
| 2018-10-09T20:54:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,432
|
py
|
#-*- coding: utf-8 -*-
#Author: AKE Franz-Arnold
#Universite Paris-Diderot
#Projet Court: ...
#import library
import numpy as np
class Grille(object):
"""
Class Grille
============
Class to compute a Lattice in which a conformation could be integrated
and contains several function to make operations on it
:need no arguments for initialisation!
"""
def __init__(self):
"""
        Initialize an empty lattice
:attribute_1 : grille_taille -> Size of the Lattice (int)
:attribute_2 : grille_midpoint -> Midpoint of the Lattice (tuple of int)
:attribute_3 : grille -> Lattice (numpy_array)
"""
self.grille_taille = 0
self.grille_midpoint = (0,0)
self.grille = np.chararray((self.grille_taille, self.grille_taille))
self.grille[:] = "*"
#Methodes
def maj_grille(self, input_conformation):
"""
integrate a conformation in the lattice object self !
:input : conformation object
"""
self.grille_taille = 2 * input_conformation.getTaille() + 1
self.grille_midpoint = (input_conformation.getTaille(), input_conformation.getTaille())
self.grille = np.chararray((self.grille_taille, self.grille_taille), itemsize=2)
self.grille[:] = "*"
for i, res in enumerate(input_conformation.conf.items()):
self.grille[res[1].coordX, res[1].coordY] = res[1].index_seq
def draw_grille(self):
"""
        Print (draw) the lattice in the terminal...
:input : Nothing
"""
for i in range(self.grille_taille):
for j in range(self.grille_taille):
                print self.grille[i,j], " ",
print "\n"
def get_topological_free_voisins(self, input_tuple):
"""
        Return, for an input coord, a list of the coords of the free adjacent positions ...
:input : tuple of position (x,y)
:output : list of coord's tuples
"""
x, y = (input_tuple[0], input_tuple[1])
pos_a_verif = ((x,y+1),(x,y-1),(x-1,y),(x+1,y))
output = []
        for pos in pos_a_verif:
            if self.check_free(pos):
                output.append(pos)
return output
def check_free(self, input_tuple):
"""
        Check whether a position is free
:input : an tuple of coords
:output : True or False
"""
if self.grille[input_tuple] == "*":
return True
else:
return False
def get_topological_voisins(self, input_tuple):
"""
        Return, for an input coord, a list of the coords of the adjacent residues (occupied positions)
:input : tuple of position (x,y)
:output : list of coord's tuples
"""
x, y = (input_tuple[0], input_tuple[1])
pos_a_verif = ((x,y+1),(x,y-1),(x-1,y),(x+1,y))
output = []
        for pos in pos_a_verif:
            if not self.check_free(pos):
                output.append(pos)
return output
def get_topological_voisins_all(self, input_tuple):
"""
        Return, for an input coord, the coords of all adjacent positions
        :input : tuple of position (x,y)
        :output : tuple of coord tuples
"""
x, y = (input_tuple[0], input_tuple[1])
pos_a_verif = ((x,y+1),(x,y-1),(x-1,y),(x+1,y))
return pos_a_verif
def check_adjacent_diag(self, tuple_A, tuple_B):
"""
        Check whether the position tuple_A is diagonally adjacent to the position tuple_B
:input1 : tuple A
:input2 : tuple B
:output : True or False
"""
x, y = (tuple_A[0], tuple_A[1])
pos_a_verif = ((x-1,y+1),(x-1,y-1),(x+1,y-1),(x+1,y+1))
        for pos in pos_a_verif:
            if pos == tuple_B:
                return True
return False
def check_adjacence(self, tuple_A, tuple_B):
"""
        Check whether the position tuple_A is adjacent to the position tuple_B
:input1 : tuple A
:input2 : tuple B
:output : True or False
"""
x, y = (tuple_A[0], tuple_A[1])
pos_a_verif = ((x,y+1),(x,y-1),(x-1,y),(x+1,y))
        for pos in pos_a_verif:
            if pos == tuple_B:
                return True
return False
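

# Minimal usage sketch (added for illustration, not part of the original
# project): the adjacency helpers are pure coordinate tests, so they can be
# exercised without loading a conformation into the lattice.
if __name__ == "__main__":
    g = Grille()
    print g.check_adjacence((2, 2), (2, 3))        # True: orthogonal neighbours
    print g.check_adjacent_diag((2, 2), (3, 3))    # True: diagonal neighbours
    print g.check_adjacence((2, 2), (4, 2))        # False: two steps away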
|
[
"aerod7@hotmail.fr"
] |
aerod7@hotmail.fr
|
a2b315f8ac5d849d88680e61611a6b16300e5e69
|
58d4e49e8214d4a1699e8783bfed837b3d535dbd
|
/libs/Theano/theano/tensor/type.py
|
9e563bfe55ff1a5e04169998753f981562e67fa1
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
nke001/attention-lvcsr
|
08bcab649b9e02bba56a6a397fcb270f6a41e87a
|
93e8d0938f8aa7726ea12318cf0a2fc0e159b5ab
|
refs/heads/master
| 2021-01-18T00:44:05.546879
| 2015-11-05T19:51:15
| 2015-11-05T19:51:15
| 43,223,536
| 0
| 0
| null | 2015-09-26T21:23:02
| 2015-09-26T21:23:02
| null |
UTF-8
|
Python
| false
| false
| 31,403
|
py
|
import logging
import warnings
import numpy
import theano
from theano import config
from theano.gof import hashtype, Type, Variable
from theano import scalar as scal
_logger = logging.getLogger("theano.tensor.type")
class TensorType(Type):
"""Symbolic `Type` representing a numpy.ndarray value."""
filter_checks_isfinite = False
"""
When this is True, strict filtering rejects data containing NaN or
Inf entries. (Used in `DebugMode`)
"""
def __init__(self, dtype, broadcastable, name=None, sparse_grad=False):
"""Initialize self.dtype and self.broadcastable.
:Parameters:
- `dtype`: str corresponding to numpy dtype (e.g., 'int64')
The value (ndarray) associated to a `Variable` of this `Type` will
have this dtype.
- `broadcastable`: tuple, list, or array of boolean values
This argument serves two purposes. First, the True elements of this
list indicate the dimensions where the shape of an associated value
must be 1. Secondly, the length of this list is the number of
dimensions that an associated value must have. See
:doc:`broadcasting` for an explanation of how this list is used.
- `name`: str
Optional name for this type.
"""
self.dtype = str(dtype)
if self.dtype == 'floatX':
self.dtype = config.floatX
# broadcastable is immutable, and all elements are either
# True or False
self.broadcastable = tuple(bool(b) for b in broadcastable)
self.dtype_specs() # error checking is done there
self.name = name
self.numpy_dtype = numpy.dtype(self.dtype)
self.sparse_grad = sparse_grad
if sparse_grad:
warnings.warn(
"DEPRECATION WARNING: You use an old interface to"
" AdvancedSubtensor1 sparse_grad. Now use"
" theano.sparse_grad(a_tensor[an_int_vector]).")
def clone(self, dtype=None, broadcastable=None):
"""
Return a copy of the type optionally with a new dtype or
broadcastable pattern.
"""
if dtype is None:
dtype = self.dtype
if broadcastable is None:
broadcastable = self.broadcastable
return self.__class__(dtype, broadcastable, name=self.name,
sparse_grad=self.sparse_grad)
def filter(self, data, strict=False, allow_downcast=None):
"""Convert `data` to something which can be associated to a
`TensorVariable`.
This function is not meant to be called in user code. It is for
`Linker` instances to use when running a compiled graph.
"""
# Explicit error message when one accidentally uses a Variable as
# input (typical mistake, especially with shared variables).
if isinstance(data, Variable):
raise TypeError(
'Expected an array-like object, but found a Variable: '
'maybe you are trying to call a function on a (possibly '
'shared) variable instead of a numeric array?')
if ((type(data) is numpy.ndarray) and
(data.dtype == self.numpy_dtype)):
if data.dtype.num != self.numpy_dtype.num:
data = theano._asarray(data, dtype=self.dtype)
# -- now fall through to ndim check
elif ((type(data) is numpy.memmap) and
(data.dtype == self.numpy_dtype)):
# numpy.memmap is a "safe" subclass of ndarray,
            # so we can use it wherever we expect a base ndarray.
# however, casting it would defeat the purpose of not
# loading the whole data into memory
pass
elif strict:
# If any of the two conditions above was not met,
# we raise a meaningful TypeError.
if not (type(data) is numpy.ndarray):
raise TypeError("%s expected a ndarray object." % self,
data, type(data))
if data.dtype != self.numpy_dtype:
raise TypeError(("%s expected a ndarray object with "
"dtype = %s (got %s).") %
(self, self.numpy_dtype, data.dtype))
assert False, "This point should never be reached."
else:
if allow_downcast:
# Convert to self.dtype, regardless of the type of data
data = theano._asarray(data, dtype=self.dtype)
# TODO: consider to pad shape with ones to make it consistent
# with self.broadcastable... like vector->row type thing
else:
if isinstance(data, numpy.ndarray):
# Check if self.dtype can accurately represent data
# (do not try to convert the data)
up_dtype = scal.upcast(self.dtype, data.dtype)
if up_dtype == self.dtype:
# Bug in the following line when data is a
# scalar array, see
# http://projects.scipy.org/numpy/ticket/1611
# data = data.astype(self.dtype)
data = theano._asarray(data, dtype=self.dtype)
if up_dtype != self.dtype:
err_msg = (
'%s cannot store a value of dtype %s without '
'risking loss of precision. If you do not mind '
'this loss, you can: '
'1) explicitly cast your data to %s, or '
'2) set "allow_input_downcast=True" when calling '
'"function".'
% (self, data.dtype, self.dtype))
raise TypeError(err_msg, data)
elif (allow_downcast is None and
type(data) is float and
self.dtype == theano.config.floatX):
# Special case where we allow downcasting of Python float
# literals to floatX, even when floatX=='float32'
data = theano._asarray(data, self.dtype)
else:
# data has to be converted.
# Check that this conversion is lossless
converted_data = theano._asarray(data, self.dtype)
# We use the `values_eq` static function from TensorType
# to handle NaN values.
if TensorType.values_eq(numpy.asarray(data),
converted_data,
force_same_dtype=False):
data = converted_data
else:
# Do not print a too long description of data
# (ndarray truncates it, but it's not sure for data)
str_data = str(data)
if len(str_data) > 80:
str_data = str_data[:75] + '(...)'
err_msg = (
'%s cannot store accurately value %s, '
'it would be represented as %s. '
'If you do not mind this precision loss, you can: '
'1) explicitly convert your data to a numpy array '
'of dtype %s, or '
'2) set "allow_input_downcast=True" when calling '
'"function".'
% (self, data, converted_data, self.dtype))
raise TypeError(err_msg, data)
if self.ndim != data.ndim:
raise TypeError("Wrong number of dimensions: expected %s,"
" got %s with shape %s." % (self.ndim, data.ndim,
data.shape))
if not data.flags.aligned:
try:
msg = "object buffer" + str(data.data)
except AttributeError:
msg = ""
raise TypeError("The numpy.ndarray object is not aligned."
" Theano C code does not support that.",
msg,
"object shape", data.shape,
"object strides", data.strides,
"object dtype", data.dtype)
i = 0
for b in self.broadcastable:
if b and data.shape[i] != 1:
raise TypeError("Non-unit value on shape on a broadcastable"
" dimension.", data.shape, self.broadcastable)
i += 1
if (self.filter_checks_isfinite and
not numpy.all(numpy.isfinite(data))):
raise ValueError("non-finite elements not allowed")
return data
def filter_variable(self, other):
"""Convert a symbolic Variable into a TensorType, if compatible.
For the moment, only a TensorType or CudaNdarrayType will be
converted, provided they have the same number of dimensions,
broadcastable pattern, and dtype.
"""
if hasattr(other, '_as_TensorVariable'):
other = other._as_TensorVariable()
if not isinstance(other, Variable):
# The value is not a Variable: we cast it into
# a Constant of the appropriate Type.
other = self.Constant(type=self, data=other)
if other.type == self:
return other
raise TypeError(
'Cannot convert Type %(othertype)s '
'(of Variable %(other)s) into Type %(self)s. '
'You can try to manually convert %(other)s into a %(self)s.' %
dict(othertype=other.type,
other=other,
self=self))
def value_validity_msg(self, a):
try:
self.filter(a, strict=True)
except Exception as e:
return str(e)
return "value is valid"
def dtype_specs(self):
"""Return a tuple (python type, c type, numpy typenum) that corresponds
to self.dtype.
This function is used internally as part of C code generation.
"""
        # TODO: add more type correspondences for e.g. int32, int64, float32,
# complex64, etc.
try:
return {
'float16': (float, 'npy_float16', 'NPY_FLOAT16'),
'float32': (float, 'npy_float32', 'NPY_FLOAT32'),
'float64': (float, 'npy_float64', 'NPY_FLOAT64'),
'uint8': (int, 'npy_uint8', 'NPY_UINT8'),
'int8': (int, 'npy_int8', 'NPY_INT8'),
'uint16': (int, 'npy_uint16', 'NPY_UINT16'),
'int16': (int, 'npy_int16', 'NPY_INT16'),
'uint32': (int, 'npy_uint32', 'NPY_UINT32'),
'int32': (int, 'npy_int32', 'NPY_INT32'),
'uint64': (int, 'npy_uint64', 'NPY_UINT64'),
'int64': (int, 'npy_int64', 'NPY_INT64'),
'complex128': (complex, 'theano_complex128', 'NPY_COMPLEX128'),
'complex64': (complex, 'theano_complex64', 'NPY_COMPLEX64')
}[self.dtype]
except KeyError:
raise TypeError("Unsupported dtype for %s: %s"
% (self.__class__.__name__, self.dtype))
def to_scalar_type(self):
return scal.get_scalar_type(dtype=self.dtype)
def __eq__(self, other):
"""Compare True iff other is the same kind of TensorType"""
return type(self) == type(other) and other.dtype == self.dtype \
and other.broadcastable == self.broadcastable
def convert_variable(self, var):
if (type(self) == type(var.type) and # noqa
self.dtype == var.type.dtype and
self.ndim == var.type.ndim and
all(sb == ob or ob for sb, ob in zip(self.broadcastable,
var.type.broadcastable))):
return theano.tensor.patternbroadcast(var, self.broadcastable)
@staticmethod
def may_share_memory(a, b):
# This is a method of TensorType, so both a and b should be ndarrays
if isinstance(a, numpy.ndarray) and isinstance(b, numpy.ndarray):
return numpy.may_share_memory(a, b)
else:
return False
@staticmethod
def values_eq(a, b, force_same_dtype=True):
# TODO: check to see if the shapes must match
# for now, we err on safe side...
if a.shape != b.shape:
return False
if force_same_dtype and a.dtype != b.dtype:
return False
a_eq_b = (a == b)
r = numpy.all(a_eq_b)
if r:
return True
# maybe the trouble is that there are NaNs
a_missing = numpy.isnan(a)
if a_missing.any():
b_missing = numpy.isnan(b)
return numpy.all(a_eq_b + (a_missing == b_missing))
else:
return False
@staticmethod
def values_eq_approx(a, b, allow_remove_inf=False, allow_remove_nan=False,
rtol=None, atol=None):
"""
:param allow_remove_inf: If True, when there is an inf in a,
we allow any value in b in that position.
                                 Even -inf
:param allow_remove_nan: If True, when there is a nan in a,
we allow any value in b in that position.
                                 Even +-inf
:param rtol: relative tolerance, passed to _allclose
:param atol: absolute tolerance, passed to _allclose
"""
if isinstance(a, numpy.ndarray) and isinstance(b, numpy.ndarray):
if a.shape != b.shape:
return False
if a.dtype != b.dtype:
return False
if 'int' in str(a.dtype):
return numpy.all(a == b)
else:
# work around a numpy.allclose bug:
# http://projects.scipy.org/numpy/ticket/1672
if a.ndim == 0 and numpy.isinf(a):
a = a.reshape(1)
b = b.reshape(1)
cmp = theano.tensor.basic._allclose(a, b, rtol=rtol, atol=atol)
if cmp:
# Numpy claims they are close, this is good enough for us.
return True
# Numpy is unhappy, but it does not necessarily mean that a and
# b are different. Indeed, Numpy does not like missing values
# and will return False whenever some are found in a or b.
# The proper way would be to use the MaskArray stuff available
# in Numpy. However, it looks like it has been added to Numpy's
# core recently, so it may not be available to everyone. Thus,
# for now we use a home-made recipe, that should probably be
# revisited in the future.
a_missing = numpy.isnan(a)
a_inf = numpy.isinf(a)
if not (a_missing.any() or (allow_remove_inf and a_inf.any())):
# There are no missing values in a, thus this is not the
# reason why numpy.allclose(a, b) returned False.
_logger.info(
'numpy allclose failed for abs_err %f and rel_err %f',
numpy.max(abs(a - b)),
numpy.max(abs(a - b) / (abs(a) + abs(b))))
return False
# The following line is what numpy.allclose bases its decision
# upon, according to its documentation.
rtol = 1.0000000000000001e-05
atol = 1e-8
cmp_elemwise = (numpy.absolute(a - b) <=
(atol + rtol * numpy.absolute(b)))
# Find places where both a and b have missing values.
both_missing = a_missing * numpy.isnan(b)
# Find places where both a and b have inf of the same sign.
both_inf = a_inf * numpy.isinf(b)
# cmp_elemwise is weird when we have inf and -inf.
# set it to False
cmp_elemwise = numpy.where(
both_inf & cmp_elemwise,
a == b,
cmp_elemwise)
# check the sign of the inf
both_inf = numpy.where(both_inf, (a == b), both_inf)
if allow_remove_inf:
both_inf += a_inf
if allow_remove_nan:
both_missing += a_missing
# Combine all information.
return (cmp_elemwise + both_missing + both_inf).all()
return False
def __hash__(self):
"""Hash equal for same kinds of TensorType"""
return hashtype(self) ^ hash(self.dtype) ^ hash(self.broadcastable)
ndim = property(lambda self: len(self.broadcastable),
doc="number of dimensions")
"""Number of dimensions
This read-only property is the preferred way to get the number of
dimensions of a `TensorType`.
"""
def make_variable(self, name=None):
"""Return a `TensorVariable` of this type
:Parameters:
- `name`: str
A pretty name to identify this `Variable` when printing and
debugging
"""
return self.Variable(self, name=name)
def _str_impl(self, short_form=True):
name_s = ''
if self.name:
if short_form:
return self.name
name_s = self.name + ':'
b = self.broadcastable
named_broadcastable = {(): 'scalar',
(False,): 'vector',
(False, True): 'col',
(True, False): 'row',
(False, False): 'matrix'}
if b in named_broadcastable:
bcast = named_broadcastable[b]
else:
if any(b):
bcast = str(b)
else:
bcast = '%iD' % len(b)
return "%sTensorType(%s, %s)" % (name_s, str(self.dtype), bcast)
def __str__(self):
return self._str_impl()
def __repr__(self):
return self._str_impl(short_form=False)
# "TensorType{%s, %s}" % (str(self.dtype), str(self.broadcastable))
def c_declare(self, name, sub, check_input=True):
"""Override `CLinkerType.c_declare` """
if(check_input):
check = """
typedef %(dtype)s dtype_%(name)s;
""" % dict(sub, name=name, dtype=self.dtype_specs()[1])
else:
check = ""
declaration = """
PyArrayObject* %(name)s;
""" % dict(sub, name=name, dtype=self.dtype_specs()[1])
return declaration + check
def c_init(self, name, sub):
"""Override `CLinkerType.c_init` """
return """
%(name)s = NULL;
""" % dict(sub, name=name, type_num=self.dtype_specs()[2])
def c_extract(self, name, sub, check_input=True):
"""Override `CLinkerType.c_extract` """
if(check_input):
check = """
%(name)s = NULL;
if (py_%(name)s == Py_None) {
// We can either fail here or set %(name)s to NULL and rely on Ops
// using tensors to handle the NULL case, but if they fail to do so
// they'll end up with nasty segfaults, so this is public service.
PyErr_SetString(PyExc_ValueError, "expected an ndarray, not None");
%(fail)s
}
if (!PyArray_Check(py_%(name)s)) {
PyErr_SetString(PyExc_ValueError, "expected an ndarray");
%(fail)s
}
// We expect %(type_num)s
if (!PyArray_ISALIGNED((PyArrayObject*) py_%(name)s)) {
PyArrayObject * tmp = (PyArrayObject*) py_%(name)s;
PyErr_Format(PyExc_NotImplementedError,
"expected an aligned array of type %%ld "
"(%(type_num)s), got non-aligned array of type %%ld"
" with %%ld dimensions, with 3 last dims "
"%%ld, %%ld, %%ld"
" and 3 last strides %%ld %%ld, %%ld.",
(long int) %(type_num)s,
(long int) PyArray_TYPE((PyArrayObject*) py_%(name)s),
(long int) PyArray_NDIM(tmp),
(long int) PyArray_NDIM(tmp) >= 3 ?
PyArray_DIMS(tmp)[PyArray_NDIM(tmp)-3] : -1,
(long int) PyArray_NDIM(tmp) >= 2 ?
PyArray_DIMS(tmp)[PyArray_NDIM(tmp)-2] : -1,
(long int) PyArray_NDIM(tmp) >= 1 ?
PyArray_DIMS(tmp)[PyArray_NDIM(tmp)-1] : -1,
(long int) PyArray_NDIM(tmp) >= 3 ?
PyArray_STRIDES(tmp)[PyArray_NDIM(tmp)-3] : -1,
(long int) PyArray_NDIM(tmp) >= 2 ?
PyArray_STRIDES(tmp)[PyArray_NDIM(tmp)-2] : -1,
(long int) PyArray_NDIM(tmp) >= 1 ?
PyArray_STRIDES(tmp)[PyArray_NDIM(tmp)-1] : -1
);
%(fail)s
}
// This is a TypeError to be consistent with DEBUG_MODE
// Note: DEBUG_MODE also tells the name of the container
if (PyArray_TYPE((PyArrayObject*) py_%(name)s) != %(type_num)s) {
PyErr_Format(PyExc_TypeError,
"expected type_num %%d (%(type_num)s) got %%d",
%(type_num)s, PyArray_TYPE((PyArrayObject*) py_%(name)s));
%(fail)s
}
""" % dict(sub, name=name, type_num=self.dtype_specs()[2])
else:
check = ""
return check + """
%(name)s = (PyArrayObject*)(py_%(name)s);
Py_XINCREF(%(name)s);
""" % dict(sub, name=name, type_num=self.dtype_specs()[2])
def c_cleanup(self, name, sub):
"""Override `CLinkerType.c_cleanup` """
return """
if (%(name)s) {
Py_XDECREF(%(name)s);
}
""" % locals()
def c_sync(self, name, sub):
"""Override `CLinkerType.c_sync` """
fail = sub['fail']
type_num = self.dtype_specs()[2]
return """
{Py_XDECREF(py_%(name)s);}
if (!%(name)s) {
Py_INCREF(Py_None);
py_%(name)s = Py_None;
}
else if ((void*)py_%(name)s != (void*)%(name)s) {
py_%(name)s = (PyObject*)%(name)s;
}
{Py_XINCREF(py_%(name)s);}
if (%(name)s && !PyArray_ISALIGNED((PyArrayObject*) py_%(name)s)) {
PyErr_Format(PyExc_NotImplementedError,
"c_sync: expected an aligned array, got non-aligned array of type %%ld"
" with %%ld dimensions, with 3 last dims "
"%%ld, %%ld, %%ld"
" and 3 last strides %%ld %%ld, %%ld.",
(long int) PyArray_TYPE((PyArrayObject*) py_%(name)s),
(long int) PyArray_NDIM(%(name)s),
(long int) PyArray_NDIM(%(name)s) >= 3 ?
PyArray_DIMS(%(name)s)[PyArray_NDIM(%(name)s)-3] : -1,
(long int) PyArray_NDIM(%(name)s) >= 2 ?
PyArray_DIMS(%(name)s)[PyArray_NDIM(%(name)s)-2] : -1,
(long int) PyArray_NDIM(%(name)s) >= 1 ?
PyArray_DIMS(%(name)s)[PyArray_NDIM(%(name)s)-1] : -1,
(long int) PyArray_NDIM(%(name)s) >= 3 ?
PyArray_STRIDES(%(name)s)[PyArray_NDIM(%(name)s)-3] : -1,
(long int) PyArray_NDIM(%(name)s) >= 2 ?
PyArray_STRIDES(%(name)s)[PyArray_NDIM(%(name)s)-2] : -1,
(long int) PyArray_NDIM(%(name)s) >= 1 ?
PyArray_STRIDES(%(name)s)[PyArray_NDIM(%(name)s)-1] : -1
);
%(fail)s
}
""" % locals()
def c_headers(self):
"""Override `CLinkerObject.c_headers` """
return scal.get_scalar_type(self.dtype).c_headers()
def c_libraries(self):
return scal.get_scalar_type(self.dtype).c_libraries()
def c_compile_args(self):
return scal.get_scalar_type(self.dtype).c_compile_args()
def c_support_code(self):
"""Override `CLinkerObject.c_support_code` """
return scal.get_scalar_type(self.dtype).c_support_code()
def c_init_code(self):
return scal.get_scalar_type(self.dtype).c_init_code()
def c_code_cache_version(self):
scalar_version = scal.get_scalar_type(self.dtype).c_code_cache_version()
if scalar_version:
return (11,) + scalar_version
else:
return ()
def value_zeros(self, shape):
"""
        Create a numpy ndarray full of 0 values.
"""
return numpy.zeros(shape, dtype=self.dtype)
def get_shape_info(self, obj):
"""
Return the information needed to compute the memory size of ``obj``.
The memory size is only the data, so this excludes the container.
For an ndarray, this is the data, but not the ndarray object and
other data structures such as shape and strides.
``get_shape_info()`` and ``get_size()`` work in tandem for the memory
profiler.
``get_shape_info()`` is called during the execution of the function.
So it is better that it is not too slow.
``get_size()`` will be called on the output of this function
when printing the memory profile.
:param obj: The object that this Type represents during execution
:return: Python object that ``self.get_size()`` understands
"""
return obj.shape
def get_size(self, shape_info):
""" Number of bytes taken by the object represented by shape_info.
:param shape_info: the output of the call to get_shape_info()
:return: the number of bytes taken by the object described by
``shape_info``.
"""
if shape_info:
return numpy.prod(shape_info) * numpy.dtype(self.dtype).itemsize
else: # a scalar
return numpy.dtype(self.dtype).itemsize
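# Illustrative sketch (added for exposition, not part of the original module):
# the (dtype, broadcastable) pair given to TensorType.__init__ encodes both
# the rank and which dimensions are fixed to length 1, e.g.:
#
#   fvector = TensorType('float32', (False,))       # 1-d, nothing broadcasts
#   frow = TensorType('float32', (True, False))     # 2-d, first dim must be 1
#   fcol = TensorType('float32', (False, True))     # 2-d, second dim must be 1
#   x = fvector.make_variable(name='x')             # a symbolic variable of it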
theano.compile.ops.expandable_types += (TensorType,)
def values_eq_approx_remove_inf(a, b):
return TensorType.values_eq_approx(a, b, True)
def values_eq_approx_remove_nan(a, b):
return TensorType.values_eq_approx(a, b, False, True)
def values_eq_approx_remove_inf_nan(a, b):
return TensorType.values_eq_approx(a, b, True, True)
def values_eq_approx_always_true(a, b):
return True
# Register TensorType C code for ViewOp.
theano.compile.register_view_op_c_code(
TensorType,
"""
Py_XDECREF(%(oname)s);
%(oname)s = %(iname)s;
Py_XINCREF(%(oname)s);
""",
version=1)
# Register TensorType C code for Shape Op.
theano.compile.register_shape_c_code(
TensorType,
"""
npy_intp shape[] = {PyArray_NDIM(%(iname)s)};
if(%(oname)s == NULL || (PyArray_DIMS(%(oname)s)[0] != shape[0]))
{
Py_XDECREF(%(oname)s);
%(oname)s = (PyArrayObject*) PyArray_SimpleNew(1, shape, NPY_INT64);
}
for(int i=0;i<shape[0];i++)
{
((npy_int64*)PyArray_GETPTR1(%(oname)s, i))[0] = PyArray_DIMS(%(iname)s)[i];
}
""",
version=1)
# Register TensorType C code for ViewOp.
theano.compile.register_shape_i_c_code(
TensorType,
"""
if(!%(oname)s)
%(oname)s=(PyArrayObject*)PyArray_EMPTY(0, NULL, NPY_INT64, 0);
((npy_int64*)PyArray_DATA(%(oname)s))[0]=PyArray_DIMS(%(iname)s)[%(i)s];
""",
"""
if (%(i)s>=PyArray_NDIM(%(iname)s)){
PyErr_SetString(PyExc_TypeError,
"Number of dimensions lower than expected");
%(fail)s
}
""",
version=3)
# Register TensorType C code for DeepCopyOp
theano.compile.register_deep_copy_op_c_code(
TensorType,
"""
int alloc = %(oname)s == NULL;
for(int i=0; !alloc && i<PyArray_NDIM(%(oname)s); i++) {
if(PyArray_DIMS(%(iname)s)[i] != PyArray_DIMS(%(oname)s)[i]) {
alloc = true;
break;
}
}
if(alloc) {
Py_XDECREF(%(oname)s);
%(oname)s = (PyArrayObject*)PyArray_NewCopy(%(iname)s,
NPY_ANYORDER);
if (!%(oname)s)
{
PyErr_SetString(PyExc_ValueError,
"DeepCopyOp: the copy failed!");
%(fail)s;
}
} else {
if(PyArray_CopyInto(%(oname)s, %(iname)s)){
PyErr_SetString(PyExc_ValueError,
"DeepCopyOp: the copy failed into already allocated space!");
%(fail)s;
}
}
""",
version=2)
theano.compile.register_rebroadcast_c_code(
TensorType,
"""
if(PyArray_DIMS(%(iname)s)[%(axis)s] != 1){
PyErr_Format(PyExc_ValueError,
"Dimension %(axis)s in Rebroadcast's input was"
" supposed to be 1 (got %%d instead)",
PyArray_DIMS(%(iname)s)[%(axis)s]);
%(fail)s
}
""",
version=1)
theano.compile.register_specify_shape_c_code(
TensorType,
"""
if (PyArray_NDIM(%(iname)s) != PyArray_DIMS(%(shape)s)[0]) {
PyErr_Format(PyExc_AssertionError,
"SpecifyShape: vector of shape has %%d elements,"
" but the input has %%d dimensions.",
PyArray_NDIM(%(iname)s),
PyArray_DIMS(%(shape)s)[0]);
%(fail)s;
}
for(int i = 0; i < PyArray_NDIM(%(iname)s); i++){
dtype_%(shape)s shp = ((dtype_%(shape)s*)PyArray_GETPTR1(%(shape)s,
i))[0];
if (PyArray_DIMS(%(iname)s)[i] != shp) {
PyErr_Format(PyExc_AssertionError,
"SpecifyShape: dim %%d of input has shape %%d,"
" expected %%d.",
i, PyArray_DIMS(%(iname)s)[i],
shp);
%(fail)s;
}
}
Py_XDECREF(%(oname)s);
%(oname)s = %(iname)s;
Py_XINCREF(%(oname)s);
""",
version=1)
|
[
"jan.chorowski@gmail.com"
] |
jan.chorowski@gmail.com
|
fc3b38692f0f6d26c2267d32cdcf80c3f9eeaa57
|
90ae5e5276e7d2e43a2e1da84d2534f1dd6fd25e
|
/prepare_data.py
|
5fa4a421dd61267f548dcb27c24eef6bc670603b
|
[] |
no_license
|
LogicJake/yidianzixun-ctr-top1
|
be7004a5199aa823ffce4121b4ae865f60293c74
|
cdd1208c1ab7d43ef74822992c6cf15511caf862
|
refs/heads/master
| 2023-08-17T16:41:10.663032
| 2021-09-19T16:30:40
| 2021-09-19T16:30:40
| 408,175,708
| 16
| 5
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 516
|
py
|
import warnings
warnings.simplefilter('ignore')
import gc
import numpy as np
import pandas as pd
pd.set_option('max_columns', None)
df_features = pd.read_pickle('/media/user01/wd1tb/yidian/data/feature.pkl')
df_features.drop(['keyword', 'history_docid'], axis=1, inplace=True)
gc.collect()
train = df_features[df_features['click'].notna()]
test = df_features[df_features['id'].notna()]
train.to_pickle('/media/user01/wd1tb/yidian/data/train47.pkl')
test.to_pickle('/media/user01/wd1tb/yidian/data/test47.pkl')
|
[
"noreply@github.com"
] |
LogicJake.noreply@github.com
|
1e0527b357570af3ae2b1c6e24e6c92b1722e82a
|
4151bab1ae974b6e613a3fd53eb66aef96418f03
|
/test_wiki_testing.py
|
e674cc0a62713a3a035395cbe818d8be4ac331be
|
[] |
no_license
|
GuptaAkshay/Assit
|
c6b73000994c3333326270f29c7285d0e97cef74
|
1f69201d8509c414546d612932be6619371bd572
|
refs/heads/master
| 2021-05-01T13:44:47.698671
| 2017-01-20T20:46:21
| 2017-01-20T20:46:21
| 79,592,552
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 175
|
py
|
import wikipedia
while True:
query = input("Query :")
wikipedia.set_lang("en")
result = wikipedia.summary(query, sentences = 2).encode('utf-8')
print (result)
|
[
"akshgupta547@gmail.com"
] |
akshgupta547@gmail.com
|
5e1a3ae86c4f42578664cd65af131794f3437314
|
5a96dcb6e22b37e43386b3c4e376dc457929deff
|
/02python常用api/03-列表list常用方法.py
|
0ad3182bbaa230c7f7ba77a0667c88b38481b13c
|
[] |
no_license
|
chengjing00/pythonLearning
|
3c0ce48f112a21e90530576ffa3803eb7b9f374c
|
63c8395de76adc10b2fd957e1a85c2f157cd0452
|
refs/heads/main
| 2023-03-09T23:58:49.642979
| 2021-02-28T05:25:22
| 2021-02-28T05:25:22
| 306,793,712
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,574
|
py
|
# -*- coding: utf-8 -*-
# @Time : 2020-10-26 10:34
# @Author : chengj-f
# @Email : chengj-f@glodon.com
# @File : 03-列表list常用方法.py
# @Software: PyCharm
list1 = [1, 3, 5, 7, 100]
print(list1) # [1, 3, 5, 7, 100]
# The * operator repeats the elements of a list
list2 = ['hello'] * 3
print(list2) # ['hello', 'hello', 'hello']
# Compute the list length (number of elements)
print(len(list1)) # 5
# Subscript (index) access
print(list1[0]) # 1
print(list1[4]) # 100
# print(list1[5]) # IndexError: list index out of range
print(list1[-1]) # 100
print(list1[-3]) # 5
list1[2] = 300
print(list1) # [1, 3, 300, 7, 100]
# Iterate over the list elements by index with a loop
for index in range(len(list1)):
print(list1[index])
# Iterate over the list elements directly with a for loop
for elem in list1:
print(elem)
# enumerate yields both the index and the element while iterating over the list
for index, elem in enumerate(list1):
print(index, elem)
# --------------------------
list1 = [1, 3, 5, 7, 100]
# Add elements
list1.append(200)
list1.insert(1, 400)
# Merge two lists
# list1.extend([1000, 2000])
list1 += [1000, 2000]
print(list1) # [1, 400, 3, 5, 7, 100, 200, 1000, 2000]
print(len(list1)) # 9
# Use a membership test first, and remove the element only if it is present
if 3 in list1:
list1.remove(3)
if 1234 in list1:
list1.remove(1234)
print(list1) # [1, 400, 5, 7, 100, 200, 1000, 2000]
# Remove elements at the given positions
list1.pop(0)
list1.pop(len(list1) - 1)
print(list1) # [400, 5, 7, 100, 200, 1000]
# Clear the list
list1.clear()
print(list1) # []
# --------------------
fruits = ['grape', 'apple', 'strawberry', 'waxberry']
fruits += ['pitaya', 'pear', 'mango']
# List slicing
fruits2 = fruits[1:4]
print(fruits2) # apple strawberry waxberry
# A full slice can be used to copy the list
fruits3 = fruits[:]
print(fruits3) # ['grape', 'apple', 'strawberry', 'waxberry', 'pitaya', 'pear', 'mango']
fruits4 = fruits[-3:-1]
print(fruits4) # ['pitaya', 'pear']
# A reversed slice gives a reversed copy of the list
fruits5 = fruits[::-1]
print(fruits5) # ['mango', 'pear', 'pitaya', 'waxberry', 'strawberry', 'apple', 'grape']
# --------------
list1 = ['orange', 'apple', 'zoo', 'internationalization', 'blueberry']
list2 = sorted(list1)
# sorted returns a sorted copy and does not modify the list passed in
# like sorted, functions should be designed to avoid side effects where possible
list3 = sorted(list1, reverse=True)
# the key argument sorts by string length instead of the default alphabetical order
list4 = sorted(list1, key=len)
print(list1)
print(list2)
print(list3)
print(list4)
# tell the list object to sort itself in place
list1.sort(reverse=True)
print(list1)
# -------------------
f = [x for x in range(1, 10)]
print(f)
f = [x + y for x in 'ABCDE' for y in '1234567']
print(f)
# Create the list container with list-comprehension syntax
# with this syntax every element is materialized up front, so it costs more memory
f = [x ** 2 for x in range(1, 1000)]
# print(sys.getsizeof(f))  # check how many bytes the object occupies in memory
print(f)
# Note: the code below creates a generator object, not a list
# the generator can produce the data without taking extra space to store it
# each value is computed internally on demand (which costs extra time)
f = (x ** 2 for x in range(1, 1000))
# print(sys.getsizeof(f))  # unlike a comprehension, a generator needs no space to store the data
print(f)
for val in f:
print(val)
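# A small supplementary example (not in the original notes): measure how much
# memory a list comprehension uses compared with a generator expression.
import sys
big_list = [x ** 2 for x in range(1, 1000)]
big_gen = (x ** 2 for x in range(1, 1000))
print(sys.getsizeof(big_list))  # several KB: every value is stored up front
print(sys.getsizeof(big_gen))   # ~100 bytes: values are produced on demand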
|
[
"chengjing002@163.com"
] |
chengjing002@163.com
|
abf518b8b5904b80283d0be5973d382c366310f6
|
74be814f7cd10d3c91a53460bd6698aa8bc95704
|
/LeetCode周赛/2020年4月12日周赛/5382. HTML 实体解析器.py
|
100f3c2047decc971e12392ce1f1c89165f2329d
|
[] |
no_license
|
weiyuyan/LeetCode
|
7202f7422bc3bef6bd35ea299550b51905401656
|
19db0e78826d3e3d27d2574abd9d461eb41458d1
|
refs/heads/master
| 2020-12-03T17:10:53.738507
| 2020-05-27T08:28:36
| 2020-05-27T08:28:36
| 231,402,839
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,620
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author:ShidongDu time:2020/4/12
'''
An "HTML entity parser" takes HTML code as input and replaces every special
character entity with the character itself.
The special characters and their entities in HTML are:
Double quote: the entity is &quot; , the corresponding character is " .
Single quote: the entity is &apos; , the corresponding character is ' .
Ampersand: the entity is &amp; , the corresponding character is & .
Greater-than sign: the entity is &gt; , the corresponding character is > .
Less-than sign: the entity is &lt; , the corresponding character is < .
Slash: the entity is &frasl; , the corresponding character is / .
Given the input string text, implement an HTML entity parser and return the
parser's result.
Example 1:
Input: text = "&amp; is an HTML entity but &ambassador; is not."
Output: "& is an HTML entity but &ambassador; is not."
Explanation: the parser replaces the entity &amp; with &
Example 2:
Input: text = "and I quote: &quot;...&quot;"
Output: "and I quote: \"...\""
Example 3:
Input: text = "Stay home! Practice on Leetcode :)"
Output: "Stay home! Practice on Leetcode :)"
Example 4:
Input: text = "x &gt; y &amp;&amp; x &lt; y is always false"
Output: "x > y && x < y is always false"
Example 5:
Input: text = "leetcode.com&frasl;problemset&frasl;all"
Output: "leetcode.com/problemset/all"
Constraints:
1 <= text.length <= 10^5
The string may contain any of the 256 ASCII characters.
'''
class Solution:
def entityParser(self, text: str) -> str:
        a_dict = {
            '&quot;': ('"', 5),
            '&apos;': ("'", 5),
            '&amp;': ('&', 4),
            '&gt;': ('>', 3),
            '&lt;': ('<', 3),
            '&frasl;': ('/', 6)
        }
res = ''
list_text = [char for char in text]
self.k = 0
for i in range(len(list_text)):
if self.k > 0:
self.k -= 1
continue
            if list_text[i] == '&':
for j in range(4, 8):
tmp = ''.join(list_text[i: i+j])
if tmp in a_dict:
res += a_dict[tmp][0]
self.k += a_dict[tmp][1]
break
if self.k == 0:
res += '&'
else:
res += list_text[i]
return res
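# A shorter reference approach (added for comparison, not the contest
# submission): chained str.replace calls, handling '&amp;' last so that the
# '&' characters it produces can never be re-expanded into other entities.
def entity_parser_simple(text: str) -> str:
    for entity, char in [('&quot;', '"'), ('&apos;', "'"), ('&gt;', '>'),
                         ('&lt;', '<'), ('&frasl;', '/'), ('&amp;', '&')]:
        text = text.replace(entity, char)
    return text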
if __name__ == '__main__':
solution = Solution()
    text = "&amp; is an HTML entity but &ambassador; is not."
res = solution.entityParser(text)
print(res)
|
[
"244128764@qq.com"
] |
244128764@qq.com
|
d846f8c565849b567cf5ac8fb5611527a0dffc7b
|
11415118049c6ef1e778f6f3af1ab1678d374c5d
|
/superlists/settings.py
|
9be6f9aa64be560c9698779d44e495013bcc6173
|
[] |
no_license
|
azrishnr/ListApp
|
dfd0a019a703fa62b117b5e3339d7fc001992bcc
|
5c595ff5963f5786fd4d9a6528c20b5935617bb6
|
refs/heads/master
| 2020-03-07T02:25:07.923907
| 2018-04-09T15:45:24
| 2018-04-09T15:45:24
| 127,206,832
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,157
|
py
|
"""
Django settings for superlists project.
Generated by 'django-admin startproject' using Django 1.11.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '0)-1(rst5oms4g$i1nvf#0c48njpnz6&bqw+4qnfqomx9-&)u9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'lists',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'superlists.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'superlists.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
|
[
"ahilcar.s@gmail.com"
] |
ahilcar.s@gmail.com
|
2beb5c545c5f8137c263e97d7eb1ef2a8085c386
|
42d4c00477339ed114ed03d2296f9a3ba41f9e43
|
/sets.py
|
ca8b750e7e45d784c9d3f4bd04b4d5c52454f592
|
[] |
no_license
|
SaifullahKatpar/CodingTheMatrix
|
5506f79214e9be6e4a9e0f74de69f34ab846e118
|
b2702ee27d63ce6c2fa6f144680f1ebce78c7789
|
refs/heads/master
| 2021-07-24T01:29:46.345458
| 2017-11-03T06:26:19
| 2017-11-03T06:26:19
| 109,307,748
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,056
|
py
|
S = {1,2,3}
print('S = ',S)
print('2 in S=',2 in S)
# union
print('Union of {1,2,3} and {2,3,4,5,6}:')
union = {1,2,3} | {2,3,4,5,6}
print(union,'\n')
# intersection
print('intersection of {1,2,3} and {2,3,4,5,6}:')
intersection = {1,2,3} & {2,3,4,5,6}
print(intersection,'\n')
# add
print('added 4 to S:')
S.add(4)
print(S,'\n')
Friends = {'Sair','Sagar','Liaqat','Azhar'}
print(Friends)
print('added Ali to Friends:')
Friends.add('Ali')
print(Friends,'\n')
# remove
print('remove 4 from S:')
S.remove(4)
print(S,'\n')
print('removed Sagar from Friends:')
Friends.remove('Sagar')
print(Friends,'\n')
#update
print('update S with {7,8,9}')
S.update({7,8,9})
print(S,'\n')
#intersection_update
print('intersection_update S with {3,7,11,20}')
S.intersection_update({3,7,11,20})
print(S,'\n')
# bound variables: T and S are two names bound to the same set object
print('T = S; T.remove(7); now S =')
T=S
T.remove(7)
print(S,'\n')
# copy
# The assignment statement binds U not to the value of S but to a copy of that value,
print('U=S.copy(), add 0 to U' )
U=S.copy()
U.add(0)
print('S=',S,'\n')
print('U=',U,'\n')
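# Two more operators (added to round out these notes): difference and
# symmetric difference
print('difference of {1,2,3} and {2,3,4}:')
print({1,2,3} - {2,3,4})  # {1}
print('symmetric difference of {1,2,3} and {2,3,4}:')
print({1,2,3} ^ {2,3,4})  # {1, 4}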
|
[
"saifullah.cs14@iba-suk.edu.pk"
] |
saifullah.cs14@iba-suk.edu.pk
|
4b61dad65c0eec78567531230fc5dfd64845d3b1
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02947/s946573717.py
|
2e7fbd83033b4322cdcfef0dce174c61380629b5
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 188
|
py
|
import collections
N=int(input())
lis=[''.join(sorted(input(), key=str.upper)) for _ in range(N)]
v=collections.Counter(lis).values()
ans=sum(list(map(lambda x: x*(x-1)//2, v)))
print(ans)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
f31edda5d1ea8a0a72aa23e1b1d830d7a491ff5c
|
a97f789530412fc1cb83170a11811f294b139ee8
|
/疯狂Python讲义/codes/02/2.5/chars_test.py
|
877b1afe2a70512932e068b93aa383d941a93381
|
[] |
no_license
|
baidongbin/python
|
3cebf2cc342a15b38bf20c23f941e6887dac187a
|
1c1398bff1f1820afdd8ddfa0c95ccebb4ee836f
|
refs/heads/master
| 2021-07-21T19:23:32.860444
| 2020-03-07T11:55:30
| 2020-03-07T11:55:30
| 195,909,272
| 0
| 1
| null | 2020-07-21T00:51:24
| 2019-07-09T01:24:31
|
Python
|
UTF-8
|
Python
| false
| false
| 846
|
py
|
s = 'crazyit.org is very good'
# Get the character at index 2 of s
print(s[2])
# Get the character at index 4 counting from the right
print(s[-4])
# Get the substring from index 3 up to (but not including) index 5
print(s[3:5])
# Get the substring from index 3 up to the 5th character from the end
print(s[3:-5])
# Get the substring from the 6th-from-last up to the 3rd-from-last character
print(s[-6:-3])
# Get the substring from index 5 to the end
print(s[5:])
# Get the substring from the 6th character from the end to the end
print(s[-6:])
# Get the substring from the start up to index 5
print(s[:5])
# Get the substring from the start up to the 6th character from the end
print(s[:-6])
# Check whether s contains the substring 'very'
print("very" in s)
print("fkit" in s)
# Print the length of s
print(len(s))
# Print the length of 'test'
print(len('test'))
# Print the largest character in s
print(max(s))
# Print the smallest character in s
print(min(s))
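# Two extra examples (added for illustration): slices also accept a step
# every second character of s
print(s[::2])
# a reversed copy of s
print(s[::-1])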
|
[
"baidongbin@qq.com"
] |
baidongbin@qq.com
|
f7e85c6652a156f9683772188744b06f6a569901
|
f65be574caf751a50cca2dc61a035dad9829d27c
|
/data/testbed/twist.py
|
2462ea123b88783c8287627faf9252d570e70e78
|
[] |
no_license
|
Chen-Gu/slp
|
295810b597810f386c0d96038a5bae0d178c5ae0
|
db242fdc01f42a916a9c1c295548a03c94e56948
|
refs/heads/master
| 2021-08-28T11:37:52.906728
| 2021-08-11T15:00:25
| 2021-08-11T15:00:25
| 237,653,729
| 0
| 0
| null | 2020-02-01T17:49:44
| 2020-02-01T17:49:44
| null |
UTF-8
|
Python
| false
| false
| 1,540
|
py
|
import numpy as np
from simulator.Topology import Topology
def name():
return __name__
def platform():
"""The hardware platform of the testbed"""
return ("eyesIFX", "telosa", "telosb")
def log_mode():
return "printf"
def url():
return "https://www.twist.tu-berlin.de"
def submitter(*args, **kwargs):
raise RuntimeError(f"{name()} does not support automatic submission")
def build_arguments():
return {}
def fastserial_supported():
return True
# Resources:
# - https://www.twist.tu-berlin.de/tutorials/twist-getting-started.html#prerequisites
class Twist(Topology):
"""The layout of nodes on the TWIST testbed, see: https://www.twist.tu-berlin.de/testbeds/index.html"""
def __init__(self):
super(Twist, self).__init__()
# I think there are about 279 nodes in the testbed, but that is a bit of a guess
# Reported dead when logging into the web interface
dead = {59, 60, 274, 275, 62, 64, 276, 277, 171, 174, 278, 279, 22, 23,
38, 39, 40, 41, 42, 43, 44, 45, 48, 49, 52, 54, 83, 84, 172, 181,
198, 187, 212, 203, 194, 225, 209, 207, 224, 206, 205, 211, 230,
204, 33, 208, 222, 270, 36, 37, 220, 221, 26, 27, 189, 190}
for nid in range(1, 280):
# Skip any dead nodes
if nid in dead:
continue
self.nodes[nid] = np.array((-100, -100), dtype=np.float64)
self._process_node_id_order("topology")
def __str__(self):
return "Twist<>"
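

# Illustrative check (hypothetical, added for exposition; assumes this
# repository's simulator package is importable):
#
#   t = Twist()
#   print(len(t.nodes))  # candidate ids 1..279 minus the dead set above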
|
[
"MBradbury@users.noreply.github.com"
] |
MBradbury@users.noreply.github.com
|
d0b5aad7626c24cddeda85400d41c59779b8b3cd
|
b102173408f2b99008cc4d9a6aca86f294b5f8fe
|
/removingthesameelements.py
|
fa0c42a5746dfaa6e9f21943e7e69a580b3c113b
|
[] |
no_license
|
Avenger98/Python-Programms
|
843083d0d0589d57b2a3fadfe7343c07d9f7292b
|
0b7521e111c6a8750e05ecac3c90d256436c83ee
|
refs/heads/master
| 2020-12-13T08:27:28.845042
| 2020-01-16T16:32:33
| 2020-01-16T16:32:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 287
|
py
|
a = []
cycle_number = int(input("Enter the cycle number: "))
for i in range(cycle_number):
num = int(input("Enter the number: "))
a.append(num)
print("This is the list created: ", a)
b = []
for numbers in a:
if numbers not in b:
b.append(numbers)
print(b)
|
[
"noreply@github.com"
] |
Avenger98.noreply@github.com
|
500c896b25f008b10bf817e79c1c5a6f23d40198
|
b681b0ef052d49452c7784b5293d56a64b9b1ceb
|
/MutiView3DReconstruction/lib/view.py
|
3fcde7012a93cd28c8d485bfbdd49da14d34f861
|
[] |
no_license
|
Gravity-N1/3D-Object-Reconstruction-with-RealSense-D435
|
bd21f55e97f8cfd30fa247179c844b0101e3edc4
|
52708913e2d0954bbca237111b5b8ce6bc976191
|
refs/heads/master
| 2022-04-05T12:08:19.655903
| 2020-03-03T09:07:22
| 2020-03-03T09:07:22
| 257,914,309
| 3
| 2
| null | 2020-04-22T13:46:12
| 2020-04-22T13:46:11
| null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
import numpy as np
import open3d as o3d
pcd = o3d.io.read_point_cloud('template.pcd')
o3d.visualization.draw_geometries([pcd])
|
[
"roboticsleo@gmail.com"
] |
roboticsleo@gmail.com
|
70c2b4c693ff41de681677fb36fcaf4d68a47c1a
|
3d319130b7146249592c8e7383cf09104232358d
|
/data/read_data.py
|
e8d293d38422ec20bd63ead37123e9c7bcc9fae4
|
[
"MIT"
] |
permissive
|
ziqianxy/coach_service
|
81f3ab6770ec05832660f8e1e9812505a39e51e5
|
8020c5e8d7255d22c3473529baf0f111c209879f
|
refs/heads/master
| 2021-01-11T21:47:24.759966
| 2017-01-13T13:21:19
| 2017-01-13T13:21:19
| 78,846,756
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,583
|
py
|
# coding=utf-8
import csv
import string
from common_utils import gps
CSV_NAME = '_2016-1210-080343'
file_name = '_data_resolved/' + CSV_NAME + '.csv'
# key-map of the column
km = {
"_time": 0, "G_STA": 1, "G_LON": 2, "G_LAT": 3, "G_ALT": 4,
"G_pdop": 5, "G_hdop": 6, "G_vdop": 7, "G_SPD": 8, "G_DIR": 9,
"G_DATE": 10, "G_TIME": 11, "SL_V": 12, "SL_GPS": 13, "SL_BD": 14,
"G_MODE": 15, "_0103": 16, "_0104": 17, "_0105": 18, "_0106": 19,
"_0107": 20, "_010c": 21, "_010d": 22, "_010e": 23, "_010f": 24,
"_0110": 25, "_0111": 26, "_0115": 27, "_011f": 28, "_0121": 29,
"_0124": 30, "_012e": 31, "_0130": 32, "_0131": 33, "_0133": 34,
"_0134": 35, "_013c": 36, "_013e": 37, "_0142": 38, "_0143": 39,
"_0144": 40, "_0145": 41, "_0147": 42, "_0149": 43, "_014a": 44,
"_014c": 45, "_014d": 46, "_014e": 47
}
# function to print the list data
def print_list(d_list):
for i in d_list:
print i
data_list = []
# read data
with open(file_name, 'rb') as data:
reader = csv.reader(data)
for line in reader:
data_list.append(line)
pre_row = 2
data_list = data_list[pre_row:]
data_get_list = []
# resolve data
for row in data_list[1:-1:1]:
vss = string.atof(row[km.get("_010d")])
rpm = string.atof(row[km.get("_010c")])
ect = string.atof(row[km.get("_0105")])
vpwr = string.atof(row[km.get("_0142")])
spd = string.atof(row[km.get("G_SPD")])
# data_get_list.append([vss, rpm, ect, vpwr])
data_get_list.append(spd)
print(data_get_list)
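# Porting note (added): string.atof was removed in Python 3; the equivalent
# there is the builtin float, e.g.
#   vss = float(row[km.get("_010d")])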
|
[
"ziqiancn@163.com"
] |
ziqiancn@163.com
|
b5b5413f29f0d3fd56f012bfdb61e13fb70ba12e
|
08cfc4fb5f0d2f11e4e226f12520a17c5160f0a2
|
/kubernetes/test/test_v2beta1_metric_status.py
|
f6f4980396861a82b487c809011755f67128829f
|
[
"Apache-2.0"
] |
permissive
|
ex3cv/client-python
|
5c6ee93dff2424828d064b5a2cdbed3f80b74868
|
2c0bed9c4f653472289324914a8f0ad4cbb3a1cb
|
refs/heads/master
| 2021-07-12T13:37:26.049372
| 2017-10-16T20:19:01
| 2017-10-16T20:19:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 985
|
py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v2beta1_metric_status import V2beta1MetricStatus
class TestV2beta1MetricStatus(unittest.TestCase):
""" V2beta1MetricStatus unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV2beta1MetricStatus(self):
"""
Test V2beta1MetricStatus
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v2beta1_metric_status.V2beta1MetricStatus()
pass
if __name__ == '__main__':
unittest.main()
|
[
"mehdy@google.com"
] |
mehdy@google.com
|
b03f005419c18694abd3a3bb1e63bcfca9b7a48c
|
61445c4c3270441f61137224745f5e378815d27e
|
/main.py
|
24cc2b93cc6dc971316786b2a593ee9b0cc4e964
|
[] |
no_license
|
lgxqf/easy_mock
|
51f7eea0fa80ac78755bfe7e976eb912ed55445d
|
42bc63511bdacfb090b68c9fc8eddd2c4d8bb002
|
refs/heads/master
| 2023-03-30T21:32:01.845354
| 2021-03-31T09:50:35
| 2021-03-31T09:50:35
| 352,025,425
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 304
|
py
|
# -*- coding: utf-8 -*-
import os
from easy_mock import config
from easy_mock.core import main
from easy_mock.loader import load_default_conf
# config.port = load_default_conf("port") or "9000"
# config.is_https = bool(load_default_conf("is_https"))
#
# config.file_path = "server/example.yml"
main()
|
[
"mayi@sensetime.com"
] |
mayi@sensetime.com
|
aa7f2093185a88a33dfb0000ac70a0a23fc889a2
|
ac5b5e6a3ebb3825c8a14a7f507283938976e635
|
/authapp/urls.py
|
ec9ac166054b6953288eb4406856d1fa9cfc6ef4
|
[] |
no_license
|
Django-TOPS/18Aug_AuthProject
|
40f53d4a162a4cfed2137a024a2e8ddf88cbd4d1
|
05d1369799d973104668c86edae3f67be68e7476
|
refs/heads/master
| 2023-02-22T05:42:13.939901
| 2021-01-30T07:07:19
| 2021-01-30T07:07:19
| 334,351,031
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 263
|
py
|
from django.contrib import admin
from django.urls import path
from authapp import views
urlpatterns = [
path('',views.index, name='index'),
path('signup/',views.signup),
path('home/',views.home,name='home'),
path('logout/',views.user_logout),
]
|
[
"sanketiosonline@gmail.com"
] |
sanketiosonline@gmail.com
|
a2945f3d81c090a8f2aa8f45e45b0acac73e2923
|
adea9fc9697f5201f4cb215571025b0493e96b25
|
/napalm_yang/models/openconfig/network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/__init__.py
|
dc8030ca891d1f3cc60283e5218058163251d128
|
[
"Apache-2.0"
] |
permissive
|
andyjsharp/napalm-yang
|
d8a8b51896ef7c6490f011fe265db46f63f54248
|
ef80ebbfb50e188f09486380c88b058db673c896
|
refs/heads/develop
| 2021-09-09T02:09:36.151629
| 2018-03-08T22:44:04
| 2018-03-08T22:44:04
| 114,273,455
| 0
| 0
| null | 2018-03-08T22:44:05
| 2017-12-14T16:33:35
|
Python
|
UTF-8
|
Python
| false
| false
| 41,850
|
py
|
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
unicode = str
elif six.PY2:
import __builtin__
from . import config
from . import state
from . import l2
from . import ip
from . import transport
from . import action
class rule(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/policy-forwarding/policies/policy/rules/rule. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: A match rule for the policy. In the case that multiple
criteria are specified within a single
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__sequence_id','__config','__state','__l2','__ip','__transport','__action',)
_yang_name = 'rule'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__ip = YANGDynClass(base=ip.ip, is_container='container', yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__sequence_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="sequence-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)
self.__l2 = YANGDynClass(base=l2.l2, is_container='container', yang_name="l2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__action = YANGDynClass(base=action.action, is_container='container', yang_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__transport = YANGDynClass(base=transport.transport, is_container='container', yang_name="transport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'policy-forwarding', u'policies', u'policy', u'rules', u'rule']
def _get_sequence_id(self):
"""
Getter method for sequence_id, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/sequence_id (leafref)
YANG Description: A unique sequence identifier for the match rule.
"""
return self.__sequence_id
def _set_sequence_id(self, v, load=False):
"""
Setter method for sequence_id, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/sequence_id (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_sequence_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sequence_id() directly.
YANG Description: A unique sequence identifier for the match rule.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="sequence-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """sequence_id must be of a type compatible with leafref""",
'defined-type': "leafref",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="sequence-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
})
self.__sequence_id = t
if hasattr(self, '_set'):
self._set()
def _unset_sequence_id(self):
self.__sequence_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="sequence-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/config (container)
YANG Description: Configuration parameters relating to the match
rule.
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the match
rule.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/state (container)
YANG Description: Operational state parameters relating to the match
rule.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: Operational state parameters relating to the match
rule.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_l2(self):
"""
Getter method for l2, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/l2 (container)
YANG Description: Ethernet header fields
"""
return self.__l2
def _set_l2(self, v, load=False):
"""
Setter method for l2, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/l2 (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_l2 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_l2() directly.
YANG Description: Ethernet header fields
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=l2.l2, is_container='container', yang_name="l2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """l2 must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=l2.l2, is_container='container', yang_name="l2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__l2 = t
if hasattr(self, '_set'):
self._set()
def _unset_l2(self):
self.__l2 = YANGDynClass(base=l2.l2, is_container='container', yang_name="l2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_ip(self):
"""
Getter method for ip, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip (container)
YANG Description: Top level container
"""
return self.__ip
def _set_ip(self, v, load=False):
"""
Setter method for ip, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip() directly.
YANG Description: Top level container
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ip.ip, is_container='container', yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ip must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ip.ip, is_container='container', yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__ip = t
if hasattr(self, '_set'):
self._set()
def _unset_ip(self):
self.__ip = YANGDynClass(base=ip.ip, is_container='container', yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_transport(self):
"""
Getter method for transport, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/transport (container)
YANG Description: Transport fields container
"""
return self.__transport
def _set_transport(self, v, load=False):
"""
Setter method for transport, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/transport (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_transport is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_transport() directly.
YANG Description: Transport fields container
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=transport.transport, is_container='container', yang_name="transport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """transport must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=transport.transport, is_container='container', yang_name="transport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__transport = t
if hasattr(self, '_set'):
self._set()
def _unset_transport(self):
self.__transport = YANGDynClass(base=transport.transport, is_container='container', yang_name="transport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_action(self):
"""
Getter method for action, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/action (container)
YANG Description: The forwarding policy action to be applied for
packets matching the rule.
"""
return self.__action
def _set_action(self, v, load=False):
"""
Setter method for action, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/action (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_action is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_action() directly.
YANG Description: The forwarding policy action to be applied for
packets matching the rule.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=action.action, is_container='container', yang_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """action must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=action.action, is_container='container', yang_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__action = t
if hasattr(self, '_set'):
self._set()
def _unset_action(self):
self.__action = YANGDynClass(base=action.action, is_container='container', yang_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
sequence_id = __builtin__.property(_get_sequence_id, _set_sequence_id)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
l2 = __builtin__.property(_get_l2, _set_l2)
ip = __builtin__.property(_get_ip, _set_ip)
transport = __builtin__.property(_get_transport, _set_transport)
action = __builtin__.property(_get_action, _set_action)
_pyangbind_elements = {'sequence_id': sequence_id, 'config': config, 'state': state, 'l2': l2, 'ip': ip, 'transport': transport, 'action': action, }
from . import config
from . import state
from . import l2
from . import ip
from . import transport
from . import action
class rule(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/policy-forwarding/policies/policy/rules/rule. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: A match rule for the policy. In the case that multiple
criteria are specified within a single
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__sequence_id','__config','__state','__l2','__ip','__transport','__action',)
_yang_name = 'rule'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__ip = YANGDynClass(base=ip.ip, is_container='container', yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__sequence_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="sequence-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)
self.__l2 = YANGDynClass(base=l2.l2, is_container='container', yang_name="l2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__action = YANGDynClass(base=action.action, is_container='container', yang_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__transport = YANGDynClass(base=transport.transport, is_container='container', yang_name="transport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'policy-forwarding', u'policies', u'policy', u'rules', u'rule']
def _get_sequence_id(self):
"""
Getter method for sequence_id, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/sequence_id (leafref)
YANG Description: A unique sequence identifier for the match rule.
"""
return self.__sequence_id
def _set_sequence_id(self, v, load=False):
"""
Setter method for sequence_id, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/sequence_id (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_sequence_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sequence_id() directly.
YANG Description: A unique sequence identifier for the match rule.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="sequence-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """sequence_id must be of a type compatible with leafref""",
'defined-type': "leafref",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="sequence-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
})
self.__sequence_id = t
if hasattr(self, '_set'):
self._set()
def _unset_sequence_id(self):
self.__sequence_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="sequence-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/config (container)
YANG Description: Configuration parameters relating to the match
rule.
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the match
rule.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/state (container)
YANG Description: Operational state parameters relating to the match
rule.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: Operational state parameters relating to the match
rule.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_l2(self):
"""
Getter method for l2, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/l2 (container)
YANG Description: Ethernet header fields
"""
return self.__l2
def _set_l2(self, v, load=False):
"""
Setter method for l2, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/l2 (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_l2 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_l2() directly.
YANG Description: Ethernet header fields
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=l2.l2, is_container='container', yang_name="l2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """l2 must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=l2.l2, is_container='container', yang_name="l2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__l2 = t
if hasattr(self, '_set'):
self._set()
def _unset_l2(self):
self.__l2 = YANGDynClass(base=l2.l2, is_container='container', yang_name="l2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_ip(self):
"""
Getter method for ip, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip (container)
YANG Description: Top level container
"""
return self.__ip
def _set_ip(self, v, load=False):
"""
Setter method for ip, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/ip (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip() directly.
YANG Description: Top level container
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ip.ip, is_container='container', yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ip must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ip.ip, is_container='container', yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__ip = t
if hasattr(self, '_set'):
self._set()
def _unset_ip(self):
self.__ip = YANGDynClass(base=ip.ip, is_container='container', yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_transport(self):
"""
Getter method for transport, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/transport (container)
YANG Description: Transport fields container
"""
return self.__transport
def _set_transport(self, v, load=False):
"""
Setter method for transport, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/transport (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_transport is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_transport() directly.
YANG Description: Transport fields container
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=transport.transport, is_container='container', yang_name="transport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """transport must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=transport.transport, is_container='container', yang_name="transport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__transport = t
if hasattr(self, '_set'):
self._set()
def _unset_transport(self):
self.__transport = YANGDynClass(base=transport.transport, is_container='container', yang_name="transport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_action(self):
"""
Getter method for action, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/action (container)
YANG Description: The forwarding policy action to be applied for
packets matching the rule.
"""
return self.__action
def _set_action(self, v, load=False):
"""
Setter method for action, mapped from YANG variable /network_instances/network_instance/policy_forwarding/policies/policy/rules/rule/action (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_action is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_action() directly.
YANG Description: The forwarding policy action to be applied for
packets matching the rule.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=action.action, is_container='container', yang_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """action must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=action.action, is_container='container', yang_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__action = t
if hasattr(self, '_set'):
self._set()
def _unset_action(self):
self.__action = YANGDynClass(base=action.action, is_container='container', yang_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
sequence_id = __builtin__.property(_get_sequence_id, _set_sequence_id)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
l2 = __builtin__.property(_get_l2, _set_l2)
ip = __builtin__.property(_get_ip, _set_ip)
transport = __builtin__.property(_get_transport, _set_transport)
action = __builtin__.property(_get_action, _set_action)
_pyangbind_elements = {'sequence_id': sequence_id, 'config': config, 'state': state, 'l2': l2, 'ip': ip, 'transport': transport, 'action': action, }
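# Usage sketch (illustrative, not part of the generated bindings): pyangbind
# exposes each YANG node as a plain Python property, so an unparented rule
# instance can be keyed and inspected directly, assuming the sibling
# submodule imports above resolve.
def _example_rule_usage():
    r = rule()
    r.sequence_id = "10"  # settable here because the instance has no parent list
    return str(r.sequence_id), r._path()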
|
[
"dbarrosop@dravetech.com"
] |
dbarrosop@dravetech.com
|
0350bfcfba09fa9dcfb8c0ae0577552fed9d9f2d
|
44b01a73aeea92dec4469f3c5c792dd0acf290bf
|
/examples/advanced/phase_callbacks.py
|
23bc002b715a6a30d1d5cca731d8442d1a8b7925
|
[
"Apache-2.0"
] |
permissive
|
catalyst-team/gan
|
3adb4a0e44cfd5fbce8b97b67fe85e1ef8141c5b
|
6a077f6aa889dee7f3aba9c49de27e06c1fce1c5
|
refs/heads/master
| 2021-09-29T11:25:24.544686
| 2021-09-13T06:01:04
| 2021-09-13T06:01:04
| 236,473,329
| 6
| 3
|
Apache-2.0
| 2020-04-22T23:45:16
| 2020-01-27T11:11:41
|
Python
|
UTF-8
|
Python
| false
| false
| 5,563
|
py
|
import copy
from typing import Dict, List # isort:skip
from collections import OrderedDict
from catalyst.core import State
from catalyst.dl import registry
from catalyst_gan.callbacks import PhaseManagerCallback
# TODO: remove copy-paste from catalyst.dl.callback.phase
class Phase:
"""
Class for storing information about certain phase, including
- phase name
- number of steps (batches) in phase before next phase is chosen
- how many steps (batches) are done already
"""
def __init__(self, name: str = None, steps: int = None,
batch_metric_key: str = None,
threshold: float = None,
alpha: float = 1.0,
greater_is_good: bool = None,
do_abs_metric: bool = False):
self.steps = int(steps) if steps is not None else None
assert 1e-9 < alpha <= 1
self.alpha = alpha
self.curr_step = 0
self.name = name
self.batch_metric_key = batch_metric_key
self.threshold = threshold
self.greater_is_good = greater_is_good
self.do_abs_metric = do_abs_metric
self._prev_metric_value = None
def step(self, state: State):
metric_value = state.prev_batch_metrics.get(self.batch_metric_key, None)
if metric_value is None:
return False
if self.do_abs_metric:
metric_value = abs(metric_value)
if self._prev_metric_value is not None:
metric_value = (
self._prev_metric_value * (1 - self.alpha)
+ metric_value * self.alpha
)
self._prev_metric_value = metric_value
is_greater = metric_value > self.threshold
do_step = (not is_greater) and (not self.greater_is_good)
do_step = do_step or (is_greater and self.greater_is_good)
if do_step:
self.curr_step = (self.curr_step + 1) % self.steps
phase_finished = self.curr_step == 0
if phase_finished:
self._prev_metric_value = None
return phase_finished
else:
return False
# TODO: remove copy-paste from catalyst.dl.callback.phase
class PhaseManager:
"""
Class for storing & managing all phases in experiment configuration
Stores separately current phases in train & validation modes
By calling `.step(...)` method current phase is updated by step-size
and if current phase is finished, the next phase becomes current
"""
def __init__(self, train_phases: List[Phase], valid_phases: List[Phase]):
self.train_phases = train_phases
self.valid_phases = valid_phases
self.train_index = 0
self.valid_index = 0
def step(self, state: State, step_size: int = 1):
assert step_size == 1
if state.is_train_loader:
if len(self.train_phases) > 1:
need_change_phase = self.train_phases[self.train_index].step(state)
if need_change_phase:
self.train_index = \
(self.train_index + 1) % len(self.train_phases)
else:
if len(self.valid_phases) > 1:
need_change_phase = self.valid_phases[self.valid_index].step(state)
if need_change_phase:
self.valid_index = \
(self.valid_index + 1) % len(self.valid_phases)
def get_phase_name(self, state: State):
if state.is_train_loader:
return self.train_phases[self.train_index].name
return self.valid_phases[self.valid_index].name
@registry.Callback
class SmartPhaseManagerCallback(PhaseManagerCallback):
def __init__(self, train_phases: "OrderedDict[str, int]" = None,
valid_phases: "OrderedDict[str, int]" = None,
valid_mode: str = None):
super().__init__(train_phases, valid_phases, valid_mode)
self._curr_phase_steps = 0
def _get_phase_manager(
self,
train_phases: "OrderedDict[str, Dict]" = None,
valid_phases: "OrderedDict[str, Dict]" = None,
valid_mode: str = None
):
assert (valid_phases is None) ^ (valid_mode is None), \
"Exactly one of them must be specified"
if train_phases is None:
train_phases = [Phase(name=None, steps=None)]
else:
train_phases = [
Phase(name=name, **params)
for name, params in train_phases.items()
]
if valid_phases is None:
if valid_mode == self.VALIDATION_MODE_ALL:
valid_phases = [Phase(name=None, steps=None)]
elif valid_mode == self.VALIDATION_MODE_SAME:
valid_phases = copy.deepcopy(train_phases)
else:
raise ValueError(
f"Unsupported validation_mode, should be one of "
f"{self.allowed_valid_modes}"
)
return PhaseManager(
train_phases=train_phases, valid_phases=valid_phases
)
def on_batch_start(self, state: State):
super().on_batch_start(state)
def on_batch_end(self, state: State):
super().on_batch_end(state)
if state.is_train_loader:
self._curr_phase_steps += 1
if state.phase != self.phase_manager.get_phase_name(state):
state.batch_metrics[f"phase_steps/{state.phase}"] = \
self._curr_phase_steps
self._curr_phase_steps = 0
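# Usage sketch (metric keys and thresholds below are hypothetical): two
# alternating GAN training phases, each of which steps toward completion
# only while its EMA-smoothed batch metric sits on the configured side of
# the threshold.
def _example_phase_manager():
    train_phases = [
        Phase(name="train_generator", steps=1,
              batch_metric_key="loss_d", threshold=0.5,
              alpha=0.1, greater_is_good=True),
        Phase(name="train_discriminator", steps=1,
              batch_metric_key="loss_g", threshold=0.5,
              alpha=0.1, greater_is_good=False),
    ]
    return PhaseManager(train_phases=train_phases,
                        valid_phases=copy.deepcopy(train_phases))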
|
[
"andrew.zharkov@abbyy.com"
] |
andrew.zharkov@abbyy.com
|
2e84202775f3633e16e26cb77080e6a4c281f2fe
|
bc441bb06b8948288f110af63feda4e798f30225
|
/ens_sdk/model/cmdb/object_attr_value_pb2.py
|
1c8bea37db97cc1f37eb4ffdf454c89d7af3932d
|
[
"Apache-2.0"
] |
permissive
|
easyopsapis/easyops-api-python
|
23204f8846a332c30f5f3ff627bf220940137b6b
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
refs/heads/master
| 2020-06-26T23:38:27.308803
| 2020-06-16T07:25:41
| 2020-06-16T07:25:41
| 199,773,131
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| true
| 6,395
|
py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: object_attr_value.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from ens_sdk.model.cmdb import object_attr_value_struct_pb2 as ens__sdk_dot_model_dot_cmdb_dot_object__attr__value__struct__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='object_attr_value.proto',
package='cmdb',
syntax='proto3',
serialized_options=_b('Z>go.easyops.local/contracts/protorepo-models/easyops/model/cmdb'),
serialized_pb=_b('\n\x17object_attr_value.proto\x12\x04\x63mdb\x1a\x31\x65ns_sdk/model/cmdb/object_attr_value_struct.proto\x1a\x1cgoogle/protobuf/struct.proto\"\x8a\x02\n\x0fObjectAttrValue\x12\x0c\n\x04type\x18\x01 \x01(\t\x12%\n\x05regex\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value\x12\x14\n\x0c\x64\x65\x66\x61ult_type\x18\x03 \x01(\t\x12\'\n\x07\x64\x65\x66\x61ult\x18\x04 \x01(\x0b\x32\x16.google.protobuf.Value\x12\x32\n\rstruct_define\x18\x05 \x03(\x0b\x32\x1b.cmdb.ObjectAttrValueStruct\x12\x0c\n\x04mode\x18\x06 \x01(\t\x12\x0e\n\x06prefix\x18\x07 \x01(\t\x12\x13\n\x0bstart_value\x18\x08 \x01(\x05\x12\x1c\n\x14series_number_length\x18\t \x01(\x05\x42@Z>go.easyops.local/contracts/protorepo-models/easyops/model/cmdbb\x06proto3')
,
dependencies=[ens__sdk_dot_model_dot_cmdb_dot_object__attr__value__struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
_OBJECTATTRVALUE = _descriptor.Descriptor(
name='ObjectAttrValue',
full_name='cmdb.ObjectAttrValue',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='cmdb.ObjectAttrValue.type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='regex', full_name='cmdb.ObjectAttrValue.regex', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='default_type', full_name='cmdb.ObjectAttrValue.default_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='default', full_name='cmdb.ObjectAttrValue.default', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='struct_define', full_name='cmdb.ObjectAttrValue.struct_define', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mode', full_name='cmdb.ObjectAttrValue.mode', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='prefix', full_name='cmdb.ObjectAttrValue.prefix', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='start_value', full_name='cmdb.ObjectAttrValue.start_value', index=7,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='series_number_length', full_name='cmdb.ObjectAttrValue.series_number_length', index=8,
number=9, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=115,
serialized_end=381,
)
_OBJECTATTRVALUE.fields_by_name['regex'].message_type = google_dot_protobuf_dot_struct__pb2._VALUE
_OBJECTATTRVALUE.fields_by_name['default'].message_type = google_dot_protobuf_dot_struct__pb2._VALUE
_OBJECTATTRVALUE.fields_by_name['struct_define'].message_type = ens__sdk_dot_model_dot_cmdb_dot_object__attr__value__struct__pb2._OBJECTATTRVALUESTRUCT
DESCRIPTOR.message_types_by_name['ObjectAttrValue'] = _OBJECTATTRVALUE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ObjectAttrValue = _reflection.GeneratedProtocolMessageType('ObjectAttrValue', (_message.Message,), {
'DESCRIPTOR' : _OBJECTATTRVALUE,
'__module__' : 'object_attr_value_pb2'
# @@protoc_insertion_point(class_scope:cmdb.ObjectAttrValue)
})
_sym_db.RegisterMessage(ObjectAttrValue)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
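# Usage sketch (field values are illustrative): the generated class behaves
# like any protobuf message and round-trips through the standard
# SerializeToString / ParseFromString API.
def _example_round_trip():
    attr = ObjectAttrValue(type="str", prefix="HOST-", start_value=1)
    data = attr.SerializeToString()
    parsed = ObjectAttrValue()
    parsed.ParseFromString(data)
    return parsed.type, parsed.start_value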
|
[
"service@easyops.cn"
] |
service@easyops.cn
|
7ebce75c2d3154de02e1b6ffa38cb81df8c835df
|
ec25e9a9627d637b16905dc7d12a6038c1d45096
|
/one_by_one_30%/4/sc.py
|
acc01335f5a461f09d1b7d5101d5933c4fcd8f96
|
[] |
no_license
|
Mahdi77N/covid-19
|
b1ba48ec5302277d6178862773f18b1717ef3dca
|
05be426ed213caeb52412c88c5242de2b346f1e0
|
refs/heads/master
| 2023-01-20T13:44:43.781029
| 2020-11-23T19:40:01
| 2020-11-23T19:40:01
| 315,423,008
| 1
| 0
| null | 2020-11-23T19:44:26
| 2020-11-23T19:44:26
| null |
UTF-8
|
Python
| false
| false
| 516
|
py
|
import subprocess
from pexecute.process import ProcessLoom
def Run(i):
subprocess.call("python ./prediction.py "+str(i), shell=True)
def main():
# loom = ProcessLoom(max_runner_cap = 8)
for i in range(1,7):
print(200 * '*')
print(i)
Run(i)
subprocess.call("python ./errors.py", shell=True)
# print(i)
# loom.add_function(Run,[i])
# loom.execute()
if __name__ == "__main__":
main()
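# Parallel variant (a sketch of what the commented-out ProcessLoom lines
# above suggest; not exercised by this script, and the add_function/execute
# usage is assumed from pexecute's documented API):
def main_parallel():
    loom = ProcessLoom(max_runner_cap=8)
    for i in range(1, 7):
        loom.add_function(Run, [i])
    loom.execute()
    subprocess.call("python ./errors.py", shell=True)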
|
[
"marmegh@gmail.com"
] |
marmegh@gmail.com
|
b409b037061d3893cf2c2c4ad1317e510f63e35f
|
df2cbe914f463ad050d7ed26194424afbe3a0a52
|
/addons/portal_rating/controllers/portal_rating.py
|
2e8e4884730fe8bdee603045117406e60c3b6b03
|
[
"Apache-2.0"
] |
permissive
|
SHIVJITH/Odoo_Machine_Test
|
019ed339e995be980606a2d87a63312ddc18e706
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
refs/heads/main
| 2023-07-16T16:23:14.300656
| 2021-08-29T11:48:36
| 2021-08-29T11:48:36
| 401,010,175
| 0
| 0
|
Apache-2.0
| 2021-08-29T10:13:58
| 2021-08-29T10:13:58
| null |
UTF-8
|
Python
| false
| false
| 749
|
py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import http, _
from odoo.http import request
class PortalRating(http.Controller):
@http.route(['/website/rating/comment'], type='json', auth="user", methods=['POST'], website=True)
def publish_rating_comment(self, rating_id, publisher_comment):
rating = request.env['rating.rating'].search([('id', '=', int(rating_id))])
if not rating:
return {'error': _('Invalid rating')}
rating.write({'publisher_comment': publisher_comment})
# return to the front-end the created/updated publisher comment
return rating.read(['publisher_comment', 'publisher_id', 'publisher_datetime'])[0]
|
[
"36736117+SHIVJITH@users.noreply.github.com"
] |
36736117+SHIVJITH@users.noreply.github.com
|
7aa5cdd160d7b30b3446ab9d7341fcc95f70ee6a
|
e37a5eeacc4a4bb2216f76a9301401446e6c956b
|
/tango_with_django_project/tango_with_django_project/urls.py
|
7a4a789df7b7eb723717f5855ca967e1330e0711
|
[] |
no_license
|
fullfr34k/Rango
|
ca0860635c8d7afb1c54158970ddf544d7ba6d8d
|
67b23aa6c8802c1a1fed7a92f8229a631ecf4bdb
|
refs/heads/master
| 2020-04-17T00:55:55.323462
| 2016-09-02T13:44:41
| 2016-09-02T13:44:41
| 67,223,623
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,029
|
py
|
"""tango_with_django_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.conf.urls import include
from rango import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^rango/', include('rango.urls')),
# above maps any URLs starting
# with rango/ to be handled by
# the rango application
url(r'^admin/', admin.site.urls),
]
|
[
"uzohobialeri@live.com"
] |
uzohobialeri@live.com
|
52825730c9e220f34d4b1e21552fd3c9fc880ec5
|
51f5ba127e25d775bcf4454b8addf5a0e3258104
|
/content/downloads/euler/p38.py
|
36e403cf1a5f1bc667404438b88bc672bf154996
|
[] |
no_license
|
fengxia41103/myblog
|
db7a7508f88965ac99adbd5cc166f922f21e3d87
|
18c3eb9de27222b37b21aee6ad643f7dc0402c97
|
refs/heads/main
| 2023-08-30T21:08:31.236197
| 2023-08-30T14:43:00
| 2023-08-30T14:43:00
| 66,040,322
| 1
| 0
| null | 2023-03-27T13:21:32
| 2016-08-19T00:41:47
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,543
|
py
|
# -*- coding: utf-8 -*-
"""Take the number 192 and multiply it by each of 1, 2, and 3:
192 × 1 = 192
192 × 2 = 384
192 × 3 = 576
By concatenating each product we get the 1 to 9 pandigital,
192384576. We will call 192384576 the concatenated product of 192 and
(1,2,3)
The same can be achieved by starting with 9 and multiplying by 1, 2,
3, 4, and 5, giving the pandigital, 918273645, which is the
concatenated product of 9 and (1,2,3,4,5).
What is the largest 1 to 9 pandigital 9-digit number that can be
formed as the concatenated product of an integer with (1,2, ... , n)
where n > 1? """
from functools import reduce
from operator import add


def main():
    method_1()
    method_2()


def method_1():
    max_digits = 9
    # 1-9 pandigital test
    test = lambda x: set('123456789') == set(x)
    # iterate problem space
    result = 0
    for n in [2, 3, 4]:  # number of digits in the base integer
        for i in range(10 ** (n - 1) * 9, 10 ** n):  # potential numbers
            # concatenated product strings
            tmp = reduce(add, [str(i * j)
                               for j in range(1, max_digits // n + 1)])
            # pandigital test
            if test(tmp):
                print('found', n, i, tmp)
                result = max(result, int(tmp))
    print(result)


def method_2():
    """Same solution with the added assumption that the answer has 9 digits
    and starts with 9 -> 9xxx.
    """
    for x in range(9000, 10000):
        st = str(x) + str(x * 2)
        if set('123456789') == set(st):
            print(x, st)


if __name__ == '__main__':
    main()
|
[
"fxia1@lenovo.com"
] |
fxia1@lenovo.com
|
3517a958ce771fa25670dc830749f1ebb2204ad8
|
2e28b34e6fcf6890bdc0bacafbac41c996dbd1e7
|
/doc/source/conf.py
|
87936b78723b72aae3793bf4e2ef7b7bb5986581
|
[
"MIT"
] |
permissive
|
muhammedfurkan/shelfdb
|
a516d57757c7a776f6a548294b50382f28e21630
|
96a1dd27d2cd885d96098e0db2545ce3526f54a3
|
refs/heads/master
| 2021-05-18T01:02:33.458427
| 2020-03-23T04:40:33
| 2020-03-23T04:40:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,853
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# ShelfDB documentation build configuration file, created by
# sphinx-quickstart on Thu Feb 23 21:25:00 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme
import shelfdb
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'ShelfDB'
copyright = '2017, Nitipit Nontasuwan'
author = 'Nitipit Nontasuwan'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2.1'
# The full version, including alpha/beta/rc tags.
release = '0.2.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'ShelfDBdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'ShelfDB.tex', 'ShelfDB Documentation',
'Nitipit Nontasuwan', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'shelfdb', 'ShelfDB Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'ShelfDB', 'ShelfDB Documentation',
author, 'ShelfDB', 'One line description of project.',
'Miscellaneous'),
]
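# Illustrative note, not part of the original conf.py: with the autodoc and
# coverage extensions enabled above, the HTML docs would typically be built
# from the doc/ directory with a command along the lines of
#
#     sphinx-build -b html source build/html
#
# (the build directory name here is an assumption, not taken from this
# repository).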
|
[
"nitipit@gmail.com"
] |
nitipit@gmail.com
|
c6850ff0db5408f68ffb310180c2e6fcfa6a64c0
|
9fe646e011269454ef6219fb7d87159df02b8555
|
/arrays/chocolate_distribution.py
|
42aad5dce92f191d788deb8086e0c239d7de6dcd
|
[] |
no_license
|
shreyas-selvaraj/geeks_for_geeks_problems
|
34fe9b01cebf6be35bd3d3ba9802478ff256c928
|
a3d244494df86ba26bad0576469dc936ea027100
|
refs/heads/master
| 2022-11-28T22:01:08.851558
| 2020-07-29T11:59:15
| 2020-07-29T11:59:15
| 283,486,919
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 469
|
py
|
def chocolate_dist(arr, students):
    # Each packet holds a different number of chocolates and every student
    # receives exactly one packet. Choose `students` packets so that the
    # difference between the largest and smallest chosen packet is minimal.
    if students == 0 or len(arr) < students:
        return 0
    arr.sort()
    # After sorting, the optimal selection is always a contiguous window of
    # size `students`; scan every window and keep the smallest spread.
    return min(arr[i + students - 1] - arr[i]
               for i in range(len(arr) - students + 1))

T = int(input())
ans = []
for i in range(T):
    N = int(input())  # number of packets (implicit in the list read below)
    arr = [int(x) for x in input().split()]
    students = int(input())
    ans.append(chocolate_dist(arr, students))
for a in ans:
    print(a)
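# Usage sketch (the sample values are illustrative, not from the original
# problem statement): sorted packets [2, 3, 4, 7, 9, 12, 56] give the
# minimal spread 2 via the window (2, 3, 4).
#
#     chocolate_dist([7, 3, 2, 4, 9, 12, 56], 3)  # -> 2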
|
[
"shreeachari@gmail.com"
] |
shreeachari@gmail.com
|
8fc7cebe00a0f7f59d9119c58cd1eba1f57e32ec
|
bd6da83f5438b3f2ab8f7c6abe0a308e66a123e5
|
/egresocovid19/database.py
|
fb0e3f17b700955fd3fd239c55199fcd47688660
|
[] |
no_license
|
CoolCows/egresocovid19-server
|
f8ac2a6de247bf784f9e33fa1ff93e39c082efce
|
eb711f4d1101a97261674dea8796627c24a83c89
|
refs/heads/main
| 2023-08-14T00:00:55.119430
| 2021-09-18T18:49:11
| 2021-09-18T18:49:11
| 407,357,259
| 0
| 0
| null | 2021-09-17T00:49:50
| 2021-09-17T00:49:50
| null |
UTF-8
|
Python
| false
| false
| 4,105
|
py
|
from datetime import datetime
from typing import List, Optional
from beanie import PydanticObjectId
from motor.motor_asyncio import AsyncIOMotorClient
from pydantic import BaseModel, EmailStr, Field
from .enums import (
AftermathEnum,
BloodTypeEnum,
ContagionEnum,
DiagnosisWayEnum,
IncomeEnum,
ProphylaxisEnum,
SexEnum,
SkinColorEnum,
TestDiagnosisEnum,
TreatmentEnum,
)
from .settings import get_settings
from .utils.base_entity import BaseEntity
client = AsyncIOMotorClient(get_settings().database_url)
class DischargeOfPositiveCasesOfCovid19EmbeddedEntity(BaseModel):
detection_date: Optional[datetime]
    # With autocompletion based on previous data and with default options
# SymptomEntity
symptoms: List[PydanticObjectId]
duration_of_symptoms: Optional[int]
diagnosis_way: Optional[DiagnosisWayEnum]
test_used_in_diagnosis: Optional[TestDiagnosisEnum]
days_from_symptoms_to_diagnosis: Optional[int]
number_pcr_performed: Optional[int]
time_from_diagnosis_to_negative_or_discharge: Optional[int]
form_of_contagion: Optional[ContagionEnum]
was_he_part_of_an_event: Optional[bool]
did_he_work_in_the_attention_to_positive_cases: Optional[bool]
hospitalization_time: Optional[str]
incomes: List["IncomeEmbeddedEntity"]
contacts_first_level: Optional[int]
contacts_first_level_positives: Optional[int]
contacts_second_level: Optional[int]
contacts_second_level_positives: Optional[int]
contacts_third_level: Optional[int]
contacts_third_level_positives: Optional[int]
treatments_received: List[TreatmentEnum]
    # With autocompletion based on previous data
antibiotics: List[str]
prophylaxis: List[ProphylaxisEnum]
    # With autocompletion based on previous data
another_vaccine_against_covid: Optional[str]
aftermath: List[AftermathEnum]
    # With autocompletion based on previous data
others_aftermath: List[str]
class IncomeEmbeddedEntity(BaseModel):
    income: IncomeEnum
    days: int

# Resolve the string forward reference "IncomeEmbeddedEntity" used by
# DischargeOfPositiveCasesOfCovid19EmbeddedEntity; Pydantic v1 needs this
# call once the referenced class is defined.
DischargeOfPositiveCasesOfCovid19EmbeddedEntity.update_forward_refs()
class MunicipalityEmbeddedEntity(BaseModel):
id: PydanticObjectId = Field(default_factory=PydanticObjectId)
name: str
class PathologicalEmbeddedEntity(BaseModel):
pathological: PydanticObjectId
treatments: str
class PathologicalEntity(BaseEntity):
name: str
default: bool = False
class Collection:
name: str = "pathologicals"
class PatientEntity(BaseEntity):
firstname: str
lastname: str
ci: str
age: int
sex: SexEnum
skin_color: SkinColorEnum
blood_type: Optional[BloodTypeEnum]
address: str
# MunicipalityEmbeddedEntity
municipality: PydanticObjectId
    # With autocompletion based on previous data
polyclinic: str
    # With autocompletion based on previous data
surgery: str
    # With autocompletion based on previous data
popular_council: str
    # With autocompletion based on previous data
neighborhood: str
block_number: int
    # With autocompletion based on previous data and with default options
personal_pathological_history: List[PathologicalEmbeddedEntity]
    # With autocompletion based on previous data and with default options
family_pathological_history: List[PathologicalEmbeddedEntity]
discharge_of_positive_cases_of_covid_19: Optional[
DischargeOfPositiveCasesOfCovid19EmbeddedEntity
]
class Collection:
name: str = "patients"
class ProvinceEntity(BaseEntity):
name: str
municipalities: List[MunicipalityEmbeddedEntity]
class Collection:
name: str = "provinces"
class SymptomEntity(BaseEntity):
name: str
default: bool = False
class Collection:
name: str = "symptoms"
class UserEntity(BaseEntity):
name: str
email: EmailStr
phone: str
hashed_password: str
email_confirmed: bool = False
phone_confirmed: bool = False
disabled: bool = False
class Collection:
name: str = "users"
entities = [
PathologicalEntity,
PatientEntity,
ProvinceEntity,
SymptomEntity,
UserEntity,
]
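# Illustrative sketch, not part of the original module: how these entity
# models would typically be registered with Beanie. It assumes BaseEntity
# subclasses beanie.Document; the database name "egresocovid19" is a
# hypothetical placeholder.
async def init_db() -> None:
    from beanie import init_beanie

    await init_beanie(
        database=client["egresocovid19"],
        document_models=entities,
    )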
|
[
"leynier41@gmail.com"
] |
leynier41@gmail.com
|