hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
37afd5c2ce06e31b0fb649e3c85d0e4fb3ee953a | 3,429 | py | Python | Misc Learning/HackerRank 30 Days of Code/Additional Practice Problems/Dominator.py | hamil168/Learning-Data-Science | dd91e4336b6a48a30265a86f8b816658639a17e9 | [
"BSD-2-Clause"
] | null | null | null | Misc Learning/HackerRank 30 Days of Code/Additional Practice Problems/Dominator.py | hamil168/Learning-Data-Science | dd91e4336b6a48a30265a86f8b816658639a17e9 | [
"BSD-2-Clause"
] | 1 | 2018-03-31T05:54:37.000Z | 2018-03-31T05:54:37.000Z | Misc Learning/HackerRank 30 Days of Code/Additional Practice Problems/Dominator.py | hamil168/Data-Science-Misc | dd91e4336b6a48a30265a86f8b816658639a17e9 | [
"BSD-2-Clause"
] | null | null | null | """
Dominator Problem
by Codility
Solution by B Hamilton
An array A consisting of N integers is given.
The dominator of array A is the value that occurs in more than half of the
elements of A.
Write a function that, given an array A consisting of N integers, returns
index of any element of array A in which the dominator of A occurs.
The function should return −1 if array A does not have a dominator.
(Given) Assume that:
N is an integer within the range [0..100,000];
each element of array A is an integer within the range
[−2,147,483,648..2,147,483,647].
Target Complexity: Time O(N) or O(NlogN) | Space O(1)
Task Score: 83% | Correctness: 75% | Performance 100%
"""
def solution(A):
    """Return an index of the dominator of A, or -1 if there is none.

    The dominator is the value occurring in MORE than half of A's elements.
    Uses the Boyer-Moore majority-vote algorithm followed by a verification
    pass: O(N) time, O(1) extra space for the voting phase.

    :param A: list of integers (N in [0..100,000])
    :return: any index at which the dominator occurs, or -1
    """
    # Empty input (or anything outside the stated constraints) has no dominator.
    if not A or len(A) > 100000:
        return -1

    # Boyer-Moore vote: if a dominator exists, it survives as `candidate`
    # because it outnumbers all other values combined.
    candidate = None
    votes = 0
    for value in A:  # O(N)
        if votes == 0:
            candidate = value
            votes = 1
        elif value == candidate:
            votes += 1
        else:
            votes -= 1

    # Verification pass (second O(N)): collect all indices of the candidate.
    candidate_indices = [idx for idx, num in enumerate(A) if num == candidate]

    # A dominator must occur in strictly more than half the elements.
    # (The previous `round(len(A)/2)` threshold wrongly accepted exact
    # 50/50 splits on even-length arrays.)
    if len(candidate_indices) <= len(A) // 2:
        return -1

    # Any index of the dominator is acceptable; return the first.
    return candidate_indices[0]
# Test Cases that failed:
# any wher len(A) %2 == 0, split 50/50
# half elements the same, and half + 1 elements the same (got 10/20 for value 2,
# but 2 needed 11 to be the dominator
#
# N/2 values of 1, N even + [0,0,1,1,1] (for N = 4, not sure how this works)
#
#
| 30.891892 | 80 | 0.624089 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,450 | 0.713662 |
37b03eee963ae5d66a198ecf984f7cee7d0c3fdc | 4,437 | py | Python | avalon-scone/scone_worker/avalon_worker/workload/openvino.py | T-Systems-MMS/hyperledger-secure-avalon | b43796a53726242d54be526ce9f6fc8d8a2670d1 | [
"Apache-2.0"
] | null | null | null | avalon-scone/scone_worker/avalon_worker/workload/openvino.py | T-Systems-MMS/hyperledger-secure-avalon | b43796a53726242d54be526ce9f6fc8d8a2670d1 | [
"Apache-2.0"
] | 1 | 2021-02-03T07:57:06.000Z | 2021-02-13T13:53:49.000Z | avalon-scone/scone_worker/avalon_worker/workload/openvino.py | T-Systems-MMS/hyperledger-secure-avalon | b43796a53726242d54be526ce9f6fc8d8a2670d1 | [
"Apache-2.0"
] | 4 | 2021-06-09T08:55:26.000Z | 2021-11-26T16:25:48.000Z | #!/usr/bin/python3
# Copyright 2020 Mujtaba Idrees
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
import string
import toml
import requests
import sys
import logging
import json
from workload.workload import WorkLoad
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(sys.stdout))
# -------------------------------------------------------------------------
class OpenvinoWorkLoad(WorkLoad):
    """
    Openvino workload class. This is an example workload.

    Submits an image-inference job to an OpenVINO enclave by creating (or
    updating) a SCONE CAS session built from a session template.
    """

# -------------------------------------------------------------------------

    def execute(self, in_data_array):
        """
        Executes Openvino workload.

        Parameters :
            in_data_array: Input data array containing data in plain bytes
        Returns :
            status as boolean and output result in bytes.
        """
        logger.info("Execute Openvino workload")
        data_plain_bytes = in_data_array[0]["data"]
        try:
            tcf_home = os.environ.get("TCF_HOME", "/project/avalon")
            config = toml.load(tcf_home + "/config/scone_config.toml")
            data_str = data_plain_bytes.decode("UTF-8")
            openvino_template = config["WorkloadExecution"]["openvino_template"]
            openvino_template_arr = openvino_template.split('-')
            session_name = openvino_template_arr[0] + '-' \
                + os.environ["SELF_IDENTITY"]
            # SystemRandom draws from os.urandom, suitable for session secrets.
            rand_str = ''.join(random.SystemRandom().choice(
                string.ascii_uppercase + string.digits) for _ in range(20))
            PATH = 'session_log.txt'
            # Context managers guarantee the files are closed even if a
            # later step raises (the previous open()/close() pairs leaked
            # handles on exceptions).
            with open(tcf_home + "/config/openvino_session_template.yml",
                      "r") as f:
                opv_session_file = f.read()
            # If a session log exists, this is a session *update*: chain it
            # to the previous session via its hash (EAFP replaces the old
            # isfile/access check, which was race-prone).
            try:
                with open(PATH, "r") as f:
                    prev_session_hash = f.read()
                opv_session_file = opv_session_file \
                    + '\npredecessor: ' + prev_session_hash
            except OSError:
                # No previous session -> create a fresh one.
                pass
            # Adding actual values to session template
            opv_session_file = opv_session_file.replace(
                "SESSION_NAME", session_name)
            opv_session_file = opv_session_file.replace(
                "IMAGE_NAME", data_str)
            opv_session_file = opv_session_file.replace(
                "RANDOM_STRING", rand_str)
            scone_cas_alias = config["CAS"]["scone_cas_alias"]
            scone_cas_port = config["CAS"]["scone_cas_port"]
            scone_cas_url = 'https://' + scone_cas_alias + ':' + scone_cas_port
            # Post session add/update request to CAS
            # certs used in this req are the ones which are generated at
            # worker boot up
            res = requests.post(
                scone_cas_url + '/session',
                opv_session_file.encode(),
                verify=tcf_home + '/cas-ca.pem',
                cert=(tcf_home + '/client.crt', tcf_home + '/client-key.key'))
            session_upload_response = json.loads(res.text)
            # Persist the session hash so the next request updates this
            # session instead of creating a new one.
            with open("session_log.txt", "wt") as f:
                f.write(session_upload_response['hash'])
            out_msg = "Workload submitted to openvino enclave see output folder"
            out_msg_bytes = out_msg.encode("utf-8")
            result = True
        except Exception as e:
            # Fixed typo in error message ("ovenvino" -> "openvino").
            out_msg = "Error processing openvino workload: " + str(e)
            out_msg_bytes = out_msg.encode("utf-8")
            logger.error(out_msg)
            result = False
        return result, out_msg_bytes

# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
| 39.616071 | 174 | 0.610548 | 3,415 | 0.769664 | 0 | 0 | 0 | 0 | 0 | 0 | 2,028 | 0.457066 |
37b0ef435382ce1f55b5e500c3de8d4e9d9bcd74 | 6,504 | py | Python | MLProjects/iris-machine-learning-master/irisML.py | evidawei/HacktoberFest_2021 | 3c950c6a6451ac732c4090f374c7dc4b6ef36c50 | [
"MIT"
] | 33 | 2021-10-01T17:51:53.000Z | 2022-03-20T11:30:09.000Z | MLProjects/iris-machine-learning-master/irisML.py | evidawei/HacktoberFest_2021 | 3c950c6a6451ac732c4090f374c7dc4b6ef36c50 | [
"MIT"
] | 69 | 2021-10-01T09:07:22.000Z | 2021-10-20T02:21:12.000Z | MLProjects/iris-machine-learning-master/irisML.py | evidawei/HacktoberFest_2021 | 3c950c6a6451ac732c4090f374c7dc4b6ef36c50 | [
"MIT"
] | 187 | 2021-10-01T09:06:51.000Z | 2022-01-29T03:18:30.000Z |
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from IPython.display import Image
from sklearn.externals.six import StringIO
from sklearn.tree import export_graphviz
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split
from sklearn import tree
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn import svm
from sklearn import metrics
from sklearn.tree import DecisionTreeRegressor
# Load the Iris dataset; the columns sepal_length, sepal_width,
# petal_length, petal_width and species are used below.
df = pd.read_csv("~/Desktop/iris-machine-learning/IRIS.csv")
"""EDA plot"""
# Scatter plot of sepal dimensions, one colour per species. The follow-up
# calls draw onto the same axes via ax=EDA_plot_sepal.
EDA_plot_sepal = df[df.species == "Iris-setosa"].plot(kind = "scatter", x="sepal_length", y="sepal_width", color="red", label="Setosa")
df[df.species == "Iris-versicolor"].plot(kind = "scatter", x="sepal_length", y="sepal_width", color="green", label="Versicolor", ax=EDA_plot_sepal)
df[df.species == "Iris-virginica"].plot(kind = "scatter", x="sepal_length", y="sepal_width", color="yellow", label="Virginica", ax=EDA_plot_sepal)
EDA_plot_sepal.set_xlabel("Sepal Length")
EDA_plot_sepal.set_ylabel("Sepal Width")
EDA_plot_sepal.set_title("IRIS Sepal")
# plt.gcf() returns the current figure so its size can be adjusted.
EDA_plot_sepal=plt.gcf()
EDA_plot_sepal.set_size_inches(10,7)
# Same scatter plot, but for the petal dimensions.
EDA_plot_petal = df[df.species == "Iris-setosa"].plot(kind = "scatter", x="petal_length", y="petal_width", color="red", label="Setosa")
df[df.species == "Iris-versicolor"].plot(kind = "scatter", x="petal_length", y="petal_width", color="green", label="Versicolor", ax=EDA_plot_petal)
df[df.species == "Iris-virginica"].plot(kind = "scatter", x="petal_length", y="petal_width", color="yellow", label="Virginica", ax=EDA_plot_petal)
EDA_plot_petal.set_xlabel("Petal Length")
EDA_plot_petal.set_ylabel("Petal Width")
EDA_plot_petal.set_title("IRIS Petal")
EDA_plot_petal=plt.gcf()
EDA_plot_petal.set_size_inches(10,7)
"""Histogram"""
# One histogram per numeric feature, all in a single figure.
df.hist(edgecolor = "black", linewidth = 1)
hist = plt.gcf()
hist.set_size_inches(10,7)
"""Violin Plot"""
plt.figure(figsize=(15,10))
plt.subplot(2, 2, 1)
sns.violinplot(x = "species", y = "sepal_length", data = df)
plt.subplot(2, 2, 2)
sns.violinplot(x = "species", y = "sepal_width", data = df)
plt.subplot(2, 2, 3)
sns.violinplot(x = "species", y = "petal_length", data = df)
plt.subplot(2, 2, 4)
sns.violinplot(x = "species", y = "petal_length", data = df)
"""HeatMap"""
plt.figure(figsize=(8, 4))
sns.heatmap(df.corr(), annot=True, cmap="cubehelix_r")
plt.show()
"""Splitting The Data into Training And Testing Dataset"""
train, test = train_test_split(df, test_size=0.2) #data split to train ans test with the attribute test size 20% and train 80%
train_x = train[["sepal_length", "sepal_width", "petal_length", "petal_width"]] #feat data
train_y = train.species #output from training data
test_x = test[["sepal_length", "sepal_width", "petal_length", "petal_width"]] #feat data
test_y = test.species #output from testing data
"""Creating IRIS Training Data"""
sepal = df[["sepal_length", "sepal_width", "species"]]
petal = df[["petal_length", "petal_width", "species"]]
train_sepal, test_sepal = train_test_split(sepal, test_size=0.2, random_state=0)
train_x_sepal = train_sepal[["sepal_length", "sepal_width"]]
train_y_sepal = train_sepal.species
test_x_sepal = test_sepal[["sepal_length", "sepal_width"]]
test_y_sepal = test_sepal.species
train_petal, test_petal = train_test_split(petal, test_size=0.2, random_state=0)
train_x_petal = train_petal[["petal_length", "petal_width"]]
train_y_petal = train_petal.species
test_x_petal = test_petal[["petal_length", "petal_width"]]
test_y_petal = test_petal.species
"""Support Vector Machine Algorithm"""
model = svm.SVC()
model.fit(train_x, train_y)
prediction = model.predict(test_x)
print("The accuracy of SVM: ", metrics.accuracy_score(prediction, test_y))
model = svm.SVC()
model.fit(train_x_sepal, train_y_sepal)
prediction = model.predict(test_x_sepal)
model.fit(train_x_petal, train_y_petal)
prediction = model.predict(test_x_petal)
print("The accuracy of SVM_sepal: ", metrics.accuracy_score(prediction, test_y_sepal))
print("The accuracy of SVM_petal: ", metrics.accuracy_score(prediction, test_y_petal))
"""Logistic Regression"""
model = LogisticRegression()
model.fit(train_x, train_y)
prediction = model.predict(test_x)
print("The accuracy of Logistic Regression: ", metrics.accuracy_score(prediction, test_y))
model = LogisticRegression()
model.fit(train_x_sepal, train_y_sepal)
prediction = model.predict(test_x_sepal)
model.fit(train_x_petal, train_y_petal)
prediction = model.predict(test_x_petal)
print("The accuracy of Logistic Regression_sepal: ", metrics.accuracy_score(prediction, test_y_sepal))
print("The accuracy of Logistic Regression_petal: ", metrics.accuracy_score(prediction, test_y_petal))
"""Decision Tree"""
model = DecisionTreeClassifier(max_depth=4)
model.fit(train_x, train_y)
prediction = model.predict(test_x)
print("The accuracy of DecisionTree: ", metrics.accuracy_score(prediction, test_y))
model = DecisionTreeClassifier()
model.fit(train_x_sepal, train_y_sepal)
prediction = model.predict(test_x_sepal)
model.fit(train_x_petal, train_y_petal)
prediction = model.predict(test_x_petal)
print("The accuracy of DecisionTree_sepal: ", metrics.accuracy_score(prediction, test_y_sepal))
print("The accuracy of DecisionTree_petal: ", metrics.accuracy_score(prediction, test_y_petal))
"""K-Nearest Neighbors"""
model = KNeighborsClassifier(n_neighbors=5)
model.fit(train_x, train_y)
prediction = model.predict(test_x)
print("The accuracy of KNN: ", metrics.accuracy_score(prediction, test_y))
model = KNeighborsClassifier(n_neighbors=5)
model.fit(train_x_sepal, train_y_sepal)
prediction = model.predict(test_x_sepal)
model.fit(train_x_petal, train_y_petal)
prediction = model.predict(test_x_petal)
print("The accuracy of KNN_sepal: ", metrics.accuracy_score(prediction, test_y_sepal))
print("The accuracy of KNN_petal: ", metrics.accuracy_score(prediction, test_y_petal))
"""check the accuracy for various values of n for KNN"""
n_range = list(range(1,20))
n = pd.Series()
for i in n_range:
model = KNeighborsClassifier(n_neighbors=i)
model.fit(train_x, train_y)
prediction = model.predict(test_x)
n = n.append(pd.Series(metrics.accuracy_score(prediction, test_y)))
plt.plot(n_range, n)
plt.xticks(n_range)
# Multi-panel visualisations over all features at once.
# NOTE(review): df was read from IRIS.csv whose columns (used above) are the
# four features plus species; it is not evident from this file that an "Id"
# column exists -- if it does not, df.drop("Id", axis=1) raises a KeyError.
# Verify against the CSV header before running.
sns.pairplot(df.drop("Id", axis = 1), hue = "species", size = 3)
pd.plotting.andrews_curves(df.drop("Id", axis = 1), "species")
pd.plotting.parallel_coordinates(df.drop("Id", axis = 1), "species")
plt.show()
37b13ac6a482ca464f323ea8e9763e6ccd5bd832 | 3,646 | py | Python | Data Structures/Heap/JesseAndCookies.py | aibenStunner/HackerRank | de223f2b1fa95d1959deef9ce14b39baa61100ba | [
"MIT"
] | 2 | 2020-04-17T02:54:59.000Z | 2020-06-08T23:32:12.000Z | Data Structures/Heap/JesseAndCookies.py | aibenStunner/HackerRank | de223f2b1fa95d1959deef9ce14b39baa61100ba | [
"MIT"
] | null | null | null | Data Structures/Heap/JesseAndCookies.py | aibenStunner/HackerRank | de223f2b1fa95d1959deef9ce14b39baa61100ba | [
"MIT"
] | 1 | 2021-11-16T14:05:22.000Z | 2021-11-16T14:05:22.000Z | #!/bin/python3
import os
import sys
class minHeap:
    """Array-backed binary min-heap: the smallest element sits at index 0,
    and the children of node i live at indices 2*i+1 and 2*i+2."""

    def __init__(self):
        self.items = []
        self.size = len(self.items)

    def getLeftChildIndex(self, parentIndex):
        return 2 * parentIndex + 1

    def getRightChildIndex(self, parentIndex):
        return 2 * parentIndex + 2

    def getParentIndex(self, childIndex):
        return int((childIndex - 1) / 2)

    def hasLeftChild(self, index):
        return self.getLeftChildIndex(index) < self.size

    def hasRightChild(self, index):
        return self.getRightChildIndex(index) < self.size

    def hasParent(self, index):
        return self.getParentIndex(index) >= 0

    def leftChild(self, index):
        return self.items[self.getLeftChildIndex(index)]

    def rightChild(self, index):
        return self.items[self.getRightChildIndex(index)]

    def parent(self, index):
        return self.items[self.getParentIndex(index)]

    def swap(self, indexOne, indexTwo):
        # Tuple unpacking exchanges the two slots without a temporary.
        self.items[indexOne], self.items[indexTwo] = (
            self.items[indexTwo], self.items[indexOne])

    def pop(self):
        """Remove and return the minimum element; raises on an empty heap."""
        if self.size == 0:
            raise Exception("Empty Heap!!")
        root = self.items[0]
        # Move the last element to the root, shrink, then sift it down.
        self.items[0] = self.items[self.size - 1]
        del self.items[self.size - 1]
        self.size = len(self.items)
        self.heapifyDown()
        return root

    def peek(self):
        """Return (without removing) the minimum element; raises if empty."""
        if self.size == 0:
            raise Exception("Empty Heap!!")
        return self.items[0]

    def add(self, item):
        """Insert `item` and restore the heap property by sifting up."""
        self.items.append(item)
        self.size = len(self.items)
        self.heapifyUp()

    def heapify(self, arr):
        """Insert every element of `arr`, one at a time."""
        for element in arr:
            self.add(element)

    def heapifyUp(self):
        # Bubble the freshly appended element up while its parent is larger.
        i = self.size - 1
        while self.hasParent(i) and self.parent(i) > self.items[i]:
            parent_i = self.getParentIndex(i)
            self.swap(parent_i, i)
            i = parent_i

    def heapifyDown(self):
        # Sift the root down, always descending toward the smaller child.
        i = 0
        while self.hasLeftChild(i):
            child = self.getLeftChildIndex(i)
            if self.hasRightChild(i) and self.rightChild(i) < self.leftChild(i):
                child = self.getRightChildIndex(i)
            if self.items[i] < self.items[child]:
                break
            self.swap(i, child)
            i = child
#
# Complete the cookies function below.
#
def cookies(k, A):
    """Return the number of mix operations needed until every cookie in A
    has sweetness >= k, or -1 if that is impossible.

    Each operation removes the two least-sweet cookies a <= b and adds a
    new cookie with sweetness a + 2*b. Uses the stdlib heapq instead of
    the hand-rolled heap class, and replaces the bare `except:` (which
    silently swallowed every error) with an explicit impossibility check.
    O(N log N) overall.

    :param k: target minimum sweetness
    :param A: list of cookie sweetness values
    """
    from heapq import heapify, heappop, heappush
    heap = list(A)  # copy so the caller's list is not reordered
    heapify(heap)
    operations = 0
    while heap and heap[0] < k:
        if len(heap) < 2:
            # Only one cookie left and it is still below k: impossible.
            return -1
        least = heappop(heap)
        second = heappop(heap)
        heappush(heap, least + 2 * second)
        operations += 1
    # An empty input can never reach sweetness k (matches the original
    # behaviour where peek() on an empty heap raised and -1 was returned).
    return operations if heap else -1
if __name__ == '__main__':
    # fptr = open(os.environ['OUTPUT_PATH'], 'w')
    # First input line: "n k" -- n is the cookie count (unused below; the
    # list length is used instead), k is the target minimum sweetness.
    nk = input().split()
    n = int(nk[0])
    k = int(nk[1])
    # Second input line: the cookies' sweetness values.
    A = list(map(int, input().rstrip().split()))
    result = cookies(k, A)
    print(result)
    # fptr.write(str(result) + '\n')
    # fptr.close()
# def cookies(k, A):
# from heapq import heappop,heappush,heapify
# heapify(A)
# fC = 0
# try:
# while A[0] < k:
# fC+=1
# c1 = heappop(A)
# c2 = heappop(A)
# newCookie=(1*c1)+(2*c2)
# heappush(A,newCookie)
# return fC
# except:
# return -1
| 26.42029 | 93 | 0.538124 | 2,501 | 0.685957 | 0 | 0 | 0 | 0 | 0 | 0 | 618 | 0.169501 |
37b14ef8ca35ac4c2685b58e6101352688efef2d | 4,751 | py | Python | submodule.py | Jaewoo97/VisualOdomtery | cc6fc72dd40cb5f37e735502e6409d4329279c1d | [
"MIT"
] | null | null | null | submodule.py | Jaewoo97/VisualOdomtery | cc6fc72dd40cb5f37e735502e6409d4329279c1d | [
"MIT"
] | null | null | null | submodule.py | Jaewoo97/VisualOdomtery | cc6fc72dd40cb5f37e735502e6409d4329279c1d | [
"MIT"
] | null | null | null | from __future__ import print_function
import torch
import torch.nn as nn
import torch.utils.data
from torch.autograd import Variable
import torch.nn.functional as F
import math
import numpy as np
def test(model, imgL, imgR, disp_true):
    """Run one evaluation step of a stereo disparity model.

    Pads the images up to a multiple of 16, runs the model without
    gradients, strips the padding from the prediction and returns the
    end-point error over valid pixels.

    :param model: network taking (imgL, imgR) and returning a disparity map
    :param imgL, imgR: left/right image tensors -- moved to CUDA here
    :param disp_true: ground-truth disparity tensor
    :return: (end-point-error as a CPU scalar, or 0 when there are no
        valid pixels; predicted disparities at the valid-pixel mask)
    """
    model.eval()
    imgL, imgR, disp_true = imgL.cuda(), imgR.cuda(), disp_true.cuda()
    # Valid pixels: ground truth below the 192-disparity maximum.
    mask = disp_true < 192

    # Pad height (top) and width (right) up to the next multiple of 16.
    if imgL.shape[2] % 16 != 0:
        times = imgL.shape[2] // 16
        top_pad = (times + 1) * 16 - imgL.shape[2]
    else:
        top_pad = 0
    if imgL.shape[3] % 16 != 0:
        times = imgL.shape[3] // 16
        right_pad = (times + 1) * 16 - imgL.shape[3]
    else:
        right_pad = 0
    imgL = F.pad(imgL, (0, right_pad, top_pad, 0))
    imgR = F.pad(imgR, (0, right_pad, top_pad, 0))

    with torch.no_grad():
        output3 = model(imgL, imgR)
        output3 = torch.squeeze(output3)

    # Remove the top padding that was added before inference.
    if top_pad != 0:
        img = output3[:, top_pad:, :]
    else:
        img = output3

    if len(disp_true[mask]) == 0:
        # No valid ground-truth pixels. Return a plain 0 here: the old code
        # set `loss = 0` (an int) and then called loss.data.cpu(), which
        # raised AttributeError.
        return 0, img[mask]
    loss = torch.mean(torch.abs(img[mask] - disp_true[mask]))  # end-point-error
    return loss.data.cpu(), img[mask]
class BasicBlock(nn.Module):
    """Residual block: two convbn layers plus a (possibly projected) skip."""
    expansion = 1

    def __init__(self, inplanes, planes, stride, downsample, pad, dilation):
        super(BasicBlock, self).__init__()
        # The first conv carries the stride; ReLU only after the first conv.
        self.conv1 = nn.Sequential(
            convbn(inplanes, planes, 3, stride, pad, dilation),
            nn.ReLU(inplace=True))
        self.conv2 = convbn(planes, planes, 3, 1, pad, dilation)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        y = self.conv2(self.conv1(x))
        # Project the identity branch when a downsample module is given.
        shortcut = x if self.downsample is None else self.downsample(x)
        y += shortcut
        return y
class hourglass(nn.Module):
    """3D-convolutional hourglass (encoder/decoder) used to aggregate a
    cost volume: two stride-2 downsampling stages (conv1, conv3) followed
    by two transposed-conv upsampling stages (conv5, conv6), with optional
    skip inputs `presqu`/`postsqu` coming from a previous hourglass."""

    def __init__(self, inplanes):
        super(hourglass, self).__init__()
        self.conv1 = nn.Sequential(convbn_3d(inplanes, inplanes*2, kernel_size=3, stride=2, pad=1),
                                   nn.ReLU(inplace=True))
        self.conv2 = convbn_3d(inplanes*2, inplanes*2, kernel_size=3, stride=1, pad=1)
        self.conv3 = nn.Sequential(convbn_3d(inplanes*2, inplanes*2, kernel_size=3, stride=2, pad=1),
                                   nn.ReLU(inplace=True))
        self.conv4 = nn.Sequential(convbn_3d(inplanes*2, inplanes*2, kernel_size=3, stride=1, pad=1),
                                   nn.ReLU(inplace=True))
        # conv5 keeps 2*inplanes channels (its output is added to conv2's);
        # conv6 returns to inplanes channels (its output is added to x by
        # the caller, per the original trailing comments).
        self.conv5 = nn.Sequential(nn.ConvTranspose3d(inplanes*2, inplanes*2, kernel_size=3, padding=1, output_padding=1, stride=2,bias=False),
                                   nn.BatchNorm3d(inplanes*2)) #+conv2
        self.conv6 = nn.Sequential(nn.ConvTranspose3d(inplanes*2, inplanes, kernel_size=3, padding=1, output_padding=1, stride=2,bias=False),
                                   nn.BatchNorm3d(inplanes)) #+x

    def forward(self, x ,presqu, postsqu):
        """Returns (out, pre, post): the block output plus the two
        intermediate tensors a subsequent hourglass can consume as its
        presqu/postsqu skip inputs."""
        out = self.conv1(x) #in:1/4 out:1/8
        pre = self.conv2(out) #in:1/8 out:1/8
        # Fuse the previous hourglass's `postsqu` skip when provided.
        if postsqu is not None:
            pre = F.relu(pre + postsqu, inplace=True)
        else:
            pre = F.relu(pre, inplace=True)
        out = self.conv3(pre) #in:1/8 out:1/16
        out = self.conv4(out) #in:1/16 out:1/16
        # Fuse `presqu` if provided, otherwise use this block's own `pre`.
        if presqu is not None:
            post = F.relu(self.conv5(out)+presqu, inplace=True) #in:1/16 out:1/8
        else:
            post = F.relu(self.conv5(out)+pre, inplace=True)
        out = self.conv6(post) #in:1/8 out:1/4
        return out, pre, post
class disparityregression(nn.Module):
    """Soft-argmin disparity regression: the expectation of the disparity
    values 0..maxdisp-1 under the per-pixel probabilities in `x`."""

    def __init__(self, maxdisp):
        super(disparityregression, self).__init__()
        # Constant (1, maxdisp, 1, 1) tensor of the disparity values,
        # kept on the GPU and excluded from gradient computation.
        values = np.reshape(np.array(range(maxdisp)), [1, maxdisp, 1, 1])
        self.disp = Variable(torch.Tensor(values).cuda(), requires_grad=False)

    def forward(self, x):
        # Broadcast the disparity values over the batch and spatial dims,
        # then compute sum_d d * p(d) along the disparity dimension.
        bins = self.disp.repeat(x.size()[0], 1, x.size()[2], x.size()[3])
        return torch.sum(x * bins, 1)
def convbn_3d(in_planes, out_planes, kernel_size, stride, pad):
    """3D convolution (bias-free) followed by 3D batch normalisation."""
    conv = nn.Conv3d(in_planes, out_planes, kernel_size=kernel_size,
                     padding=pad, stride=stride, bias=False)
    return nn.Sequential(conv, nn.BatchNorm3d(out_planes))
def convbn(in_planes, out_planes, kernel_size, stride, pad, dilation):
    """2D convolution (bias-free) followed by batch normalisation.

    When dilation > 1 the padding is set equal to the dilation (which for
    a 3x3 kernel preserves the spatial size); otherwise the explicit pad
    value is used.
    """
    padding = dilation if dilation > 1 else pad
    conv = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size,
                     stride=stride, padding=padding, dilation=dilation,
                     bias=False)
    return nn.Sequential(conv, nn.BatchNorm2d(out_planes))
| 37.409449 | 177 | 0.565144 | 2,851 | 0.600084 | 0 | 0 | 0 | 0 | 0 | 0 | 147 | 0.030941 |
37b1af8d30379fc26514ca07544d691e2506df45 | 15,066 | py | Python | parseq/scripts/geoquery_geo880_basic_nar.py | saist1993/parseq | dce90d06d14ffbb0a471849f04c373a173475d3a | [
"MIT"
] | 1 | 2022-01-21T16:08:08.000Z | 2022-01-21T16:08:08.000Z | parseq/scripts/geoquery_geo880_basic_nar.py | saist1993/parseq | dce90d06d14ffbb0a471849f04c373a173475d3a | [
"MIT"
] | null | null | null | parseq/scripts/geoquery_geo880_basic_nar.py | saist1993/parseq | dce90d06d14ffbb0a471849f04c373a173475d3a | [
"MIT"
] | 1 | 2020-08-19T07:09:44.000Z | 2020-08-19T07:09:44.000Z | import os
import re
import sys
from functools import partial
from typing import *
import torch
import qelos as q
from allennlp.modules.seq2seq_encoders import PytorchSeq2SeqWrapper
from nltk import PorterStemmer
from torch.utils.data import DataLoader
# from funcparse.decoding import TransitionModel, TFActionSeqDecoder, LSTMCellTransition, BeamActionSeqDecoder, \
# GreedyActionSeqDecoder, TFTokenSeqDecoder
# from funcparse.grammar import FuncGrammar, passtr_to_pas
# from funcparse.states import FuncTreeState, FuncTreeStateBatch, BasicState, BasicStateBatch
# from funcparse.vocab import VocabBuilder, SentenceEncoder, FuncQueryEncoder
# from funcparse.nn import TokenEmb, PtrGenOutput, SumPtrGenOutput, BasicGenOutput
from parseq.decoding import SeqDecoder, TFTransition, FreerunningTransition, merge_metric_dicts
from parseq.eval import CELoss, SeqAccuracies, make_array_of_metrics, DerivedAccuracy
from parseq.grammar import prolog_to_pas
from parseq.nn import TokenEmb, BasicGenOutput
from parseq.states import DecodableState, BasicDecoderState, State
from parseq.tm import TransformerConfig, Transformer
from parseq.transitions import TransitionModel, LSTMCellTransition
from parseq.vocab import SequenceEncoder, Vocab
def stem_id_words(pas, idparents, stem=False, strtok=None):
    """Recursively walk a pas tree, tokenizing quoted string leaves.

    Leaves under a predicate listed in `idparents` that look like
    'single-quoted strings' are replaced by ("_str", strtok(content));
    all other leaves pass through unchanged. Returns a one-element list
    so results splice flat into a parent's child list.
    """
    if stem is True:
        # Stemming is only ever requested for leaves.
        assert not isinstance(pas, tuple)
    if isinstance(pas, tuple):
        # Internal node: children of an id-predicate are stemmed.
        stem_children = pas[0] in idparents
        flattened = []
        for child in pas[1]:
            flattened.extend(
                stem_id_words(child, idparents, stem=stem_children,
                              strtok=strtok))
        return [(pas[0], flattened)]
    # Leaf node.
    if stem is True:
        assert isinstance(pas, str)
        match = re.match(r"'([^']+)'", pas)
        if match:
            return [("_str", strtok(match.group(1)))]
    return [pas]
def pas2toks(pas):
    """Linearize a pas tree into a flat token list: "head(", children, ")".

    Leaves are returned as single tokens. A "@NAMELESS@" head is emitted
    as a bare "(" -- previously `ret[0] += "("` raised IndexError because
    the nameless branch produced an empty list. (Intent inferred from the
    original's attempt to drop the nameless head; confirm against
    prolog_to_pas output if "@NAMELESS@" nodes actually occur.)
    """
    if not isinstance(pas, tuple):
        return [pas]
    # Open the group: named heads become "name(", nameless ones just "(".
    ret = [pas[0] + "("] if pas[0] != "@NAMELESS@" else ["("]
    for child in pas[1]:
        ret += pas2toks(child)
    ret.append(")")
    return ret
def basic_query_tokenizer(x:str, strtok=None):
    """Tokenize a funql query string into a flat token sequence.

    Parses the query into a pas tree, tokenizes the quoted string
    arguments of the id-predicates with `strtok`, then linearizes the
    tree with pas2toks.

    :param x: funql query string, e.g. "answer(cityid('new york', _))"
    :param strtok: tokenizer applied to quoted id strings
    """
    pas = prolog_to_pas(x)
    # Predicates whose quoted string arguments get tokenized. (The original
    # also built an underscore-prefixed variant of this set which was
    # immediately overwritten; that dead assignment has been removed.)
    idpreds = set("cityid stateid countryid riverid placeid".split(" "))
    pas = stem_id_words(pas, idpreds, strtok=strtok)[0]
    return pas2toks(pas)
def try_basic_query_tokenizer():
    """Smoke-test: tokenize one geoquery example with a stemming strtok."""
    stemmer = PorterStemmer()
    query = "answer(cityid('new york', _))"
    tokens = basic_query_tokenizer(
        query, strtok=lambda s: [stemmer.stem(part) for part in s.split()])
    # print(tokens)
class GeoQueryDataset(object):
    """GeoQuery (geo880) dataset wrapper.

    Loads natural-language questions and funql queries, assigns each
    example to a train/test split via index files, builds the input and
    output vocabularies, and wraps every example in a BasicDecoderState.
    """
    def __init__(self,
                 p="../../datasets/geoquery/",
                 sentence_encoder:SequenceEncoder=None,
                 min_freq:int=2, **kw):
        """
        :param p: directory containing questions.txt, queries.funql and
            the train/test index files
        :param sentence_encoder: encoder for the natural-language inputs;
            its tokenizer is reused for quoted strings inside queries
        :param min_freq: minimum token frequency when finalizing vocabs
        """
        super(GeoQueryDataset, self).__init__(**kw)
        self.data = {}
        self.sentence_encoder = sentence_encoder
        questions = [x.strip() for x in open(os.path.join(p, "questions.txt"), "r").readlines()]
        queries = [x.strip() for x in open(os.path.join(p, "queries.funql"), "r").readlines()]
        trainidxs = set([int(x.strip()) for x in open(os.path.join(p, "train_indexes.txt"), "r").readlines()])
        testidxs = set([int(x.strip()) for x in open(os.path.join(p, "test_indexes.txt"), "r").readlines()])
        # Assign each example to a split based on the index files; warn on
        # examples that appear in neither.
        splits = [None]*len(questions)
        for trainidx in trainidxs:
            splits[trainidx] = "train"
        for testidx in testidxs:
            splits[testidx] = "test"
        if any([split == None for split in splits]):
            print(f"{len([split for split in splits if split == None])} examples not assigned to any split")
        # Query-side encoder: tokenizes funql, applying the sentence
        # tokenizer to quoted id strings, and appends an end token.
        self.query_encoder = SequenceEncoder(tokenizer=partial(basic_query_tokenizer, strtok=sentence_encoder.tokenizer), add_end_token=True)
        # build vocabularies (tokens only count as "seen" in the train split)
        for i, (question, query, split) in enumerate(zip(questions, queries, splits)):
            self.sentence_encoder.inc_build_vocab(question, seen=split=="train")
            self.query_encoder.inc_build_vocab(query, seen=split=="train")
        self.sentence_encoder.finalize_vocab(min_freq=min_freq)
        self.query_encoder.finalize_vocab(min_freq=min_freq)
        self.build_data(questions, queries, splits)

    def build_data(self, inputs:Iterable[str], outputs:Iterable[str], splits:Iterable[str]):
        """Wrap each (input, output) pair in a BasicDecoderState and bucket
        it under its split name in self.data."""
        for inp, out, split in zip(inputs, outputs, splits):
            state = BasicDecoderState([inp], [out], self.sentence_encoder, self.query_encoder)
            if split not in self.data:
                self.data[split] = []
            self.data[split].append(state)

    def get_split(self, split:str):
        # The proxy hands out copies of the stored states on access.
        return DatasetSplitProxy(self.data[split])

    @staticmethod
    def collate_fn(data:Iterable):
        """Zero-pad the gold and input tensors of a batch of states to the
        batch maximum length and merge them into one batched state."""
        goldmaxlen = 0
        inpmaxlen = 0
        data = [state.make_copy(detach=True, deep=True) for state in data]
        for state in data:
            goldmaxlen = max(goldmaxlen, state.gold_tensor.size(1))
            inpmaxlen = max(inpmaxlen, state.inp_tensor.size(1))
        for state in data:
            state.gold_tensor = torch.cat([
                state.gold_tensor,
                state.gold_tensor.new_zeros(1, goldmaxlen - state.gold_tensor.size(1))], 1)
            state.inp_tensor = torch.cat([
                state.inp_tensor,
                state.inp_tensor.new_zeros(1, inpmaxlen - state.inp_tensor.size(1))], 1)
        ret = data[0].merge(data)
        return ret

    def dataloader(self, split:str=None, batsize:int=5):
        """Return a DataLoader for `split`; when split is None, return a
        dict with one loader per split. Only training data is shuffled."""
        if split is None:    # return all splits
            ret = {}
            for split in self.data.keys():
                ret[split] = self.dataloader(batsize=batsize, split=split)
            return ret
        else:
            assert(split in self.data.keys())
            dl = DataLoader(self.get_split(split), batch_size=batsize, shuffle=split=="train",
                            collate_fn=GeoQueryDataset.collate_fn)
            return dl
def try_dataset():
    """Build the dataset and scan the train loader for duplicate examples."""
    tt = q.ticktock("dataset")
    tt.tick("building dataset")
    ds = GeoQueryDataset(sentence_encoder=SequenceEncoder(tokenizer=lambda x: x.split()))
    train_dl = ds.dataloader("train", batsize=19)
    test_dl = ds.dataloader("test", batsize=20)
    examples = set()
    examples_list = []
    duplicates = []
    for batch in train_dl:
        print(len(batch))
        for i in range(len(batch)):
            example = batch.inp_strings[i] + " --> " + batch.gold_strings[i]
            if example in examples:
                duplicates.append(example)
            examples.add(example)
            examples_list.append(example)
    print(f"duplicates within train: {len(duplicates)} from {len(examples_list)} total")
    tt.tock("dataset built")
class DatasetSplitProxy(object):
    """Dataset view over a list of states. __getitem__ hands out copies
    (via each item's make_copy()) so consumers cannot mutate the stored
    examples."""

    def __init__(self, data, **kw):
        super(DatasetSplitProxy, self).__init__(**kw)
        self.data = data

    def __getitem__(self, item):
        return self.data[item].make_copy()

    def __len__(self):
        return len(self.data)
class NARTMModel(TransitionModel):
    """Non-autoregressive transformer model: the input sequence is extended
    with `maxoutlen` placeholder tokens and the whole output sequence is
    predicted in a single transformer pass (no decoding loop)."""
    def __init__(self, tm, out, maxinplen=50, maxoutlen=50, numinpids:int=None, eval=tuple(), **kw):
        """
        :param tm: transformer run over input tokens + output placeholders
        :param out: output layer mapping hidden states to output-vocab scores
        :param maxinplen: number of position ids reserved for the input side
        :param maxoutlen: number of output placeholder positions appended
        :param numinpids: size of the input vocabulary; placeholder token
            ids start at this offset
        :param eval: metric callables applied to (outprobs, predactions, x)
        """
        super(NARTMModel, self).__init__(**kw)
        self.tm = tm
        self.out = out
        self.maxinplen = maxinplen
        self.maxoutlen = maxoutlen
        self._metrics = eval
        self._numinpids = numinpids

    def forward(self, x:State):
        inpseq = x.inp_tensor
        # Positions 0..inplen-1 for the real input tokens.
        position_ids = torch.arange(inpseq.size(1), dtype=torch.long, device=inpseq.device)[None, :].repeat(inpseq.size(0), 1)
        # Append maxoutlen placeholder tokens, offset beyond the input vocab
        # so each placeholder slot gets its own embedding.
        inpseq = torch.cat([inpseq, torch.arange(self.maxoutlen, dtype=inpseq.dtype, device=inpseq.device)[None, :].repeat(inpseq.size(0), 1)+self._numinpids], 1)
        # Placeholder positions start at maxinplen so they cannot collide
        # with input positions (assumes actual input length <= maxinplen).
        position_ids_out = torch.arange(self.maxoutlen, dtype=torch.long, device=inpseq.device)[None, :].repeat(inpseq.size(0), 1) + self.maxinplen
        position_ids = torch.cat([position_ids, position_ids_out], 1)
        attention_mask = (inpseq != 0)  # id 0 is padding (padding_idx=0 in create_model)
        y = self.tm(inpseq, attention_mask=attention_mask, position_ids=position_ids)
        outprobs = self.out(y[0])
        # Keep only the placeholder positions as output predictions.
        # NOTE(review): slicing at self.maxinplen only lines up with the
        # concatenation above when the input is padded to exactly maxinplen
        # tokens; with shorter inputs the placeholders start at
        # inpseq_len - maxoutlen instead. Confirm against the data pipeline.
        outprobs = outprobs[:, self.maxinplen:]
        _, predactions = outprobs.max(-1)
        metrics = [metric(outprobs, predactions, x) for metric in self._metrics]
        metrics = merge_metric_dicts(*metrics)
        return metrics, x
def create_model(hdim=128, dropout=0., numlayers:int=1, numheads:int=4,
                 sentence_encoder:SequenceEncoder=None,
                 query_encoder:SequenceEncoder=None,
                 feedatt=False, maxtime=100):
    """Build the non-autoregressive transformer model (NARTMModel).

    :param hdim: transformer hidden size
    :param maxtime: maximum output length; also used as the position-id offset
    NOTE(review): `feedatt` is accepted but not referenced in this function --
    confirm whether that is intentional.
    """
    # Reserve `maxtime` extra embedding rows for the placeholder output tokens.
    inpemb = torch.nn.Embedding(sentence_encoder.vocab.number_of_ids()+maxtime, hdim, padding_idx=0)
    # Map rare input tokens onto a shared "rare" id (id 1).
    inpemb = TokenEmb(inpemb, rare_token_ids=sentence_encoder.vocab.rare_ids, rare_id=1)
    tm_config = TransformerConfig(vocab_size=inpemb.emb.num_embeddings, num_attention_heads=numheads,
                                  num_hidden_layers=numlayers, hidden_size=hdim, intermediate_size=hdim*4,
                                  hidden_dropout_prob=dropout)
    tm = Transformer(tm_config)
    # Share the rare-token-aware embedding with the transformer.
    tm.embeddings.word_embeddings = inpemb
    decoder_out = BasicGenOutput(hdim, query_encoder.vocab)
    model = NARTMModel(tm, decoder_out, maxinplen=maxtime, maxoutlen=maxtime, numinpids=sentence_encoder.vocab.number_of_ids())
    return model
def do_rare_stats(ds:GeoQueryDataset):
    """Print, for train and test, the fraction of examples containing rare tokens."""
    def get_rare_portions(examples:List[State]):
        total = 0
        rare_in_question = 0
        rare_in_query = 0
        rare_in_both = 0
        rare_in_either = 0
        for example in examples:
            total += 1
            # Does the natural-language question contain any rare token?
            question_has_rare = len(set(example.inp_tokens[0])
                                    & example.sentence_encoder.vocab.rare_tokens) > 0
            # Does the gold query contain any rare token?
            query_has_rare = len(set(example.gold_tokens[0])
                                 & example.query_encoder.vocab.rare_tokens) > 0
            rare_in_question += int(question_has_rare)
            rare_in_query += int(query_has_rare)
            rare_in_both += int(question_has_rare and query_has_rare)
            rare_in_either += int(question_has_rare or query_has_rare)
        return (rare_in_question / total, rare_in_query / total,
                rare_in_both / total, rare_in_either / total)
    print("RARE STATS:::")
    print("training data:")
    ris, riq, rib, rie = get_rare_portions(ds.data["train"])
    print(f"\t In question: {ris} \n\t In query: {riq} \n\t In both: {rib} \n\t In either: {rie}")
    print("test data:")
    ris, riq, rib, rie = get_rare_portions(ds.data["test"])
    print(f"\t In question: {ris} \n\t In query: {riq} \n\t In both: {rib} \n\t In either: {rie}")
    return
def tensor2tree(x, D:Vocab=None):
    """Decode a 1D tensor of token ids into a LISP-ish tree string.

    Strips padding, truncates at the first end token, balances parentheses,
    and inserts commas between sibling tokens.
    """
    # x: 1D int tensor
    x = list(x.detach().cpu().numpy())
    x = [D(xe) for xe in x]
    x = [xe for xe in x if xe != D.padtoken]
    # find first @END@ and cut off
    parentheses_balance = 0
    for i in range(len(x)):
        if x[i] ==D.endtoken:
            x = x[:i]
            break
        elif x[i] == "(" or x[i][-1] == "(":
            # NOTE(review): x[i][-1] assumes every token is a non-empty string.
            parentheses_balance += 1
        elif x[i] == ")":
            parentheses_balance -= 1
        else:
            pass
    # balance parentheses
    # Append closers for any "(" still open ...
    while parentheses_balance > 0:
        x.append(")")
        parentheses_balance -= 1
    # ... and drop surplus ")" scanning from the right (index 0 is never removed).
    i = len(x) - 1
    while parentheses_balance < 0 and i > 0:
        if x[i] == ")":
            x.pop(i)
            parentheses_balance += 1
        i -= 1
    # introduce comma's
    # Insert "," between tokens, except right after an opener or before a closer.
    i = 1
    while i < len(x):
        if x[i-1][-1] == "(":
            pass
        elif x[i] == ")":
            pass
        else:
            x.insert(i, ",")
            i += 1
        i += 1
    return " ".join(x)
def run(lr=0.001,
        batsize=20,
        epochs=100,
        embdim=100,
        encdim=164,
        numlayers=4,
        numheads=4,
        dropout=.0,
        wreg=1e-10,
        cuda=False,
        gpu=0,
        minfreq=2,
        gradnorm=3000.,
        cosine_restarts=1.,
        ):
    """Train the non-autoregressive GeoQuery parser and evaluate on the test split.

    All keyword arguments are hyperparameters exposed on the command line via
    q.argprun (see the __main__ block).
    NOTE(review): `embdim` is accepted but not referenced in this function.
    """
    print(locals())
    tt = q.ticktock("script")
    device = torch.device("cpu") if not cuda else torch.device("cuda", gpu)
    tt.tick("loading data")
    # Stemmed whitespace tokenization for input sentences.
    stemmer = PorterStemmer()
    tokenizer = lambda x: [stemmer.stem(xe) for xe in x.split()]
    ds = GeoQueryDataset(sentence_encoder=SequenceEncoder(tokenizer=tokenizer), min_freq=minfreq)
    train_dl = ds.dataloader("train", batsize=batsize)
    test_dl = ds.dataloader("test", batsize=batsize)
    tt.tock("data loaded")
    do_rare_stats(ds)
    # batch = next(iter(train_dl))
    # print(batch)
    # print("input graph")
    # print(batch.batched_states)
    model = create_model(hdim=encdim, dropout=dropout, numlayers=numlayers, numheads=numheads,
                         sentence_encoder=ds.sentence_encoder, query_encoder=ds.query_encoder)
    # Loss plus element- and sequence-level accuracy metrics.
    model._metrics = [CELoss(ignore_index=0, mode="logprobs"),
                      SeqAccuracies()]
    losses = make_array_of_metrics("loss", "elem_acc", "seq_acc")
    vlosses = make_array_of_metrics("loss", "seq_acc")
    # 4. define optim
    optim = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=wreg)
    # optim = torch.optim.SGD(tfdecoder.parameters(), lr=lr, weight_decay=wreg)
    # lr schedule
    if cosine_restarts >= 0:
        # t_max = epochs * len(train_dl)
        t_max = epochs
        # NOTE(review): the scheduler steps once per epoch (on_end=reduce_lr),
        # so t_max == epochs; the "(epochs * len)" suffix of this message is misleading.
        print(f"Total number of updates: {t_max} ({epochs} * {len(train_dl)})")
        lr_schedule = q.WarmupCosineWithHardRestartsSchedule(optim, 0, t_max, cycles=cosine_restarts)
        reduce_lr = [lambda: lr_schedule.step()]
    else:
        reduce_lr = []
    # 6. define training function (using partial)
    clipgradnorm = lambda: torch.nn.utils.clip_grad_norm_(model.parameters(), gradnorm)
    trainbatch = partial(q.train_batch, on_before_optim_step=[clipgradnorm])
    trainepoch = partial(q.train_epoch, model=model, dataloader=train_dl, optim=optim, losses=losses,
                         _train_batch=trainbatch, device=device, on_end=reduce_lr)
    # 7. define validation function (using partial)
    validepoch = partial(q.test_epoch, model=model, dataloader=test_dl, losses=vlosses, device=device)
    # validepoch = partial(q.test_epoch, model=tfdecoder, dataloader=test_dl, losses=vlosses, device=device)
    # 7. run training
    tt.tick("training")
    q.run_training(run_train_epoch=trainepoch, run_valid_epoch=validepoch, max_epochs=epochs)
    tt.tock("done training")
if __name__ == '__main__':
    # Smoke-test the tokenizer; other manual checks are kept but disabled.
    try_basic_query_tokenizer()
    # try_build_grammar()
    # try_dataset()
q.argprun(run) | 38.238579 | 162 | 0.627174 | 4,991 | 0.331276 | 0 | 0 | 745 | 0.049449 | 0 | 0 | 2,223 | 0.147551 |
37b79a42787ab39d8a353cbbf500ada9ea93a8d8 | 78 | py | Python | oe_site/app/admin.py | WsinGithub/ChemECar_web | aad7d8b98ac82830ddff0ec9adb99efef66e40f8 | [
"MIT"
] | null | null | null | oe_site/app/admin.py | WsinGithub/ChemECar_web | aad7d8b98ac82830ddff0ec9adb99efef66e40f8 | [
"MIT"
] | null | null | null | oe_site/app/admin.py | WsinGithub/ChemECar_web | aad7d8b98ac82830ddff0ec9adb99efef66e40f8 | [
"MIT"
] | 1 | 2021-07-30T04:31:43.000Z | 2021-07-30T04:31:43.000Z | from django.contrib import admin
# Default admin configuration file generated by the Django framework
# Register your models here.
| 15.6 | 32 | 0.794872 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 54 | 0.6 |
37b7a0bdfbd839a755c886e06a53cba5251f183c | 4,244 | py | Python | Queen's Attack II.py | Swagatamkar/Python_HackerranK | f9d61b0461286f333b3624082d577943b906c311 | [
"MIT"
] | 3 | 2020-10-28T16:01:26.000Z | 2021-04-25T05:33:46.000Z | Queen's Attack II.py | Swagatamkar/Python_HackerranK | f9d61b0461286f333b3624082d577943b906c311 | [
"MIT"
] | null | null | null | Queen's Attack II.py | Swagatamkar/Python_HackerranK | f9d61b0461286f333b3624082d577943b906c311 | [
"MIT"
] | 2 | 2021-01-16T15:24:36.000Z | 2021-06-19T09:34:05.000Z | '''
Problem Statement: https://www.hackerrank.com/challenges/queens-attack-2/problem
@Coded by TSG, 2020
'''
import math
import os
import random
import re
import sys
# Complete the queensAttack function below.
def queensAttack(n, k, qr, qc, obs):
    """Count the squares a queen at (qr, qc) can attack on an n x n board.

    The queen attacks along the 8 compass directions; an obstacle blocks its
    own square and everything behind it.

    :param n: board size (rows/columns are 1-based, 1..n)
    :param k: number of obstacles (kept for interface compatibility;
              len(obs) is what actually matters)
    :param qr: queen's row
    :param qc: queen's column
    :param obs: iterable of (row, column) obstacle positions
    :return: number of attackable squares

    Replaces the original's eight near-duplicated, hard-to-verify per-direction
    branches with a single ray walk; obstacle lookups are O(1) via a set.
    """
    blocked = set(map(tuple, obs))
    attacked = 0
    # Rook moves followed by bishop moves.
    directions = ((1, 0), (-1, 0), (0, 1), (0, -1),
                  (1, 1), (1, -1), (-1, 1), (-1, -1))
    for dr, dc in directions:
        r, c = qr + dr, qc + dc
        # Walk outward until the edge of the board or the first obstacle.
        while 1 <= r <= n and 1 <= c <= n and (r, c) not in blocked:
            attacked += 1
            r += dr
            c += dc
    return attacked
if __name__ == '__main__':
    # HackerRank harness: reads board size, queen position and obstacles from
    # stdin; the answer is written to the file named by OUTPUT_PATH.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    nk = input().split()
    n = int(nk[0])
    k = int(nk[1])
    r_qC_q = input().split()
    r_q = int(r_qC_q[0])
    c_q = int(r_qC_q[1])
    obstacles = []
    for _ in range(k):
        obstacles.append(list(map(int, input().rstrip().split())))
    result = queensAttack(n, k, r_q, c_q, obstacles)
    fptr.write(str(result) + '\n')
    fptr.close()
| 33.15625 | 80 | 0.514138 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 290 | 0.068332 |
37b7e2ba7face03dd4882b67077f3424194c7773 | 271 | py | Python | full-problems/twiceCounter.py | vikas-t/DS-Algo | ea654d1cad5374c824c52da9d3815a9546eb43fa | [
"Apache-2.0"
] | null | null | null | full-problems/twiceCounter.py | vikas-t/DS-Algo | ea654d1cad5374c824c52da9d3815a9546eb43fa | [
"Apache-2.0"
] | null | null | null | full-problems/twiceCounter.py | vikas-t/DS-Algo | ea654d1cad5374c824c52da9d3815a9546eb43fa | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python3
# https://practice.geeksforgeeks.org/problems/twice-counter/0
def sol(words):
    """Return the number of distinct words that occur exactly twice in *words*.

    Uses collections.Counter instead of a hand-rolled frequency dict plus a
    second counting pass.
    """
    from collections import Counter  # local import keeps the script self-contained
    counts = Counter(words)
    return sum(1 for c in counts.values() if c == 2)
37b95409e84e525a09791b57d6c976475b723763 | 1,810 | py | Python | tests/test_player.py | ssichynskyi/lotti-karotti-calc | 44eb39ce4c4bc8ddf4049d72268597c6d7411f84 | [
"Apache-2.0"
] | null | null | null | tests/test_player.py | ssichynskyi/lotti-karotti-calc | 44eb39ce4c4bc8ddf4049d72268597c6d7411f84 | [
"Apache-2.0"
] | null | null | null | tests/test_player.py | ssichynskyi/lotti-karotti-calc | 44eb39ce4c4bc8ddf4049d72268597c6d7411f84 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import unittest
from logic.player import Player
class TestPlayer(unittest.TestCase):
    """
    Collection of unit tests for the Player class.
    """
    def setUp(self):
        # No shared fixtures: every test constructs its own Player.
        pass
    def test_player_init(self):
        # A new Player splits its rabbits into lost and active groups.
        player = Player(player_id=1, rabbits=2, active_rabbits=1, lost_rabbits=1)
        self.assertEqual(len(player.lost_rabbits), 1)
        self.assertEqual(player.lost_rabbits[0].player_id, 1)
        self.assertEqual(player.lost_rabbits[0].number, 1)
        print('lost rabbits are ok')
        self.assertEqual(len(player.active_rabbits), 1)
        self.assertEqual(player.active_rabbits[0].player_id, 1)
        self.assertEqual(player.active_rabbits[0].number, 2)
        self.assertEqual(len(player.ready_rabbits), 0)
    def test_player_reset(self):
        # reset_condition clears losses; all rabbits end up ready or active.
        player = Player(player_id=1, rabbits=2, active_rabbits=1, lost_rabbits=1)
        player.reset_condition()
        self.assertEqual(len(player.lost_rabbits), 0, 'number of lost rabbits expected 0')
        self.assertLessEqual(len(player.active_rabbits), 1, 'number of act. rabbits expected <= 1')
        self.assertLessEqual(len(player.ready_rabbits), 2, 'number of ready rabbits expected <= 2')
        self.assertEqual(len(player.ready_rabbits) + len(player.active_rabbits), 2)
    def test_player_drop_rabbit(self):
        # drop_active_rabbit moves the active rabbit into the lost group.
        player = Player(player_id=1, rabbits=2, active_rabbits=1, lost_rabbits=1)
        player.drop_active_rabbit()
        self.assertEqual(len(player.lost_rabbits), 2)
        self.assertEqual(len(player.active_rabbits), 0)
        self.assertEqual(len(player.ready_rabbits), 0)
    def test_player_out_of_the_game(self):
        # A Player with no rabbits is inactive and has no active rabbit.
        player = Player(player_id=1, rabbits=0)
        self.assertFalse(player.is_active)
        self.assertEqual(player.get_active_rabbit(), None)
| 42.093023 | 99 | 0.69337 | 1,735 | 0.958564 | 0 | 0 | 0 | 0 | 0 | 0 | 212 | 0.117127 |
37bb4353b2134183f088de3f1a1110070cfbceae | 1,948 | py | Python | listcord/autoposter.py | Rishiraj0100/listcord.py | d24daeace1b38f54343f8a4c6c772f0df51e6682 | [
"MIT"
] | null | null | null | listcord/autoposter.py | Rishiraj0100/listcord.py | d24daeace1b38f54343f8a4c6c772f0df51e6682 | [
"MIT"
] | null | null | null | listcord/autoposter.py | Rishiraj0100/listcord.py | d24daeace1b38f54343f8a4c6c772f0df51e6682 | [
"MIT"
] | null | null | null | from typing import Callable, Mapping, TypedDict
import asyncio, aiohttp, discord
class Options(TypedDict):
  """AutoPoster configuration.

  interval: delay between posts (minimum 900000; see AutoPoster.__init__)
  start:    whether posting begins immediately
  """
  interval: int
  start: bool
class AutoPoster():
  """Periodically posts the bot's guild count to the Listcord API.

  Handlers for the 'post' and 'error' events can be registered through on().
  """
  token: str                       # Listcord API token
  interval: int                    # delay between posts (>= 900000)
  bot: discord.Client
  stopped: bool                    # True while the init() loop is paused
  _events: Mapping[str, Callable]

  def __init__(self, token: str, bot: discord.Client, options: Options = None):
    """
    :param token: Listcord API token.
    :param bot: client whose guild count gets posted.
    :param options: optional settings; defaults to interval=900000, start=True.
    :raises TypeError: if the interval is not an int or is below 900000.
    """
    self.token = token
    self.bot = bot
    # Bug fix: the original used a mutable dict literal as the default argument
    # (shared across instances) and mutated the caller's dict in place.
    # A None sentinel plus a defensive copy avoids both problems.
    options = dict(options) if options is not None else {}
    if 'interval' not in options: options['interval'] = 900000
    else:
      if not isinstance(options['interval'], int) or options['interval'] < 900000: raise TypeError('Invalid interval duration!')
    if 'start' not in options: options['start'] = True
    self.interval = options['interval']
    self.stopped = not options['start']

    # Default no-op event handlers; replaced via on().
    def on_post(_data) -> None:
      return None
    def on_error(_data) -> None:
      return None
    self._events = {
      'post': on_post,
      'error': on_error
    }

  def on(self, event: str) -> Callable[[Callable], None]:
    """Return a decorator that registers *callback* for *event* ('post'/'error')."""
    def add_listener(callback: Callable):
      self._events[event] = callback
    return add_listener

  def emit(self, event: str, data):
    """Invoke the handler registered for *event* with *data* (no-op if unknown)."""
    if event in self._events: self._events[event](data)

  async def init(self):
    """Post stats in a loop until stop() is called.

    NOTE(review): asyncio.sleep takes seconds while the interval reads like
    milliseconds -- confirm the intended unit.
    """
    while not self.stopped:
      async with aiohttp.ClientSession() as session:
        async with session.post(f"https://listcord.gg/api/bot/{self.bot.user.id}/stats" , headers = {'Authorization' : self.token}, json = {'server_count': len(self.bot.guilds)}) as result:
          if result.status != 200: self.emit('error', await result.json())
          else: self.emit('post', await result.json())
      await asyncio.sleep(self.interval)

  def start(self):
    """Allow the init() loop to (continue to) run."""
    self.stopped = False

  def stop(self):
    """Ask the init() loop to exit after the current iteration."""
    self.stopped = True
| 29.074627 | 197 | 0.589322 | 1,863 | 0.956366 | 0 | 0 | 0 | 0 | 508 | 0.26078 | 226 | 0.116016 |
37be218d785e6cf86d6783fafbd6ad3d59dda370 | 402 | py | Python | sols/190.py | Paul11100/LeetCode | 9896c579dff1812c0c76964db8d60603ee715e35 | [
"MIT"
] | null | null | null | sols/190.py | Paul11100/LeetCode | 9896c579dff1812c0c76964db8d60603ee715e35 | [
"MIT"
] | null | null | null | sols/190.py | Paul11100/LeetCode | 9896c579dff1812c0c76964db8d60603ee715e35 | [
"MIT"
] | null | null | null | class Solution:
# Reverse Format String (Accepted), O(1) time and space
def reverseBits(self, n: int) -> int:
s = '{:032b}'.format(n)[::-1]
return int(s, 2)
# Bit Manipulation (Top Voted), O(1) time and space
def reverseBits(self, n: int) -> int:
ans = 0
for i in range(32):
ans = (ans << 1) + (n & 1)
n >>= 1
return ans
| 28.714286 | 59 | 0.502488 | 401 | 0.997512 | 0 | 0 | 0 | 0 | 0 | 0 | 115 | 0.28607 |
37be7c7088612389d0a2fe0241112ba65382ad0d | 2,690 | py | Python | toolbox/database.py | AntoineOrsoni/running-to-the-moon | 8e2934b0aadf9c1f0f8dd9417f00e014be473b1a | [
"MIT"
] | null | null | null | toolbox/database.py | AntoineOrsoni/running-to-the-moon | 8e2934b0aadf9c1f0f8dd9417f00e014be473b1a | [
"MIT"
] | null | null | null | toolbox/database.py | AntoineOrsoni/running-to-the-moon | 8e2934b0aadf9c1f0f8dd9417f00e014be473b1a | [
"MIT"
] | null | null | null | import sqlite3
import contextlib
import json
from ast import literal_eval
# Execute a single statement
def execute_statement(command: str, filter: tuple):
    """Run one parameterized SQL statement against sqlite/statistics.db.

    Commits on success, rolls back on error; the connection is always closed.
    """
    connection = sqlite3.connect('sqlite/statistics.db')
    try:
        # The connection context manager commits/rolls back the transaction.
        with connection, contextlib.closing(connection.cursor()) as cursor:
            cursor.execute(command, filter)
    finally:
        connection.close()
# Fetch one output from the database
def fetch_one(command: str, filter: tuple) -> tuple:
    """Run a parameterized query and return the first result row (or None)."""
    connection = sqlite3.connect('sqlite/statistics.db')
    try:
        with connection, contextlib.closing(connection.cursor()) as cursor:
            cursor.execute(command, filter)
            return cursor.fetchone()
    finally:
        connection.close()
# Add an `output` in the `statistics` table
def add_output(output: str, type: str, week: int, current_time: str):
    """Insert one row into the `statistics` table.

    :param output: stringified payload to store
    :param type: statistic type label (NOTE: parameter shadows builtin `type`)
    :param week: week number the statistic belongs to
    :param current_time: timestamp string stored with the row
    """
    command = 'INSERT INTO statistics VALUES (?,?,?,?)'
    # Renamed from `filter` to avoid shadowing the builtin.
    params = (output, type, week, current_time)
    execute_statement(command, params)
# Get the older timestamp for a given week
# All test from the same batch will have the same timestamp.
def get_oldest_timestamp(week: int):
    """Return the timestamp of the top row for *week* ordered by timestamp DESC.

    All rows from one batch share a timestamp, so one row suffices.
    NOTE(review): DESC ordering yields the *latest* timestamp despite the
    function's name -- confirm which is intended.
    """
    command = ''' SELECT * FROM statistics
                WHERE week = (?)
                ORDER BY timestamp DESC
                LIMIT 1'''
    # Single-element tuple of bind parameters (renamed from `filter`, which
    # shadowed the builtin).
    params = (str(week),)
    # Row layout: (output, type, week, timestamp) -> index 3 is the timestamp.
    return fetch_one(command, params)[3]
# Return the output for a specific test, oldest timestamp -- Only the `output` of the `type`
# Sample output
# {json}
def get_output_type(type: str, week: int):
    """Return (parsed_output, timestamp) for the given type/week at the batch timestamp.

    :param type: statistic type label (NOTE: parameter shadows builtin `type`)
    :param week: week number to look up
    :raises ValueError: if the stored output is empty
    """
    timestamp = get_oldest_timestamp(week)
    command = ''' SELECT * FROM statistics
                WHERE type = (?) AND week = (?) AND timestamp = (?) '''
    # Renamed from `filter` to avoid shadowing the builtin.
    params = (type, week, timestamp)
    output = fetch_one(command, params)[0]
    if output is not None:
        # literal_eval turns the stored single-quoted dict string back into a dict.
        return (literal_eval(output), timestamp)
    else:
        raise ValueError( f"output is empty. Check we have an output for :\n"
                            f" - week = {week},\n"
                            f" - type = {type},\n"
                            f" - timestamp = {timestamp}")
37bec73bf2cdf3c3cc14ccc4bc55a33295e038f0 | 457 | py | Python | Challenge 1/solution.py | Rishit-dagli/Google-FooBar | 85dc2dbb880bffa4568fa84657fc0d1bcd6b8974 | [
"Apache-2.0"
] | 5 | 2020-06-05T18:53:48.000Z | 2021-02-05T11:24:04.000Z | Challenge 1/solution.py | RahulSundar/Google-FooBar | e2673928ab3dde2e471c0f2d77b46e398ba0ec08 | [
"Apache-2.0"
] | null | null | null | Challenge 1/solution.py | RahulSundar/Google-FooBar | e2673928ab3dde2e471c0f2d77b46e398ba0ec08 | [
"Apache-2.0"
] | 2 | 2020-06-05T18:53:51.000Z | 2020-06-07T15:57:43.000Z | def count_frequency(a):
freq = dict()
for items in a:
freq[items] = a.count(items)
return freq
def solution(data, n):
frequency = count_frequency(data)
for key, value in frequency.items():
if value > n:
data = list(filter(lambda a: a != key, data))
return data
print(solution([1, 2, 3], 0))
print(solution([5, 10, 15, 10, 7], 1))
print(solution([1, 2, 2, 3, 3, 3, 4, 5, 5], 1))
| 26.882353 | 58 | 0.54267 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
37beddf03942364ffb2a1d3a16da68be3e1b7773 | 1,412 | py | Python | tdd_busca_animal/setup/tests.py | Jefferson472/apredendo-django | c01817d93493f588c1a3462c6e153cbbc3230508 | [
"MIT"
] | null | null | null | tdd_busca_animal/setup/tests.py | Jefferson472/apredendo-django | c01817d93493f588c1a3462c6e153cbbc3230508 | [
"MIT"
] | null | null | null | tdd_busca_animal/setup/tests.py | Jefferson472/apredendo-django | c01817d93493f588c1a3462c6e153cbbc3230508 | [
"MIT"
] | null | null | null | from django.test import LiveServerTestCase
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from animais.models import Animal
class AnimaisTestCase(LiveServerTestCase):
def setUp(self):
chrome_options = Options()
chrome_options.add_argument('--headless')
self.browser = webdriver.Chrome(executable_path='chromedriver.exe', chrome_options=chrome_options)
self.animal = Animal.objects.create(
nome_animal='Leão',
predador='Sim',
venenoso='Não',
domestico='Não'
)
def tearDown(self) -> None:
self.browser.quit()
def test_busca_animal(self):
"""Teste se um usuário pode buscar um animal pelo nome"""
home_page = self.browser.get(self.live_server_url)
brand_element = self.browser.find_element_by_css_selector('.navbar')
self.assertEqual('Busca Animal', brand_element.text)
buscar_animal_input = self.browser.find_element_by_css_selector('input#buscar-animal')
self.assertEqual(buscar_animal_input.get_attribute('placeholder'), "Exemplo: leão, urso...")
buscar_animal_input.send_keys('leão')
self.browser.find_element_by_css_selector('form button').click()
caracteristicas = self.browser.find_elements_by_css_selector('.result-description')
self.assertGreater(len(caracteristicas), 3)
| 38.162162 | 106 | 0.703258 | 1,253 | 0.883639 | 0 | 0 | 0 | 0 | 0 | 0 | 235 | 0.165726 |
37bfd7a583e97429e831b74add6932d378244f17 | 242 | py | Python | python/app/logconfig.py | brandond/obra-hacks | df451c6c6cd78b48f6e32bbd102a8e8a6bd77cb3 | [
"Apache-2.0"
] | null | null | null | python/app/logconfig.py | brandond/obra-hacks | df451c6c6cd78b48f6e32bbd102a8e8a6bd77cb3 | [
"Apache-2.0"
] | null | null | null | python/app/logconfig.py | brandond/obra-hacks | df451c6c6cd78b48f6e32bbd102a8e8a6bd77cb3 | [
"Apache-2.0"
] | null | null | null | import logging
import os
# Configure root logging once at import time; the level comes from the
# LOG_LEVEL environment variable (default INFO).
logging.basicConfig(level=os.environ.get('LOG_LEVEL', 'INFO'), format='[python %(name)s pid: %(process)d] %(levelname)s: %(message)s')
# Module-level logger; records that this module was imported.
logger = logging.getLogger(__name__)
logger.info('{} imported'.format(__name__))
| 34.571429 | 134 | 0.731405 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 93 | 0.384298 |
37c2edca7854e044a40725c0eb4b1d2f0013ee57 | 824 | py | Python | xunit-autolabeler-v2/ast_parser/python/test_data/parser/nested_tags/nested_tags.py | GoogleCloudPlatform/repo-automation-playground | a4c8f104c246ede002f6c18fcebfc0496c8abb94 | [
"Apache-2.0"
] | 5 | 2019-07-11T17:35:44.000Z | 2021-10-09T01:49:04.000Z | xunit-autolabeler-v2/ast_parser/python/test_data/parser/nested_tags/nested_tags.py | GoogleCloudPlatform/repo-automation-playground | a4c8f104c246ede002f6c18fcebfc0496c8abb94 | [
"Apache-2.0"
] | 36 | 2019-08-27T18:20:21.000Z | 2022-01-12T21:29:00.000Z | xunit-autolabeler-v2/ast_parser/python/test_data/parser/nested_tags/nested_tags.py | GoogleCloudPlatform/repo-automation-playground | a4c8f104c246ede002f6c18fcebfc0496c8abb94 | [
"Apache-2.0"
] | 13 | 2019-10-30T19:39:51.000Z | 2021-04-04T09:31:52.000Z | # Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START root_tag]
# [START nested_tag]
def nested_method():
    # Fixture: a method inside both the root_tag and nested_tag regions.
    return 'nested'
# [END nested_tag]
def root_method():
    # Fixture: a method inside the root_tag region only.
    return 'root'
# [END root_tag]
# [START root_tag]
def another_root_method():
    # Fixture: a second region reusing the root_tag name.
    return 'another root'
# [END root_tag]
| 25.75 | 74 | 0.734223 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 696 | 0.84466 |
37c38ec1d818bae8f209aa11a156c96c83dbe54c | 3,470 | py | Python | external/vcm/tests/test_xarray_utils.py | jacnugent/fv3net | 84958651bdd17784fdab98f87ad0d65414c03368 | [
"MIT"
] | 5 | 2021-03-20T22:42:40.000Z | 2021-06-30T18:39:36.000Z | external/vcm/tests/test_xarray_utils.py | jacnugent/fv3net | 84958651bdd17784fdab98f87ad0d65414c03368 | [
"MIT"
] | 195 | 2021-09-16T05:47:18.000Z | 2022-03-31T22:03:15.000Z | external/vcm/tests/test_xarray_utils.py | ai2cm/fv3net | e62038aee0a97d6207e66baabd8938467838cf51 | [
"MIT"
] | 1 | 2021-06-16T22:04:24.000Z | 2021-06-16T22:04:24.000Z | import dask
import numpy as np
import pytest
import xarray as xr
from vcm.xarray_utils import (
_repeat_dataarray,
assert_identical_including_dtype,
isclose,
repeat,
)
@pytest.mark.parametrize("use_dask", [False, True])
@pytest.mark.parametrize(
    ("a", "b", "expected", "kwargs"),
    [
        ([1.0], [1.0 + 1.0e-4], [False], {}),
        ([1.0], [1.0 + 1.0e-9], [True], {}),
        ([1.0], [1.0 + 1.0e-4], [True], {"rtol": 1.0e-3}),
        ([1.0], [1.0 + 1.0e-4], [True], {"atol": 1.0e-3}),
        ([np.nan], [np.nan], [False], {}),
        ([np.nan], [np.nan], [True], {"equal_nan": True}),
        ([1.0], [1.0], [True], {}),
        ([1.0], [1.0 + 1.0e-9], [False], {"rtol": 1.0e-10, "atol": 1.0e-10}),
    ],
)
def test_isclose(use_dask, a, b, expected, kwargs):
    """isclose honours rtol/atol/equal_nan for numpy- and dask-backed arrays."""
    a = xr.DataArray(a)
    b = xr.DataArray(b)
    if use_dask:
        a = a.chunk()
        b = b.chunk()
    expected = xr.DataArray(expected)
    result = isclose(a, b, **kwargs)
    assert_identical_including_dtype(result, expected)
@pytest.mark.parametrize(
    "dim", ["x", "not_a_dim"], ids=["dim in DataArray", "dim absent from DataArray"]
)
@pytest.mark.parametrize("use_dask", [False, True])
def test__repeat_dataarray(dim, use_dask):
    """_repeat_dataarray repeats along an existing dim and is a no-op otherwise."""
    da = xr.DataArray([1, 2, 3], dims=["x"], coords=[[1, 2, 3]])
    if use_dask:
        da = da.chunk()
    if dim == "x":
        expected = xr.DataArray([1, 1, 2, 2, 3, 3], dims=["x"])
    else:
        # Repeating over a missing dim must leave the array unchanged.
        expected = da.copy(deep=True)
    result = _repeat_dataarray(da, 2, dim)
    if use_dask:
        # Laziness must be preserved for dask-backed inputs.
        assert isinstance(result.data, dask.array.Array)
    assert_identical_including_dtype(result, expected)
@pytest.mark.parametrize("object_type", ["Dataset", "DataArray"])
@pytest.mark.parametrize("dim", ["x", "z"], ids=["dim present", "dim not present"])
@pytest.mark.parametrize(
    ("expected_foo_data", "repeats"),
    [([1, 1, 2, 2, 3, 3], 2), ([1, 1, 2, 3], [2, 1, 1])],
    ids=["integer repeats argument", "array repeats argument"],
)
def test_repeat(object_type, dim, expected_foo_data, repeats):
    """repeat handles int/array repeats and raises for a dim the object lacks."""
    foo = xr.DataArray([1, 2, 3], dims=["x"], coords=[[1, 2, 3]], name="foo")
    bar = xr.DataArray([1, 2, 3], dims=["y"], coords=[[1, 2, 3]], name="bar")
    expected_foo = xr.DataArray(expected_foo_data, dims=["x"], name="foo")
    # Variables without the repeated dim must pass through unchanged.
    expected_bar = bar.copy(deep=True)
    if object_type == "Dataset":
        obj = xr.merge([foo, bar])
        expected = xr.merge([expected_foo, expected_bar])
    else:
        obj = foo
        expected = expected_foo
    if dim == "x":
        result = repeat(obj, repeats, dim)
        assert_identical_including_dtype(result, expected)
    else:
        with pytest.raises(ValueError, match="Cannot repeat over 'z'"):
            repeat(obj, 2, dim)
@pytest.mark.parametrize("object_type", ["DataArray", "Dataset"])
def test_assert_identical_including_dtype(object_type):
    """The assertion accepts identical objects and rejects dtype-only differences."""
    a = xr.DataArray([1, 2], dims=["x"], coords=[[0, 1]], name="foo")
    b = xr.DataArray([1, 2], dims=["x"], coords=[[0, 1]], name="foo")
    # c differs from b only in the data dtype; d only in the coordinate dtype.
    c = a.astype("float64")
    d = a.copy(deep=True)
    d["x"] = d.x.astype("float64")
    if object_type == "Dataset":
        a = a.to_dataset()
        b = b.to_dataset()
        c = c.to_dataset()
        d = d.to_dataset()
    assert_identical_including_dtype(a, b)
    with pytest.raises(AssertionError):
        assert_identical_including_dtype(c, b)
    with pytest.raises(AssertionError):
        assert_identical_including_dtype(d, b)
| 30.982143 | 84 | 0.584438 | 0 | 0 | 0 | 0 | 3,273 | 0.943228 | 0 | 0 | 443 | 0.127666 |
37c68daf5d38e300ea366a9d35adffa55a0cf4c8 | 261 | py | Python | Python3/Exercises/DictionaryMethods/dictionary_methods.py | norbertosanchezdichi/TIL | 2e9719ddd288022f53b094a42679e849bdbcc625 | [
"MIT"
] | null | null | null | Python3/Exercises/DictionaryMethods/dictionary_methods.py | norbertosanchezdichi/TIL | 2e9719ddd288022f53b094a42679e849bdbcc625 | [
"MIT"
] | null | null | null | Python3/Exercises/DictionaryMethods/dictionary_methods.py | norbertosanchezdichi/TIL | 2e9719ddd288022f53b094a42679e849bdbcc625 | [
"MIT"
] | null | null | null | inventory = {'croissant': 19, 'bagel': 4, 'muffin': 8, 'cake': 1}
print(f'{inventory =}')
# copy() creates a shallow, independent dict: later edits leave inventory intact.
stock_list = inventory.copy()
print(f'{stock_list =}')
# Plain item assignment adds a brand-new key.
stock_list['hot cheetos'] = 25
# update() merges another mapping into the dict in place.
stock_list.update({'cookie' : 18})
# pop() removes the key and returns its value.
stock_list.pop('cake')
print(f'{stock_list =}') | 23.727273 | 65 | 0.655172 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 109 | 0.417625 |
37c6e4738bf1e11ca37459fa1acd1186d5fbaed3 | 3,337 | py | Python | libs/DuckDuckGoImages.py | Valken/FenrirScreenshotManager | 1a93784a5710098e37fbab9da458788e8fb1616d | [
"MIT"
] | 7 | 2021-07-30T10:58:04.000Z | 2022-02-20T16:38:50.000Z | libs/DuckDuckGoImages.py | Valken/FenrirScreenshotManager | 1a93784a5710098e37fbab9da458788e8fb1616d | [
"MIT"
] | 3 | 2021-08-03T05:45:27.000Z | 2021-11-17T22:30:33.000Z | libs/DuckDuckGoImages.py | Valken/FenrirScreenshotManager | 1a93784a5710098e37fbab9da458788e8fb1616d | [
"MIT"
] | 1 | 2021-08-04T11:44:07.000Z | 2021-08-04T11:44:07.000Z | #
# Direction modification of the original code by https://github.com/JorgePoblete/DuckDuckGoImages
# - Added use_name and target_resolution
#
import re
import io
import os
import json
import uuid
import shutil
import random
import requests
from PIL import Image
def download(query, folder='.', max_urls=None, thumbnails=False,
            shuffle=False, remove_folder=False, use_name=None,
            target_resolution=None):
    """Search DuckDuckGo for *query* and download the images into *folder*.

    :return: number of images successfully written.
    """
    fetch = get_image_thumbnails_urls if thumbnails else get_image_urls
    urls = fetch(query)
    if shuffle:
        random.shuffle(urls)
    # Trim the candidate list before downloading anything.
    if max_urls is not None and len(urls) > max_urls:
        urls = urls[:max_urls]
    if remove_folder:
        _remove_folder(folder)
    _create_folder(folder)
    return _download_urls(urls, folder, use_name, target_resolution)
def _download(url, folder, use_name, target_resolution):
    """Download one image into *folder*; True on success, False on any failure.

    :param use_name: explicit file stem, or None for a random hex name
    :param target_resolution: optional (width, height) to resize to
    """
    try:
        filename = str(uuid.uuid4().hex)
        if use_name:
            filename = use_name
        # If the chosen name (explicit or random) is taken, fall back to
        # fresh random names until a free one is found.
        while os.path.exists("{}/{}.jpg".format(folder, filename)):
            filename = str(uuid.uuid4().hex)
        response = requests.get(url, stream=True, timeout=1.0, allow_redirects=True)
        with Image.open(io.BytesIO(response.content)) as im:
            with open("{}/{}.jpg".format(folder, filename), 'wb') as out_file:
                if target_resolution:
                    im = im.resize(target_resolution)
                im.save(out_file)
        return True
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit still
        # propagate; any network/decoding/IO failure reports an unsuccessful download.
        return False
def _download_urls(urls, folder, use_name, target_resolution):
    """Download every URL; return how many succeeded.

    :param use_name: None, a single name stem, or a list of stems parallel to *urls*
    """
    downloaded = 0
    for index, url in enumerate(urls):
        # A list supplies one name per URL; anything else is used for every file.
        # (Fixed the odd `isinstance(...) == 1` comparison.)
        filename = use_name[index] if isinstance(use_name, list) else use_name
        if _download(url, folder, filename, target_resolution):
            downloaded += 1
    return downloaded
def get_image_urls(query):
    """Return full-size image URLs for a DuckDuckGo image search."""
    return _fetch_search_urls(query, _fetch_token(query))
def get_image_thumbnails_urls(query):
    """Return thumbnail URLs for a DuckDuckGo image search."""
    return _fetch_search_urls(query, _fetch_token(query), what="thumbnail")
def _fetch_token(query, URL="https://duckduckgo.com/"):
    """Obtain the `vqd` search token DuckDuckGo requires; '' when unavailable."""
    res = requests.post(URL, data={'q': query})
    if res.status_code != 200:
        return ""
    # The token is embedded in the page as vqd='<digits-and-dashes>'.
    match = re.search(r"vqd='([\d-]+)'", res.text, re.M|re.I)
    return match.group(1) if match is not None else ""
def _fetch_search_urls(query, token, URL="https://duckduckgo.com/", what="image"):
    """Page through DuckDuckGo's image-search JSON API and collect result URLs.

    :param token: vqd token from _fetch_token
    :param what: which field to collect from each result ('image' or 'thumbnail')
    :return: list of URLs; partial results are returned on any non-200 response

    The original duplicated the request/parse/collect logic for the first page
    and the pagination loop; this version has a single loop body.
    """
    params = {
        "vqd": token,
        "q": query,
        "l": "wt-wt",
        "o": "json",
        "f": ",,,",
        "p": "2"
    }
    urls = []
    # The first page lives at i.js; each response names the next page, if any.
    path = "i.js"
    while True:
        res = requests.get(URL + path, params=params)
        if res.status_code != 200:
            return urls
        data = json.loads(res.text)
        urls.extend(result[what] for result in data["results"])
        if "next" not in data:
            return urls
        path = data["next"]
def _remove_folder(folder):
if os.path.exists(folder):
shutil.rmtree(folder, ignore_errors=True)
def _create_folder(folder):
if not os.path.exists(folder):
os.makedirs(folder)
| 28.521368 | 97 | 0.614924 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 337 | 0.100989 |
37c8a3b855ae67e8b55b1960188797f0c90fea95 | 592 | py | Python | Hackerrank/sherlockAndCost.py | nandani99/Hacktoberfest-1 | 83cdd9f6b5538fa266d0617d53409852111c89b5 | [
"MIT"
] | 255 | 2018-10-18T18:42:58.000Z | 2022-02-02T22:34:28.000Z | Hackerrank/sherlockAndCost.py | nandani99/Hacktoberfest-1 | 83cdd9f6b5538fa266d0617d53409852111c89b5 | [
"MIT"
] | 797 | 2018-10-16T05:52:30.000Z | 2022-02-08T23:02:51.000Z | Hackerrank/sherlockAndCost.py | nandani99/Hacktoberfest-1 | 83cdd9f6b5538fa266d0617d53409852111c89b5 | [
"MIT"
] | 1,115 | 2018-10-16T07:17:57.000Z | 2022-01-09T02:59:30.000Z | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the cost function below.
def cost(b):
    """Return the maximum achievable sum of |B'[i] - B'[i-1]| where each
    B'[i] may be any value in [1, b[i]] (HackerRank "Sherlock and Cost")."""
    # low / high: best total so far when the previous element was pinned
    # to 1 vs. pinned to its maximum b[i-1].
    low = high = 0
    for prev_max, cur_max in zip(b, b[1:]):
        low, high = (max(low, high + prev_max - 1),
                     max(low + cur_max - 1, high + abs(cur_max - prev_max)))
    return max(low, high)
if __name__ == '__main__':
    # Manage the output file with a context manager so the handle is closed
    # even if input parsing or cost() raises (the old code only closed it on
    # the success path).
    with open(os.environ['OUTPUT_PATH'], 'w') as fptr:
        t = int(input())
        for _ in range(t):
            n = int(input())  # array length; consumed to advance the input stream
            B = list(map(int, input().rstrip().split()))
            result = cost(B)
            fptr.write(str(result) + '\n')
| 17.411765 | 60 | 0.506757 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 79 | 0.133446 |
37c8e245a984cf38e441cc68ffe2db0691a55ea8 | 496 | py | Python | {{ cookiecutter.repo_name }}/app/config/config.py | ShilpaGopal/cookiecutter-ml-flask-serving | 9d1f56d0cda248fb2834b714390df7078ad24a22 | [
"MIT"
] | null | null | null | {{ cookiecutter.repo_name }}/app/config/config.py | ShilpaGopal/cookiecutter-ml-flask-serving | 9d1f56d0cda248fb2834b714390df7078ad24a22 | [
"MIT"
] | null | null | null | {{ cookiecutter.repo_name }}/app/config/config.py | ShilpaGopal/cookiecutter-ml-flask-serving | 9d1f56d0cda248fb2834b714390df7078ad24a22 | [
"MIT"
] | null | null | null | import os
import constants.constants as const
SVC_NAME = const.SVC_NAME
MODEL_CONFIG = {
'model_path': os.getenv('MODEL_PATH', 'data/model/crnn_model.h5')
}
LOGGER_CONFIG = {
'log_level': os.getenv('LOG_LEVEL', 'DEBUG'),
'log_handle': os.getenv('LOG_HANDLE', 'file'),
'log_path': os.getenv('LOG_PATH', 'log/'),
'log_file': os.getenv('LOG_FILE', SVC_NAME)
}
SERVICE_CONFIG = {
'port': os.getenv('PORT', 8000),
'workers': os.getenv('WORKERS', 1)
} | 24.8 | 70 | 0.631048 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 185 | 0.372984 |
37cb1c48f6f0fcf7f4afa1dcca776c8ef908dd41 | 9,222 | py | Python | archr/analyzers/datascout.py | trentn/archr | 04e7c30aa635ec77e2b82c0681a8974e9e89a239 | [
"BSD-2-Clause"
] | 58 | 2019-02-20T20:46:28.000Z | 2021-09-30T17:27:46.000Z | archr/analyzers/datascout.py | trentn/archr | 04e7c30aa635ec77e2b82c0681a8974e9e89a239 | [
"BSD-2-Clause"
] | 58 | 2019-02-20T20:22:51.000Z | 2022-01-28T21:19:01.000Z | archr/analyzers/datascout.py | trentn/archr | 04e7c30aa635ec77e2b82c0681a8974e9e89a239 | [
"BSD-2-Clause"
] | 16 | 2019-02-20T19:44:19.000Z | 2022-03-28T21:41:49.000Z | import logging
l = logging.getLogger("archr.analyzers.datascout")
from ..errors import ArchrError
from . import Analyzer
# Keystone engine 0.9.2 (incorrectly) defaults to radix 16. so we'd better off only using 0x-prefixed integers from now.
# See the related PR: https://github.com/keystone-engine/keystone/pull/382
# and the related issue: https://github.com/keystone-engine/keystone/issues/436
class DataScoutAnalyzer(Analyzer):
"""
Grabs the environment and auxiliary vector from the target.
"""
REQUIRED_IMPLANT = "shellphish_qemu"
def __init__(self, target, analyzer=None):
super().__init__(target)
self.env = None
self.argv = None
self.auxv = None
self.map = None
self.analyzer = analyzer
def _pushstr(self, s):
"""
push a string onto stack
"""
def _cutstr(bits, little=True):
w = bits // 8 # word size
byte_order = -1 if little else 1
n = ["0"] + [s[i:i + w].ljust(w, "\0")[::byte_order].encode('utf-8').hex() for i in range(0, len(s), w)][::-1]
return n
if self.target.target_arch == 'x86_64':
elems = _cutstr(64)
return "".join("mov rax, 0x%s; push rax; " % word for word in elems)
elif self.target.target_arch == 'i386':
elems = _cutstr(32)
return "".join("mov eax, 0x%s; push eax; " % word for word in elems)
elif self.target.target_arch in ('mips', 'mipsel'):
elems = _cutstr(32, little=self.target.target_arch != 'mips')
return "".join("li $t0, 0x%s; addi $sp, $sp, -4; sw $t0, 0($sp);" % word for word in elems)
elif self.target.target_arch == 'arm':
elems = _cutstr(32)
return "".join(f"movw r0, #0x{word} & 0xffff; movt r0, #0x{word} >> 16; push {{r0}};" for word in elems)
else:
raise NotImplementedError()
def read_file_shellcode(self, filename):
"""
shellcode to read the content of a file
"""
if self.target.target_arch == 'x86_64':
return (
self._pushstr(filename) +
"mov rdi, rsp; xor rsi, rsi; xor rdx, rdx; mov rax, 2; syscall;" + # fd = open(path, O_RDONLY, 0)
"mov r12, rax; sub rsp, 0x1000;" + # alloca 0x1000
"loop_head:" +
"xor rax, rax; mov rdi, r12; mov rsi, rsp; mov rdx, 0x1000; syscall;" + # n = read(fd, rsp, 0x1000)
"mov r13, rax;" + # save n
"mov rax, 1; mov rdi, 1; mov rsi, rsp; mov rdx, r13; syscall;" + # write(1, rsp, n)
"test r13, r13; jnz loop_head;" # loop untill we are done with the file
)
elif self.target.target_arch == 'i386':
return (
self._pushstr(filename) +
"mov ebx, esp; xor ecx, ecx; xor edx, edx; mov eax, 5; int 0x80;" + # n = open(path, O_RDONLY, 0)
"mov esi, eax; sub esp, 0x1000;" + # alloca 0x1000, fd = esi
"loop_head:" +
"mov eax, 3; mov ebx, esi; mov ecx, esp; mov edx, 0x1000; int 0x80;" + # n = read(fd, rsp, 0x1000)
"mov edi, eax;"+ # save n
"mov eax, 4; mov ebx, 1; mov ecx, esp; mov edx, edi; int 0x80;" + # write(1, rsp, n)
"test edi, edi; jnz loop_head;" # loop untill we are done with the file
)
elif self.target.target_arch in ('mips', 'mipsel'):
return (
self._pushstr(filename) +
"move $a0, $sp; xor $a1, $a1, $a1; xor $a2, $a2, $a2; li $v0, 0xfa5; syscall;" + # n = open(path, O_RDONLY, 0)
"move $s0, $v0; li $a0, 0x1000; sub $sp, $sp, $a0;" + # alloca 0x1000, fd = $s0
"loop_head:" +
"li $v0, 0xfa3; move $a0, $s0; move $a1, $sp; li $a2, 0x1000; syscall;" + # n = read(fd, rsp, 0x1000)
"move $s1, $v0;" + # save n
"li $v0, 0xfa4; li $a0, 1; move $a1, $sp; move $a2, $s1; syscall;" + # write(1, rsp, n)
"bne $s1, 0, loop_head;" # loop untill we are done with the file
)
elif self.target.target_arch == 'arm':
return (
self._pushstr(filename) +
"mov r0, sp; eor r1, r1; eor r2, r2; mov r7, #5; svc 0;" + # n = open(path, O_RDONLY, 0)
"mov r8, r0; sub sp, sp, 0x1000;" + # alloca 0x1000, fd = $r8
"loop_head:" +
"mov r7, #3; mov r0, r8; mov r1, sp; mov r2, 0x1000; svc 0;" + # n = read(fd, rsp, 0x1000)
"mov r9, r0;" + # save n to r9
"mov r7, #4; mov r0, 1; mov r1, sp; mov r2, r9; svc 0;" + # write(1, rsp, n)
"cmp r9, #0; bne loop_head;" # loop untill we are done with the file
)
else:
raise NotImplementedError("Unknown target architecure: \"%s\"!" % self.target.target_arch)
def echo_shellcode(self, what):
if self.target.target_arch == 'x86_64':
return (
self._pushstr(what) +
"mov rdi, 1; mov rsi, rsp; mov rdx, %#x; mov rax, 1; syscall;" % len(what) # n = write(1, rsp, 0x1000)
)
elif self.target.target_arch == 'i386':
return (
self._pushstr(what) +
"mov ebx, 1; mov ecx, esp; mov edx, %#x; mov eax, 4; int 0x80;" % len(what) # n = write(1, esp, 0x1000)
)
elif self.target.target_arch in ('mips', 'mipsel'):
return (
self._pushstr(what) +
"li $a0, 1; move $a1, $sp; li $a2, %#x; li $v0, 0xfa4; syscall;" % len(what) # n = write(1, sp, 0x1000)
)
elif self.target.target_arch == 'arm':
return (
self._pushstr(what) +
"mov r0, #1; mov r1, sp; mov r2, #%#x; mov r7, #4; svc 0;" % len(what) # n = write(1, sp, 0x1000)
)
else:
raise NotImplementedError()
def brk_shellcode(self):
if self.target.target_arch == 'x86_64':
return "mov rax, 0xc; xor rdi, rdi; syscall; mov rdi, rax; add rdi, 0x1000; mov rax, 0xc; syscall;"
elif self.target.target_arch == 'i386':
# n = brk 0
# brk n + 0x1000
return "mov eax, 0x2d; xor ebx, ebx; int 0x80; mov ebx, eax; add ebx, 0x1000; mov eax, 0x2d; int 0x80;"
elif self.target.target_arch in ('mips', 'mipsel'):
# n = brk 0
# brk n + 0x1000
return "xor $a0, $a0, $a0; li $v0, 0xfcd; syscall; add $a0, $v0, 0x1000; li $v0, 0xfcd; syscall;"
elif self.target.target_arch == 'arm':
# n = brk 0
# brk n + 0x1000
return "eor r0, r0; mov r7, #0x2d; svc 0; add r0, #0x1000; mov r7, #0x2d; svc 0;"
else:
raise NotImplementedError()
def exit_shellcode(self, exit_code=42):
if self.target.target_arch == 'x86_64':
return "mov rdi, %#x; mov rax, 0x3c; syscall;" % exit_code # exit(code)
elif self.target.target_arch == 'i386':
return "mov ebx, %#x; mov eax, 1; int 0x80;" % exit_code # exit(code)
elif self.target.target_arch in ('mips', 'mipsel'):
return "li $a0, %#x; li $v0, 0xfa1; syscall;" % exit_code # exit(code)
elif self.target.target_arch == 'arm':
return "mov r0, #%#x; mov r7, #1; svc 0;" % exit_code # exit(code)
else:
raise NotImplementedError()
def run_shellcode(self, shellcode, aslr=False, **kwargs):
exit_code = 42
# build the args
if self.analyzer:
args = self.analyzer._build_command()
else:
args = self.target.target_args
# run command within the shellcode context
with self.target.shellcode_context(args, asm_code=shellcode+self.exit_shellcode(exit_code=exit_code), aslr=aslr, **kwargs) as p:
output, stderr = p.communicate()
if p.returncode != exit_code:
raise ArchrError("DataScout failed to get info from the target process.\n"
"stdout: %s\nstderr: %s" % (output, stderr))
return output
def fire(self, aslr=False, **kwargs): #pylint:disable=arguments-differ
if self.target.target_os == 'cgc':
return [], [], b'', {}
if not self.argv:
output = self.run_shellcode(self.read_file_shellcode("/proc/self/cmdline"), aslr=aslr, **kwargs)
self.argv = output.split(b'\0')[:-1]
if not self.env:
output = self.run_shellcode(self.read_file_shellcode("/proc/self/environ"), aslr=aslr, **kwargs)
self.env = output.split(b'\0')[:-1]
if not self.auxv:
output = self.run_shellcode(self.read_file_shellcode("/proc/self/auxv"), aslr=aslr, **kwargs)
self.auxv = output
if not self.map:
output = self.run_shellcode(self.brk_shellcode()+self.read_file_shellcode("/proc/self/maps"), aslr=aslr, **kwargs)
self.map = parse_proc_maps(output)
return self.argv, self.env, self.auxv, self.map
from ..utils import parse_proc_maps
| 46.11 | 136 | 0.533941 | 8,782 | 0.952288 | 0 | 0 | 0 | 0 | 0 | 0 | 3,890 | 0.421817 |
805d2e10e957e70e749b95821942bd42c89d0b08 | 7,158 | py | Python | main.py | Parzival32/e-Dnevnik_API | 8f9ef8ef062a550dbcb21dbfe99b2274df2b4857 | [
"MIT"
] | null | null | null | main.py | Parzival32/e-Dnevnik_API | 8f9ef8ef062a550dbcb21dbfe99b2274df2b4857 | [
"MIT"
] | null | null | null | main.py | Parzival32/e-Dnevnik_API | 8f9ef8ef062a550dbcb21dbfe99b2274df2b4857 | [
"MIT"
] | null | null | null | from selenium import webdriver
from selenium.webdriver.chrome.options import Options
class api:
    """Scraper for the Croatian e-Dnevnik grade book (ocjene.skole.hr).

    Each public method starts a headless Chrome session via Selenium, logs
    in with the stored credentials, scrapes one piece of information and
    closes the browser.  On a failed login every method returns the
    ``loginFailed`` string instead.

    The previous version duplicated the whole login-and-scrape sequence in
    all seven methods; it is now factored into the private helpers
    ``_login`` and ``_scrape_text`` with the public interface unchanged.
    """

    # Returned by every method when the credentials are rejected.
    loginFailed = 'Login failed'

    # URLs and XPaths of the scraped page elements (shared by the methods).
    _LOGIN_URL = "https://ocjene.skole.hr/login"
    _COURSE_URL = 'https://ocjene.skole.hr/course'
    _PERSONAL_DATA_URL = "https://ocjene.skole.hr/personal_data"
    _GRADE_XPATH = '//*[@id="class-administration-menu"]/div[1]/div/div[1]/span[1]'
    _YEAR_XPATH = '//*[@id="class-administration-menu"]/div[1]/div/div[1]/span[2]'
    _SCHOOL_XPATH = '//*[@id="class-administration-menu"]/div[1]/div/div[2]/div[1]/span[1]'
    _NAME_XPATH = '//*[@id="header"]/div[2]/div/span'
    _USER_NUMBER_XPATH = '//*[@id="page-wrapper"]/div[4]/div/div[2]/span[2]'

    def __init__(self, username, passowrd, path):
        # NOTE: the misspelled `passowrd` parameter name is kept so existing
        # keyword callers are not broken.
        self.username = username
        self.password = passowrd
        self.path = path  # path to the chromedriver binary

    def _login(self):
        """Open a headless browser, submit the login form, and return the
        driver on success; on a failed login the browser is closed and
        None is returned."""
        chrome_options = Options()
        chrome_options.add_argument("--headless")
        driver = webdriver.Chrome(self.path, options=chrome_options)
        driver.get(self._LOGIN_URL)
        driver.find_element_by_name("username").send_keys(self.username)
        driver.find_element_by_name("password").send_keys(self.password)
        driver.find_element_by_xpath('//input[@type="submit"]').click()
        if driver.current_url != self._COURSE_URL:
            driver.close()
            return None
        return driver

    def _scrape_text(self, xpath, page=None):
        """Log in, optionally navigate to `page`, and return the text of the
        element at `xpath` (or `loginFailed` on a failed login)."""
        driver = self._login()
        if driver is None:
            return self.loginFailed
        if page is not None:
            driver.get(page)
        text = driver.find_element_by_xpath(xpath).text
        driver.close()
        return text

    def auth(self):
        """Return True when the credentials are valid, else `loginFailed`."""
        driver = self._login()
        if driver is None:
            return self.loginFailed
        driver.close()
        return True

    def grade(self):
        """Return the student's grade (class number) as text."""
        return self._scrape_text(self._GRADE_XPATH)

    def nameSurname(self):
        """Return [name, surname] of the logged-in student."""
        full_name = self._scrape_text(self._NAME_XPATH)
        if full_name == self.loginFailed:
            return self.loginFailed
        name, surname = full_name.split()
        return [name, surname]

    def userNumber(self):
        """Return the student's personal identification number."""
        return self._scrape_text(self._USER_NUMBER_XPATH, page=self._PERSONAL_DATA_URL)

    def getClassYear(self):
        """Return the school year of the student's class."""
        return self._scrape_text(self._YEAR_XPATH)

    def getSchool(self):
        """Return the name of the student's school."""
        return self._scrape_text(self._SCHOOL_XPATH)

    def userInfo(self):
        """Return [name, surname, grade, userNumber, year, school], scraped
        in a single browser session (or `loginFailed` on a failed login)."""
        driver = self._login()
        if driver is None:
            return self.loginFailed
        grade = driver.find_element_by_xpath(self._GRADE_XPATH).text
        name, surname = driver.find_element_by_xpath(self._NAME_XPATH).text.split()
        year = driver.find_element_by_xpath(self._YEAR_XPATH).text
        school = driver.find_element_by_xpath(self._SCHOOL_XPATH).text
        driver.get(self._PERSONAL_DATA_URL)
        user_number = driver.find_element_by_xpath(self._USER_NUMBER_XPATH).text
        driver.close()
        return [name, surname, grade, user_number, year, school]
805ebe18518a4388e308601d10dffab1db81feb4 | 304 | py | Python | tests/file_test_util.py | lyksdu/langmodels | 5a49ee523e84513915f6fa747e03e5ef813aec17 | [
"MIT"
] | 9 | 2020-01-21T13:34:49.000Z | 2022-03-18T02:34:30.000Z | tests/file_test_util.py | lyksdu/langmodels | 5a49ee523e84513915f6fa747e03e5ef813aec17 | [
"MIT"
] | 6 | 2020-05-02T07:03:26.000Z | 2022-03-11T23:48:38.000Z | tests/file_test_util.py | lyksdu/langmodels | 5a49ee523e84513915f6fa747e03e5ef813aec17 | [
"MIT"
] | 3 | 2019-08-03T00:06:19.000Z | 2020-05-07T00:37:19.000Z | from typing import List
from unittest.mock import MagicMock
def file_mock_with_lines(lines: List[str]):
file_mock = MagicMock(spec=['__enter__', '__exit__'])
handle1 = file_mock.__enter__.return_value
handle1.__iter__.return_value = iter(map(lambda l: l + '\n', lines))
return file_mock | 33.777778 | 72 | 0.740132 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 25 | 0.082237 |
805f604d1603256f86a479dd81f3e5fcee43cae0 | 1,193 | py | Python | capreolus/index/tests/test_index.py | AlexWang000/capreolus | 00b0bf471ea0eb116ab973254ea61b0492405c54 | [
"Apache-2.0"
] | 1 | 2020-07-23T07:59:40.000Z | 2020-07-23T07:59:40.000Z | capreolus/index/tests/test_index.py | AlexWang000/capreolus | 00b0bf471ea0eb116ab973254ea61b0492405c54 | [
"Apache-2.0"
] | 1 | 2022-01-09T00:11:58.000Z | 2022-01-21T13:58:50.000Z | capreolus/index/tests/test_index.py | AlexWang000/capreolus | 00b0bf471ea0eb116ab973254ea61b0492405c54 | [
"Apache-2.0"
] | 3 | 2021-10-20T13:11:46.000Z | 2021-12-20T21:34:19.000Z | import pytest
from capreolus.collection import Collection, DummyCollection
from capreolus.index import Index
from capreolus.index import AnseriniIndex
from capreolus.tests.common_fixtures import tmpdir_as_cache, dummy_index
def test_anserini_create_index(tmpdir_as_cache):
    # Build an AnseriniIndex over the DummyCollection inside a temporary
    # cache dir: exists() must flip from False to True once create_index()
    # has produced the on-disk artifacts.
    index = AnseriniIndex({"_name": "anserini", "indexstops": False, "stemmer": "porter"})
    index.modules["collection"] = DummyCollection({"_name": "dummy"})
    assert not index.exists()
    index.create_index()
    assert index.exists()
def test_anserini_get_docs(tmpdir_as_cache, dummy_index):
    # get_docs returns the raw document contents, in the same order as the
    # requested ids.
    docs = dummy_index.get_docs(["LA010189-0001"])
    assert docs == ["Dummy Dummy Dummy Hello world, greetings from outer space!"]
    docs = dummy_index.get_docs(["LA010189-0001", "LA010189-0002"])
    assert docs == [
        "Dummy Dummy Dummy Hello world, greetings from outer space!",
        "Dummy LessDummy Hello world, greetings from outer space!",
    ]
def test_anserini_get_df(tmpdir_as_cache, dummy_index):
    # "hello" occurs in both dummy documents, so its document frequency is 2.
    df = dummy_index.get_df("hello")
    assert df == 2
def test_anserini_get_idf(tmpdir_as_cache, dummy_index):
    # Pinned regression constant for the IDF of "hello" over the dummy
    # collection (presumably derived from Anserini's IDF formula -- the exact
    # formula is not visible here).
    idf = dummy_index.get_idf("hello")
    assert idf == 0.1823215567939546
| 34.085714 | 90 | 0.738474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 309 | 0.259011 |
80601f4e57df490107c52562edfb5325419b2d30 | 624 | py | Python | noheavenbot/utils/database_config.py | Molanito13/noheaven-bot | ad126d4601321ecabff9d1d214ce7d3f4e258c3e | [
"MIT"
] | 3 | 2018-10-13T14:05:24.000Z | 2018-12-25T21:40:21.000Z | noheavenbot/utils/database_config.py | Molanito13/noheaven-bot | ad126d4601321ecabff9d1d214ce7d3f4e258c3e | [
"MIT"
] | 2 | 2018-10-08T14:33:39.000Z | 2020-03-02T18:00:47.000Z | noheavenbot/utils/database_config.py | Molanito13/noheaven-bot | ad126d4601321ecabff9d1d214ce7d3f4e258c3e | [
"MIT"
] | 5 | 2018-10-08T14:18:58.000Z | 2020-11-01T17:55:51.000Z | import logging
try:
from asyncpg import create_pool
except ModuleNotFoundError:
logging.warning('Database not set up, install asyncpg')
from noheavenbot.utils.constants import EnvVariables
class Database:
@classmethod
async def connect(cls):
credentials = {'user': EnvVariables.get('DB_USER'),
'password': EnvVariables.get('DB_PASS'),
'database': EnvVariables.get('DB_DATABASE'),
'host': EnvVariables.get('DB_HOST'),
'port': EnvVariables.get('DB_PORT')}
return await create_pool(**credentials)
| 28.363636 | 67 | 0.621795 | 422 | 0.676282 | 0 | 0 | 401 | 0.642628 | 384 | 0.615385 | 125 | 0.200321 |
806099da6f4e0eb08385e71c81f065edd86b5eed | 472 | py | Python | myapi/serializers.py | zchuhui/django-rest-framework-example | 82ee470b581473a0e9f5772ede75a90f2dfe1c54 | [
"Apache-2.0"
] | null | null | null | myapi/serializers.py | zchuhui/django-rest-framework-example | 82ee470b581473a0e9f5772ede75a90f2dfe1c54 | [
"Apache-2.0"
] | null | null | null | myapi/serializers.py | zchuhui/django-rest-framework-example | 82ee470b581473a0e9f5772ede75a90f2dfe1c54 | [
"Apache-2.0"
] | null | null | null | from rest_framework import serializers
from .models import Hero,Company
class HeroSerializer(serializers.HyperlinkedModelSerializer):
'''
系列化 Hero model
'''
class Meta:
model = Hero
fields = ('id','name','alias')
class CompanySerializer(serializers.HyperlinkedModelSerializer):
    '''
    Serializer for the Company model (exposes id, name, address, city and
    official_website).
    '''
    class Meta:
        model = Company
        fields = ('id','name','address','city','official_website')
| 19.666667 | 66 | 0.633475 | 403 | 0.832645 | 0 | 0 | 0 | 0 | 0 | 0 | 145 | 0.299587 |
80611e229baae2429315f03fb9b32d57f7298e03 | 4,323 | py | Python | diplomacy/src/scripts/yamlizer.py | MaxStrange/nlp | 8cd417ab71c7321a25edd5519a133bb8f1a5d092 | [
"MIT"
] | 1 | 2019-04-22T16:45:32.000Z | 2019-04-22T16:45:32.000Z | diplomacy/src/scripts/yamlizer.py | afcarl/nlp | 8cd417ab71c7321a25edd5519a133bb8f1a5d092 | [
"MIT"
] | null | null | null | diplomacy/src/scripts/yamlizer.py | afcarl/nlp | 8cd417ab71c7321a25edd5519a133bb8f1a5d092 | [
"MIT"
] | 1 | 2018-06-17T14:54:02.000Z | 2018-06-17T14:54:02.000Z | """
This script takes a single message gathered from playdiplomacy.com as an input file and
outputs a YAML version of it so that it can be used as an input file into the program.
Usage:
python3 yamlizer.py msg.txt
(You can also feed it a list of files).
NOTE: This script is not designed to handle multiple recipient messages. So if you want to
run this script on a message that has a CC line like:
CC: ENGLAND, FRANCE, ITALY
You must alter this to be:
CC: ENGLAND
or
CC: FRANCE
or
CC: ITALY
But there is nothing stopping you from running this script on the file multiple times - one for each recipient.
Example:
[Contents of msg.txt]:
=====================================================
From: ITALY
Date: May 07 2017 15:04 (GMT-8) Spring 1903
CC: FRANCE
Re:Movement
Awesome.
About Bohemia... You should have Burgundy support Bohemia to Munich. Meanwhile, attack Ruhr with Belgium.
Unless you want me to contact Russia about what I should do with Bohemia. That would be fine too. Up to you.
======================================================
When fed through this script, this will output as msg.yml:
======================================================
year: 1903
season: Spring
a_to_b:
from_player: Italy
from_country: Italy
to_player: France
to_country: France
messages:
- >
"Re:Movement
Awesome.
About Bohemia... You should have Burgundy support Bohemia to Munich. Meanwhile, attack Ruhr with Belgium.
Unless you want me to contact Russia about what I should do with Bohemia. That would be fine too. Up to you."
======================================================
You are free to do what you want with the from_player and to_player fields. Those are not really used - they were included because I didn't know if they would be
necessary or not - turns out not.
You can also add more messages to the end of that and also add b_to_a messages as well. See examples.
"""
import os
import sys
class Message:
    """A single playdiplomacy.com message parsed from its raw text lines.

    Attributes: `from_` (sender country), `date` (full date line), `to`
    (recipient country from the CC line) and `message` (everything after
    the CC line, stripped).
    """

    def __init__(self, txt):
        from_line = next(line for line in txt if line.lower().startswith("from:"))
        date_line = next(line for line in txt if line.lower().startswith("date:"))
        to_line_index = next(i for i, line in enumerate(txt) if line.lower().startswith("cc:"))
        to_line = txt[to_line_index]
        msg_body = os.linesep.join(txt[to_line_index + 1:])
        # Strip the header tags by prefix length so the removal is as
        # case-insensitive as the matching above (the old str.split("From:")
        # raised IndexError on case variants such as "FROM:").
        self.from_ = from_line[len("from:"):].strip()
        self.date = date_line[len("date:"):].strip()
        self.to = to_line[len("cc:"):].strip()
        self.message = msg_body.strip()

    def __str__(self):
        s = "FROM: " + str(self.from_) + os.linesep
        s += "DATE: " + str(self.date) + os.linesep
        s += "TO: " + str(self.to) + os.linesep
        s += "Message: " + self.message
        return s
def yamlize(txt):
    """
    Converts the message text from playdiplomacy raw text to the YAML format that this program requires.
    """
    message = Message(txt)
    date_parts = message.date.split(" ")
    year, season = date_parts[-1], date_parts[-2]
    from_player = from_country = message.from_.lower().title()
    to_player = to_country = message.to.lower().title()
    # Escape embedded quotes, wrap the body in quotes, then indent every
    # continuation line to sit under the YAML block scalar.
    body = "\"" + message.message.replace("\"", "\\\"") + "\""
    body = body.replace(os.linesep, os.linesep + "     ")
    header_lines = [
        "year: " + year,
        "season: " + season,
        "a_to_b:",
        "  from_player: " + from_player,
        "  from_country: " + from_country,
        "  to_player: " + to_player,
        "  to_country: " + to_country,
        "  messages:",
        "    - >",
    ]
    return "".join(line + os.linesep for line in header_lines) + body + os.linesep
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Need at least one file path.")
        # sys.exit with a nonzero status instead of the bare exit() builtin,
        # which is meant for interactive use and signalled success (0).
        sys.exit(1)
    for file_path in sys.argv[1:]:
        try:
            with open(file_path) as f:
                text = [line for line in f]
            yaml = yamlize(text)
            # Write the converted message next to the input, as <name>.yml.
            with open(os.path.splitext(file_path)[0] + ".yml", 'w') as f:
                f.write(yaml)
        except FileNotFoundError:
            print("File", file_path, "does not exist.")
| 32.75 | 161 | 0.610687 | 907 | 0.209808 | 0 | 0 | 0 | 0 | 0 | 0 | 2,354 | 0.544529 |
80624d1280a0eb18b210f33d8b8631a74882c8c8 | 17,010 | py | Python | sdk/python/pulumi_alicloud/bastionhost/host_account_user_group_attachment.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 42 | 2019-03-18T06:34:37.000Z | 2022-03-24T07:08:57.000Z | sdk/python/pulumi_alicloud/bastionhost/host_account_user_group_attachment.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 152 | 2019-04-15T21:03:44.000Z | 2022-03-29T18:00:57.000Z | sdk/python/pulumi_alicloud/bastionhost/host_account_user_group_attachment.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2020-08-26T17:30:07.000Z | 2021-07-05T01:37:45.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['HostAccountUserGroupAttachmentArgs', 'HostAccountUserGroupAttachment']
@pulumi.input_type
class HostAccountUserGroupAttachmentArgs:
    # Pulumi-generated input type: constructor args are stored through
    # pulumi.set/get so the engine can track them as resource inputs.
    def __init__(__self__, *,
                 host_account_ids: pulumi.Input[Sequence[pulumi.Input[str]]],
                 host_id: pulumi.Input[str],
                 instance_id: pulumi.Input[str],
                 user_group_id: pulumi.Input[str]):
        """
        The set of arguments for constructing a HostAccountUserGroupAttachment resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] host_account_ids: A list of IDs of the host accounts.
        :param pulumi.Input[str] host_id: The ID of the host.
        :param pulumi.Input[str] instance_id: The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        :param pulumi.Input[str] user_group_id: The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        pulumi.set(__self__, "host_account_ids", host_account_ids)
        pulumi.set(__self__, "host_id", host_id)
        pulumi.set(__self__, "instance_id", instance_id)
        pulumi.set(__self__, "user_group_id", user_group_id)

    @property
    @pulumi.getter(name="hostAccountIds")
    def host_account_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        A list of IDs of the host accounts.
        """
        return pulumi.get(self, "host_account_ids")

    @host_account_ids.setter
    def host_account_ids(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "host_account_ids", value)

    @property
    @pulumi.getter(name="hostId")
    def host_id(self) -> pulumi.Input[str]:
        """
        The ID of the host.
        """
        return pulumi.get(self, "host_id")

    @host_id.setter
    def host_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "host_id", value)

    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> pulumi.Input[str]:
        """
        The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "instance_id")

    @instance_id.setter
    def instance_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "instance_id", value)

    @property
    @pulumi.getter(name="userGroupId")
    def user_group_id(self) -> pulumi.Input[str]:
        """
        The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "user_group_id")

    @user_group_id.setter
    def user_group_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "user_group_id", value)
@pulumi.input_type
class _HostAccountUserGroupAttachmentState:
    # Pulumi-generated state type: all fields are optional because this shape
    # is used for looking up / filtering existing resources.
    def __init__(__self__, *,
                 host_account_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 host_id: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[str]] = None,
                 user_group_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering HostAccountUserGroupAttachment resources.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] host_account_ids: A list of IDs of the host accounts.
        :param pulumi.Input[str] host_id: The ID of the host.
        :param pulumi.Input[str] instance_id: The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        :param pulumi.Input[str] user_group_id: The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        if host_account_ids is not None:
            pulumi.set(__self__, "host_account_ids", host_account_ids)
        if host_id is not None:
            pulumi.set(__self__, "host_id", host_id)
        if instance_id is not None:
            pulumi.set(__self__, "instance_id", instance_id)
        if user_group_id is not None:
            pulumi.set(__self__, "user_group_id", user_group_id)

    @property
    @pulumi.getter(name="hostAccountIds")
    def host_account_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of IDs of the host accounts.
        """
        return pulumi.get(self, "host_account_ids")

    @host_account_ids.setter
    def host_account_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "host_account_ids", value)

    @property
    @pulumi.getter(name="hostId")
    def host_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the host.
        """
        return pulumi.get(self, "host_id")

    @host_id.setter
    def host_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_id", value)

    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "instance_id")

    @instance_id.setter
    def instance_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_id", value)

    @property
    @pulumi.getter(name="userGroupId")
    def user_group_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "user_group_id")

    @user_group_id.setter
    def user_group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_group_id", value)
class HostAccountUserGroupAttachment(pulumi.CustomResource):
    """Attaches a list of Bastionhost host accounts to a user group.

    NOTE(review): this class follows the Pulumi provider codegen layout
    (typed @overload stubs dispatching to ``_internal_init``); keep any
    edits consistent with the generator's conventions.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 host_account_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 host_id: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[str]] = None,
                 user_group_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a Bastion Host Host Account Attachment resource to add list host accounts into one user group.
        > **NOTE:** Available in v1.135.0+.
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        default_host = alicloud.bastionhost.Host("defaultHost",
            instance_id="bastionhost-cn-tl32bh0no30",
            host_name=var["name"],
            active_address_type="Private",
            host_private_address="172.16.0.10",
            os_type="Linux",
            source="Local")
        default_host_account = []
        for range in [{"value": i} for i in range(0, 3)]:
            default_host_account.append(alicloud.bastionhost.HostAccount(f"defaultHostAccount-{range['value']}",
                instance_id=default_host.instance_id,
                host_account_name=f"example_value-{range['value']}",
                host_id=default_host.host_id,
                protocol_name="SSH",
                password="YourPassword12345"))
        default_user_group = alicloud.bastionhost.UserGroup("defaultUserGroup",
            instance_id="bastionhost-cn-tl32bh0no30",
            user_group_name=var["name"])
        default_host_account_user_group_attachment = alicloud.bastionhost.HostAccountUserGroupAttachment("defaultHostAccountUserGroupAttachment",
            instance_id=default_host.instance_id,
            user_group_id=default_user_group.user_group_id,
            host_id=default_host.host_id,
            host_account_ids=[__item.host_account_id for __item in default_host_account])
        ```
        ## Import
        Bastion Host Host Account can be imported using the id, e.g.
        ```sh
         $ pulumi import alicloud:bastionhost/hostAccountUserGroupAttachment:HostAccountUserGroupAttachment example <instance_id>:<user_group_id>:<host_id>
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] host_account_ids: A list IDs of the host account.
        :param pulumi.Input[str] host_id: The ID of the host.
        :param pulumi.Input[str] instance_id: The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        :param pulumi.Input[str] user_group_id: The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: HostAccountUserGroupAttachmentArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Bastion Host Host Account Attachment resource to add list host accounts into one user group.
        > **NOTE:** Available in v1.135.0+.
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        default_host = alicloud.bastionhost.Host("defaultHost",
            instance_id="bastionhost-cn-tl32bh0no30",
            host_name=var["name"],
            active_address_type="Private",
            host_private_address="172.16.0.10",
            os_type="Linux",
            source="Local")
        default_host_account = []
        for range in [{"value": i} for i in range(0, 3)]:
            default_host_account.append(alicloud.bastionhost.HostAccount(f"defaultHostAccount-{range['value']}",
                instance_id=default_host.instance_id,
                host_account_name=f"example_value-{range['value']}",
                host_id=default_host.host_id,
                protocol_name="SSH",
                password="YourPassword12345"))
        default_user_group = alicloud.bastionhost.UserGroup("defaultUserGroup",
            instance_id="bastionhost-cn-tl32bh0no30",
            user_group_name=var["name"])
        default_host_account_user_group_attachment = alicloud.bastionhost.HostAccountUserGroupAttachment("defaultHostAccountUserGroupAttachment",
            instance_id=default_host.instance_id,
            user_group_id=default_user_group.user_group_id,
            host_id=default_host.host_id,
            host_account_ids=[__item.host_account_id for __item in default_host_account])
        ```
        ## Import
        Bastion Host Host Account can be imported using the id, e.g.
        ```sh
         $ pulumi import alicloud:bastionhost/hostAccountUserGroupAttachment:HostAccountUserGroupAttachment example <instance_id>:<user_group_id>:<host_id>
        ```
        :param str resource_name: The name of the resource.
        :param HostAccountUserGroupAttachmentArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: unpack an args
        # object if one was given, otherwise forward keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(HostAccountUserGroupAttachmentArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 host_account_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 host_id: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[str]] = None,
                 user_group_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id is only set when rehydrating an existing resource via
        # get(); in that case __props__ carries the looked-up state.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = HostAccountUserGroupAttachmentArgs.__new__(HostAccountUserGroupAttachmentArgs)
            # All four inputs are required unless a URN is supplied.
            if host_account_ids is None and not opts.urn:
                raise TypeError("Missing required property 'host_account_ids'")
            __props__.__dict__["host_account_ids"] = host_account_ids
            if host_id is None and not opts.urn:
                raise TypeError("Missing required property 'host_id'")
            __props__.__dict__["host_id"] = host_id
            if instance_id is None and not opts.urn:
                raise TypeError("Missing required property 'instance_id'")
            __props__.__dict__["instance_id"] = instance_id
            if user_group_id is None and not opts.urn:
                raise TypeError("Missing required property 'user_group_id'")
            __props__.__dict__["user_group_id"] = user_group_id
        super(HostAccountUserGroupAttachment, __self__).__init__(
            'alicloud:bastionhost/hostAccountUserGroupAttachment:HostAccountUserGroupAttachment',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            host_account_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            host_id: Optional[pulumi.Input[str]] = None,
            instance_id: Optional[pulumi.Input[str]] = None,
            user_group_id: Optional[pulumi.Input[str]] = None) -> 'HostAccountUserGroupAttachment':
        """
        Get an existing HostAccountUserGroupAttachment resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] host_account_ids: A list IDs of the host account.
        :param pulumi.Input[str] host_id: The ID of the host.
        :param pulumi.Input[str] instance_id: The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        :param pulumi.Input[str] user_group_id: The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        # Setting an id in the options makes _internal_init skip the
        # required-property checks and rehydrate from this state object.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _HostAccountUserGroupAttachmentState.__new__(_HostAccountUserGroupAttachmentState)
        __props__.__dict__["host_account_ids"] = host_account_ids
        __props__.__dict__["host_id"] = host_id
        __props__.__dict__["instance_id"] = instance_id
        __props__.__dict__["user_group_id"] = user_group_id
        return HostAccountUserGroupAttachment(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="hostAccountIds")
    def host_account_ids(self) -> pulumi.Output[Sequence[str]]:
        """
        A list IDs of the host account.
        """
        return pulumi.get(self, "host_account_ids")
    @property
    @pulumi.getter(name="hostId")
    def host_id(self) -> pulumi.Output[str]:
        """
        The ID of the host.
        """
        return pulumi.get(self, "host_id")
    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> pulumi.Output[str]:
        """
        The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "instance_id")
    @property
    @pulumi.getter(name="userGroupId")
    def user_group_id(self) -> pulumi.Output[str]:
        """
        The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "user_group_id")
| 44.881266 | 172 | 0.65873 | 16,547 | 0.972781 | 0 | 0 | 14,014 | 0.823868 | 0 | 0 | 9,247 | 0.543621 |
8063dfe36de56cbc689a8b310df3ce5003370b6d | 432 | py | Python | str10.py | ABHISHEKSUBHASHSWAMI/String-Manipulation | e22efdbe76069e0280cc1acdeeabc4b663ac4f36 | [
"MIT"
] | null | null | null | str10.py | ABHISHEKSUBHASHSWAMI/String-Manipulation | e22efdbe76069e0280cc1acdeeabc4b663ac4f36 | [
"MIT"
] | null | null | null | str10.py | ABHISHEKSUBHASHSWAMI/String-Manipulation | e22efdbe76069e0280cc1acdeeabc4b663ac4f36 | [
"MIT"
] | null | null | null | #Program to change a given string to a new string where the first and last chars have been exchanged.
def swap_first_last(text):
    """Return *text* with its first and last characters exchanged.

    Strings shorter than two characters are returned unchanged; the
    original slicing duplicated a single character ("a" -> "aa") and
    raised IndexError on the empty string.
    """
    if len(text) < 2:
        return text
    return text[-1] + text[1:-1] + text[0]


if __name__ == "__main__":
    # input() already returns a str, so the old str() wrapper was redundant.
    print(swap_first_last(input("Enter a string :")))
| 43.2 | 101 | 0.641204 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 262 | 0.606481 |
8063fd86f68ebf320baf9273c420614c93e98bab | 990 | py | Python | setup.py | MailboxValidator/mailboxvalidator-python | 740d64a4cd6a32bf7d65903c0d30164a8cfafcde | [
"MIT"
] | 9 | 2018-07-09T06:49:05.000Z | 2022-03-15T07:40:41.000Z | setup.py | MailboxValidator/mailboxvalidator-python | 740d64a4cd6a32bf7d65903c0d30164a8cfafcde | [
"MIT"
] | null | null | null | setup.py | MailboxValidator/mailboxvalidator-python | 740d64a4cd6a32bf7d65903c0d30164a8cfafcde | [
"MIT"
] | 1 | 2021-05-26T12:43:16.000Z | 2021-05-26T12:43:16.000Z | import setuptools
# Read the long description from the README shipped next to this file.
# An explicit encoding avoids depending on the platform's locale default.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="MailboxValidator",
    version="1.2.0",
    author="MailboxValidator.com",
    author_email="support@mailboxvalidator.com",
    description="Email verification module for Python using MailboxValidator API. It validates if the email is valid, from a free provider, contains high-risk keywords, whether it\'s a catch-all address and so much more.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/MailboxValidator/mailboxvalidator-python",
    packages=setuptools.find_packages(),
    tests_require=['pytest>=3.0.6'],
    # setuptools expects a list of classifier strings (was a tuple).
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
8067cbd79c8d52ed6e668b5f22fa1785482aed7a | 1,583 | py | Python | tests/graphql/tasks/test_task_queries.py | kids-first/kf-api-release-coordinator | 52831b2de2e0ce734d567289f3b10d720bce8a9e | [
"Apache-2.0"
] | 2 | 2018-09-12T01:39:05.000Z | 2020-09-23T20:09:05.000Z | tests/graphql/tasks/test_task_queries.py | kids-first/kf-api-release-coordinator | 52831b2de2e0ce734d567289f3b10d720bce8a9e | [
"Apache-2.0"
] | 89 | 2018-04-20T12:41:13.000Z | 2022-02-10T07:39:41.000Z | tests/graphql/tasks/test_task_queries.py | kids-first/kf-api-release-coordinator | 52831b2de2e0ce734d567289f3b10d720bce8a9e | [
"Apache-2.0"
] | 1 | 2018-08-31T18:52:42.000Z | 2018-08-31T18:52:42.000Z | import pytest
from coordinator.api.models import Study, Task
from coordinator.api.factories.release import ReleaseFactory
ALL_TASKS = """
query (
$state: String,
$createdBefore: Float,
$createdAfter: Float,
$orderBy:String
) {
allTasks(
state: $state,
createdBefore: $createdBefore,
createdAfter: $createdAfter,
orderBy: $orderBy
) {
edges {
node {
id
kfId
uuid
state
createdAt
}
}
}
}
"""
@pytest.mark.parametrize(
    "user_type,expected",
    [
        ("admin", lambda: 30),
        ("dev", lambda: 30),
        ("user", lambda: 20),
        ("anon", lambda: 10),
    ],
)
def test_list_all_tasks_permissions(db, test_client, user_type, expected):
    """
    ADMIN - Can query all tasks
    DEV - Can query all tasks
    USER - Can query tasks from published releases, or releases that they
        have a study in.
    anonomous - Can query tasks from published releases
    """
    study = Study(kf_id="SD_00000001")
    study.save()
    # The factory batches only need to exist in the database; the previous
    # `releases =` bindings were immediately overwritten, so drop them.
    ReleaseFactory.create_batch(10, state="staged")
    ReleaseFactory.create_batch(10, state="published")
    ReleaseFactory.create_batch(10, state="staging", studies=[study])

    client = test_client(user_type)
    resp = client.post("/graphql", data={"query": ALL_TASKS})

    # Test that the correct number of releases are returned
    assert len(resp.json()["data"]["allTasks"]["edges"]) == expected()
80690cfff442374625eb395771e04a54901124cb | 3,469 | py | Python | GuessMyNumber.py | AkeBoss-tech/GuessingGame | 28fa96163f96ce0d907c5104975c2e706c483383 | [
"MIT"
] | null | null | null | GuessMyNumber.py | AkeBoss-tech/GuessingGame | 28fa96163f96ce0d907c5104975c2e706c483383 | [
"MIT"
] | 1 | 2020-11-09T17:30:45.000Z | 2020-11-12T17:42:56.000Z | GuessMyNumber.py | AkeBoss-tech/GuessingGame | 28fa96163f96ce0d907c5104975c2e706c483383 | [
"MIT"
] | null | null | null | import random
print("\tWelcome to 'Guess My Number!'")
# oldGuesses (below) records previous guesses and reports whether a new guess is allowed
class oldGuesses(object):
    """Tracks the guesses already made and validates new ones."""

    def __init__(self, low, high):
        # Inclusive bounds of the secret number's range.
        self.guesses = []
        self.low = low
        self.high = high

    def addNewGuess(self, guess):
        """Record *guess* if it has not been seen before."""
        if guess not in self.guesses:
            self.guesses.append(guess)

    def checkIfPossible(self, guess):
        """Return True if *guess* is inside the range and not a repeat.

        Bounds are inclusive: the game draws the secret number with
        random.randint(low, high), which can return either endpoint, so
        guessing an endpoint must be allowed.  (The original used strict
        `<` and wrongly rejected the endpoints.)  The guess is recorded
        either way so later repeats are rejected.
        """
        in_range = self.low <= guess <= self.high
        is_new = guess not in self.guesses
        self.addNewGuess(guess)
        return in_range and is_new
def _prompt_guess(guesses):
    """Keep prompting until the player enters an in-range, unused guess."""
    while True:
        guess = int(input("\nTake a Guess: "))
        if guesses.checkIfPossible(guess):
            return guess
        print("Try Again.")


def game():
    """Play one round of 'Guess My Number'.

    The prompt/validate loop that was copy-pasted twice (with an `x`
    flag variable) is factored into _prompt_guess; all printed output is
    unchanged.
    """
    # Ask the User for the range of the secret number.
    lowest = int(input("\nWhat do you want the lowest (min) number to be? "))
    highest = int(input("\nWhat do you want the highest (max) number to be? "))
    print("Ok, I'm thinking of a number between ", lowest, " and", highest)
    # randint is inclusive of both endpoints.
    the_number = random.randint(lowest, highest)
    guesses = oldGuesses(lowest, highest)
    tries = 0
    guess = _prompt_guess(guesses)
    tries += 1
    while guess != the_number:
        # Tighten the hint bounds toward the wrong guess before hinting.
        if guess > the_number:
            if highest > guess:
                highest = guess
            print("\nLower..."
                  "\nHint:", lowest, "< x <", highest)
        else:
            if lowest < guess:
                lowest = guess
            print("\nHigher..."
                  "\nHint:", lowest, "< x <", highest)
        guess = _prompt_guess(guesses)
        tries += 1
    # Congratulate the player; the message depends on the score.
    print("\nYou Guessed It! The number was ", the_number)
    if tries < 5:
        print("Nice Job! You did it in ", tries, " tries. Its a high score!")
    else:
        print("You did it in ", tries, " tries")
# Running the Game
game()
response = input("\nPlay Again (y/n)? ").lower()
# While Loop so the game keeps going whenever the player says y or Y
while response == "y":
    game()
    # .lower() was missing here, so answering "Y" only worked on the very
    # first prompt; normalize every answer the same way.
    response = input("\nPlay Again (y/n)? ").lower()
# Terminating the Program
print("\nThanks for playing!")
input("Enter to Exit")
806a306e227f3481aa70716fefb9ae27d46b0347 | 2,249 | py | Python | videojuegos/arcanoid/ball.py | joseluisGA/videojuegos | a8795447fd40cd8fe032cadb4f2a1bd309a6e0de | [
"MIT"
] | null | null | null | videojuegos/arcanoid/ball.py | joseluisGA/videojuegos | a8795447fd40cd8fe032cadb4f2a1bd309a6e0de | [
"MIT"
] | null | null | null | videojuegos/arcanoid/ball.py | joseluisGA/videojuegos | a8795447fd40cd8fe032cadb4f2a1bd309a6e0de | [
"MIT"
] | null | null | null | import pygame
from settings import *
from pygame import Vector2
import random
from brick import Brick
class Ball(pygame.sprite.Sprite):
    """The ball sprite: sleeps on the pad, then moves and bounces."""

    def __init__(self, x, y, groups, ball_image, bounce_fx):
        pygame.sprite.Sprite.__init__(self, groups)
        self.image = ball_image
        self.rect = self.image.get_rect()
        self.rect.center = Vector2(x, y)
        self.velocity = Vector2(0, 0)
        self.bounce_fx = bounce_fx
        # The ball stays put until the pad first moves (see update()).
        self.asleep = True

    def update(self, bricks, pad, ball_lost_callback):
        """Advance one frame; invokes ball_lost_callback when the ball exits below."""
        if self.asleep:
            # Launch in the direction the pad is moving, angled upward.
            if pad.velocity != 0:
                self.velocity = Vector2(pad.velocity, -5).normalize()
                self.asleep = False
            return
        position = self.rect.center + self.velocity.normalize() * BALL_SPEED
        # Move and collide one axis at a time so bounces resolve per-axis.
        self.rect.centerx = position.x
        self.collide_with('x', bricks)
        self.rect.centery = position.y
        self.collide_with('y', bricks)
        # Bounce off the left/right/top screen edges.
        if self.rect.centerx < 0:
            self.rect.centerx = 0
            self.velocity.x *= -1
            self.bounce_fx.play()
        if self.rect.centerx > WIDTH-1:
            self.rect.centerx = WIDTH-1
            self.velocity.x *= -1
            self.bounce_fx.play()
        if self.rect.centery < 0:
            self.rect.centery = 0
            self.velocity.y *= -1
            self.bounce_fx.play()
        # Falling off the bottom loses the ball.
        if self.rect.centery > HEIGHT-1:
            self.kill()
            ball_lost_callback()

    def collide_with(self, dir, groups):
        """Resolve a collision along axis *dir* ('x' or 'y') against *groups*."""
        hits = pygame.sprite.spritecollide(self, groups, False)
        if not hits:
            return
        if dir == 'x':
            # Snap to the hit sprite's edge, then reflect horizontally.
            if self.velocity.x > 0:
                self.rect.right = hits[0].rect.left
            if self.velocity.x < 0:
                self.rect.left = hits[0].rect.right
            self.velocity.x *= -1
        if dir == 'y':
            if self.velocity.y > 0:
                self.rect.bottom = hits[0].rect.top
            if self.velocity.y < 0:
                self.rect.top = hits[0].rect.bottom
            self.velocity.y *= -1
        # isinstance replaces the non-idiomatic `type(...) == Brick` check.
        if isinstance(hits[0], Brick):
            hits[0].breakIt()
806ab4cb70bbf55687e24d3de7904796e0abc17b | 1,839 | py | Python | config/tools/export.py | ghjinlei/one_mmorpg | 2e1b3393a0510020137f34f274ed419e7b82e22f | [
"MIT"
] | null | null | null | config/tools/export.py | ghjinlei/one_mmorpg | 2e1b3393a0510020137f34f274ed419e7b82e22f | [
"MIT"
] | null | null | null | config/tools/export.py | ghjinlei/one_mmorpg | 2e1b3393a0510020137f34f274ed419e7b82e22f | [
"MIT"
] | 1 | 2021-03-05T01:58:20.000Z | 2021-03-05T01:58:20.000Z | #!/usr/bin/env python3
#coding:utf-8
import os, sys
from sys import exit
from utils.excel import read_excel_data
import utils.path as utils_path
from utils.logger import Logger
# All relative paths below (the spreadsheet, generated outputs) resolve
# from the tools root directory.
os.chdir(utils_path.TOOLS_ROOT_PATH)
# Column name -> declared type for the export-commands spreadsheet
# (presumably consumed by read_excel_data to coerce cells -- TODO confirm).
key_map = {
    "cmd" : "string",
    "lang" : "string",
    "script" : "string",
    "args" : "string",
    "output_client" : "string",
    "output_server" : "string",
}
# Rows of export_cmds.xlsx sheet "main", keyed by the "cmd" column.
cmd_info_map = read_excel_data("./export_cmds.xlsx", "main", key_map, "cmd")
logger = Logger(sys.stderr)
def save_autocode(filepath, data):
    """Write generated code to *filepath*, creating parent directories.

    CRLF line endings are normalized to LF; opening with ``newline=""``
    stops the text layer from re-translating "\\n" back to "\\r\\n" on
    Windows.  The ``with`` block already flushes and closes the file, so
    the old explicit flush()/close() calls were redundant, and mode "w"
    replaces the needless "w+".
    """
    dirpath = os.path.dirname(filepath)
    # A bare filename yields dirpath == "", and os.makedirs("") raises.
    if dirpath and not os.path.exists(dirpath):
        os.makedirs(dirpath)
    data = data.replace("\r\n", "\n")
    with open(filepath, "w", encoding="utf-8", newline="") as f:
        f.write(data)
def gen_autocode_by_info(cmd_info):
    """Run the command's generator script once per configured target.

    "-t c" produces the client flavour and "-t s" the server flavour;
    the script's captured stdout is written to the configured output
    path.  The two previously copy-pasted branches are deduplicated.
    Always returns True (errors in the child process are not detected
    by os.popen's read).
    """
    for output_key, target_flag in (("output_client", "-t c"),
                                    ("output_server", "-t s")):
        if output_key in cmd_info:
            pfile = os.popen("%s %s %s %s" % (
                cmd_info["lang"], cmd_info["script"],
                target_flag, cmd_info["args"]))
            data = pfile.read()
            pfile.close()
            save_autocode(cmd_info[output_key], data)
    return True
def gen_autocode(cmd):
    """Generate client/server autocode for the command named *cmd*."""
    info = cmd_info_map[cmd]
    logger.info("start gen %s" % cmd)
    succeeded = gen_autocode_by_info(info)
    template = "gen %s success" if succeeded else "gen %s failed"
    logger.info(template % cmd)
def main():
    """Interactive prompt loop with no CLI args; one-shot when a cmd is given."""
    argc = len(sys.argv)
    if argc == 1:
        while True:
            cmd = input("cmd:")
            gen_autocode(cmd)
    elif argc == 2:
        gen_autocode(sys.argv[1])
# Script entry point.
if __name__ == "__main__":
    main()
| 25.191781 | 104 | 0.590538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 382 | 0.207722 |
806addc0f2a0d7ca87049b8d8a87409dee80e047 | 9,481 | py | Python | src/compiler/plans.py | fritzo/pomagma | 224bb6adab3fc68e2d853e6365b4b86a8f7f468f | [
"Apache-2.0"
] | 10 | 2015-06-09T00:25:01.000Z | 2019-06-11T16:07:31.000Z | src/compiler/plans.py | fritzo/pomagma | 224bb6adab3fc68e2d853e6365b4b86a8f7f468f | [
"Apache-2.0"
] | 25 | 2015-03-23T23:16:01.000Z | 2017-08-29T03:35:59.000Z | src/compiler/plans.py | fritzo/pomagma | 224bb6adab3fc68e2d853e6365b4b86a8f7f468f | [
"Apache-2.0"
] | null | null | null | import math
from pomagma.compiler.expressions import Expression_1
from pomagma.compiler.util import log_sum_exp, memoize_make, set_with
def assert_in(element, set_):
    """Assert that *element* is a member of *set_* (no-op under -O)."""
    assert element in set_, (element, set_)
def assert_not_in(element, set_):
    """Assert that *element* is not a member of *set_* (no-op under -O)."""
    assert element not in set_, (element, set_)
def assert_subset(subset, set_):
    """Assert that *subset* is a subset of *set_* (no-op under -O)."""
    assert subset <= set_, (subset, set_)
OBJECT_COUNT = 1e4  # optimize for this many obs
LOGIC_COST = OBJECT_COUNT / 64.0  # perform logic on 64-bit words
# Costs throughout this module are expressed in units of log(OBJECT_COUNT).
LOG_OBJECT_COUNT = math.log(OBJECT_COUNT)
# Wrapper expression; Iter.optimize() unwraps nested UNKNOWN(...) layers.
UNKNOWN = Expression_1('UNKNOWN')
def add_costs(costs):
    """Combine costs via log-sum-exp, keeping the log(OBJECT_COUNT) units."""
    scaled = [LOG_OBJECT_COUNT * c for c in costs]
    return log_sum_exp(*scaled) / LOG_OBJECT_COUNT
class Plan(object):
    """Base class for query plans.

    Subclasses implement ``op_count`` (estimated operation count);
    ``cost`` and ``rank`` are derived from it lazily and cached in
    slots, which assumes op_count is deterministic for a given plan.
    """
    __slots__ = ['_args', '_cost', '_rank']
    def __init__(self, *args):
        self._args = args
        self._cost = None  # lazily filled by the `cost` property
        self._rank = None  # lazily filled by the `rank` property
    @property
    def cost(self):
        # Cost in units of log(OBJECT_COUNT), cached after first access.
        if self._cost is None:
            self._cost = math.log(self.op_count()) / LOG_OBJECT_COUNT
        return self._cost
    @property
    def rank(self):
        # Total order: cheaper first; ties broken by shorter, then
        # lexicographically smaller repr.
        if self._rank is None:
            s = repr(self)
            self._rank = self.cost, len(s), s
        return self._rank
    def __lt__(self, other):
        return self.rank < other.rank
    def permute_symbols(self, perm):
        # Rebuild this plan with each argument's symbols permuted.
        # NOTE(review): `make` appears to be supplied by the memoize_make
        # decorator applied to the subclasses -- confirm.
        return self.__class__.make(*(
            a.permute_symbols(perm)
            for a in self._args
        ))
@memoize_make
class Iter(Plan):
    """Plan node that iterates `var` over all objects, running `body`.

    On construction, optimize() hoists suitable Test/Let nodes out of
    the body chain into this loop (self.tests / self.lets / self.stack)
    so they can be fused into the iteration.
    """
    __slots__ = ['_repr', 'var', 'body', 'tests', 'lets', 'stack']
    def __init__(self, var, body):
        Plan.__init__(self, var, body)
        assert var.is_var(), var
        assert isinstance(body, Plan), body
        self._repr = None
        self.var = var
        self.body = body
        self.tests = []
        self.lets = {}
        self.stack = set()
        self.optimize()
    def add_test(self, test):
        """Fuse a hoisted Test into this loop."""
        assert isinstance(test, Test), 'add_test arg is not a Test'
        self.tests.append(test.expr)
        self.stack.add(test)
    def add_let(self, let):
        """Fuse a hoisted Let into this loop."""
        assert isinstance(let, Let), 'add_let arg is not a Let'
        assert let.var not in self.lets, 'add_let var is not in Iter.lets'
        self.lets[let.var] = let.expr
        self.stack.add(let)
    def __repr__(self):
        # (A richer repr that also printed hoisted tests/lets was left
        # here commented out; removed as dead code.)
        if self._repr is None:
            self._repr = 'for {}: {}'.format(self.var, self.body)
        return self._repr
    def validate(self, bound):
        # The loop variable must be fresh; everything referenced by the
        # hoisted tests/lets must be bound once `var` is added.
        assert_not_in(self.var, bound)
        bound = set_with(bound, self.var)
        for test in self.tests:
            assert_subset(test.vars, bound)
        for var, expr in self.lets.iteritems():
            assert_subset(expr.vars, bound)
            assert_not_in(var, bound)
        self.body.validate(bound)
    def op_count(self, stack=None):
        # Word-level logic for fused tests/lets, plus the per-object body
        # cost scaled by the survival probability of each fused filter.
        logic_cost = LOGIC_COST * (len(self.tests) + len(self.lets))
        object_count = OBJECT_COUNT
        for test_or_let in self.stack:
            object_count *= test_or_let.prob()
        let_cost = len(self.lets)
        body_cost = self.body.op_count(stack=self.stack)
        return logic_cost + object_count * (let_cost + body_cost)
    def optimize(self):
        # Walk the chain of Test/Let nodes below this loop and hoist any
        # whose expression (after unwrapping UNKNOWN) mentions the loop
        # variable exactly once as a direct argument, and does not depend
        # on variables introduced by intervening Lets.
        node = self.body
        new_lets = set()
        while isinstance(node, Test) or isinstance(node, Let):
            if isinstance(node, Let):
                new_lets.add(node.var)
            expr = node.expr
            while expr.name == 'UNKNOWN':
                expr = expr.args[0]
            optimizable = (
                self.var in expr.vars and
                expr.vars.isdisjoint(new_lets) and
                sum(1 for arg in expr.args if self.var == arg) == 1 and
                sum(1 for arg in expr.args if self.var in arg.vars) == 1 and
                (isinstance(node, Let) or expr.is_rel())
            )
            if optimizable:
                if isinstance(node, Test):
                    self.add_test(node)
                else:
                    self.add_let(node)
            node = node.body
@memoize_make
class IterInvInjective(Plan):
    """Iterate over the preimage of an injective function's known value."""

    __slots__ = ['fun', 'value', 'var', 'body']

    def __init__(self, fun, body):
        Plan.__init__(self, fun, body)
        assert fun.arity == 'InjectiveFunction'
        self.body = body
        self.fun = fun.name
        self.value = fun.var
        [self.var] = fun.args

    def __repr__(self):
        return 'for {0} {1}: {2}'.format(self.fun, self.var, self.body)

    def validate(self, bound):
        assert_in(self.value, bound)
        assert_not_in(self.var, bound)
        extended = set_with(bound, self.var)
        self.body.validate(extended)

    def op_count(self, stack=None):
        body_cost = self.body.op_count()
        return 4.0 + 0.5 * body_cost  # amortized
@memoize_make
class IterInvBinary(Plan):
    """Iterate over all argument pairs mapping to a known binary value."""

    __slots__ = ['fun', 'value', 'var1', 'var2', 'body']

    def __init__(self, fun, body):
        Plan.__init__(self, fun, body)
        assert fun.arity in ['BinaryFunction', 'SymmetricFunction']
        self.body = body
        self.fun = fun.name
        self.value = fun.var
        self.var1, self.var2 = fun.args

    def __repr__(self):
        return 'for {0} {1} {2}: {3}'.format(
            self.fun, self.var1, self.var2, self.body)

    def validate(self, bound):
        assert_in(self.value, bound)
        assert_not_in(self.var1, bound)
        assert_not_in(self.var2, bound)
        extended = set_with(bound, self.var1, self.var2)
        self.body.validate(extended)

    def op_count(self, stack=None):
        body_cost = self.body.op_count()
        return 4.0 + 0.25 * OBJECT_COUNT * body_cost  # amortized
@memoize_make
class IterInvBinaryRange(Plan):
    """Iterate over one argument of a binary function, the other fixed.

    `lhs_fixed` records which side is already bound; the free side is
    the loop variable.
    """
    __slots__ = ['fun', 'value', 'var1', 'var2', 'lhs_fixed', 'body']
    def __init__(self, fun, fixed, body):
        Plan.__init__(self, fun, fixed, body)
        assert fun.arity in ['BinaryFunction', 'SymmetricFunction']
        self.fun = fun.name
        self.value = fun.var
        self.var1, self.var2 = fun.args
        assert self.var1 != self.var2
        assert self.var1 == fixed or self.var2 == fixed
        self.lhs_fixed = (fixed == self.var1)
        self.body = body
    def __repr__(self):
        # Parentheses mark the fixed argument.
        if self.lhs_fixed:
            return 'for {0} ({1}) {2}: {3}'.format(
                self.fun, self.var1, self.var2, self.body)
        else:
            return 'for {0} {1} ({2}): {3}'.format(
                self.fun, self.var1, self.var2, self.body)
    def validate(self, bound):
        # The value and the fixed argument must be bound; the free
        # argument must be fresh and becomes bound inside the body.
        assert self.value in bound
        if self.lhs_fixed:
            assert_in(self.var1, bound)
            assert_not_in(self.var2, bound)
            self.body.validate(set_with(bound, self.var2))
        else:
            assert_in(self.var2, bound)
            assert_not_in(self.var1, bound)
            self.body.validate(set_with(bound, self.var1))
    def op_count(self, stack=None):
        return 4.0 + 0.5 * self.body.op_count()  # amortized
@memoize_make
class Let(Plan):
    """Plan node binding `var` to a function application, then running body."""
    __slots__ = ['var', 'expr', 'body']
    def __init__(self, expr, body):
        Plan.__init__(self, expr, body)
        assert isinstance(body, Plan)
        assert expr.is_fun()
        self.var = expr.var
        self.expr = expr
        self.body = body
    def __repr__(self):
        return 'let {0}: {1}'.format(self.var, self.body)
    def validate(self, bound):
        assert_subset(self.expr.vars, bound)
        assert_not_in(self.var, bound)
        self.body.validate(set_with(bound, self.var))
    # Probability estimates used by the cost model, keyed by arity
    # (name-mangled to _Let__probs; accessed only via prob()).
    __probs = {'NullaryFunction': 0.9}
    def prob(self):
        return self.__probs.get(self.expr.arity, 0.1)
    def op_count(self, stack=None):
        # If this Let was hoisted into the enclosing Iter (it is on the
        # iterator's stack), its cost is already accounted for there.
        if stack and self in stack:
            return self.body.op_count(stack=stack)
        else:
            return 1.0 + self.prob() * self.body.op_count(stack=stack)
@memoize_make
class Test(Plan):
    """Plan node that runs body only if a compound expression holds."""
    __slots__ = ['expr', 'body']
    def __init__(self, expr, body):
        Plan.__init__(self, expr, body)
        assert not expr.is_var()
        assert isinstance(body, Plan)
        self.expr = expr
        self.body = body
    def __repr__(self):
        return 'if {0}: {1}'.format(self.expr, self.body)
    def validate(self, bound):
        assert_subset(self.expr.vars, bound)
        self.body.validate(bound)
    # Probability estimates used by the cost model, keyed by relation
    # name (name-mangled to _Test__probs; accessed only via prob()).
    __probs = {'NLESS': 0.9}
    def prob(self):
        return self.__probs.get(self.expr.name, 0.1)
    def op_count(self, stack=None):
        # If this Test was hoisted into the enclosing Iter (it is on the
        # iterator's stack), its cost is already accounted for there.
        if stack and self in stack:
            return self.body.op_count(stack=stack)
        else:
            return 1.0 + self.prob() * self.body.op_count(stack=stack)
@memoize_make
class Ensure(Plan):
    """Leaf plan node asserting that a compound expression holds."""

    __slots__ = ['expr']

    def __init__(self, expr):
        Plan.__init__(self, expr)
        assert expr.args, ('expr is not compound', expr)
        self.expr = expr

    def __repr__(self):
        return 'ensure {0}'.format(self.expr)

    def validate(self, bound):
        assert_subset(self.expr.vars, bound)

    def op_count(self, stack=None):
        # Count how many sides of an EQUATION are function applications;
        # the cost grows with each one.
        fun_count = 0
        if self.expr.name == 'EQUATION':
            fun_count = sum(1 for arg in self.expr.args if arg.is_fun())
        return [1.0, 1.0 + 0.5 * 1.0, 2.0 + 0.75 * 1.0][fun_count]
| 30.003165 | 79 | 0.577893 | 8,582 | 0.905179 | 0 | 0 | 8,272 | 0.872482 | 0 | 0 | 951 | 0.100306 |
806f98a5c21a064cfb653296145b797c6a50d946 | 543 | py | Python | ossim/synchro/urls.py | devil-r/Os-simulator | afb99d57d16ebb598a66fcd967f1d67247b4efb4 | [
"MIT"
] | null | null | null | ossim/synchro/urls.py | devil-r/Os-simulator | afb99d57d16ebb598a66fcd967f1d67247b4efb4 | [
"MIT"
] | null | null | null | ossim/synchro/urls.py | devil-r/Os-simulator | afb99d57d16ebb598a66fcd967f1d67247b4efb4 | [
"MIT"
] | null | null | null | from django.conf.urls import url,include
from django.contrib import admin
from . import views
# URL namespace for reversing, e.g. reverse("synchro:semaphores").
app_name = 'synchro'
# The two "demo" routes capture a numeric primary key as `pk`.
urlpatterns =[
    url(r'^semaphores/$', views.semaphores, name='semaphores'),
    url(r'^socket/$', views.socket, name='socket'),
    url(r'^deadlocks/$', views.deadlocks, name='deadlocks'),
    url(r'^semaphores/demo/(?P<pk>[0-9]+)/$', views.sem_demo, name='sem_demo'),
    url(r'^socket/demo/(?P<pk>[0-9]+)/$', views.socket_demo, name='socket_demo'),
    url(r'^bankalgo/$', views.bankalgo, name='bankalgo'),
]
806fcfc95b660907356bd62342b9bc9e14255a07 | 237 | py | Python | pySpectralFPK/__init__.py | alanmatzumiya/Paper | d65ff68475eb72324594701d06754d0d005f6a86 | [
"MIT"
] | 2 | 2019-03-19T23:55:45.000Z | 2020-06-03T19:10:51.000Z | pySpectralFPK/__init__.py | alanmatzumiya/Paper | d65ff68475eb72324594701d06754d0d005f6a86 | [
"MIT"
] | null | null | null | pySpectralFPK/__init__.py | alanmatzumiya/Paper | d65ff68475eb72324594701d06754d0d005f6a86 | [
"MIT"
] | null | null | null | """
Solvers define how a PDE is solved, i.e., advanced in time.
.. autosummary::
.. codeauthor:: Alan Matzumiya <alan.matzumiya@gmail.com>
"""
from typing import List
from .setup_solver import FPK_solver
# Public API of this package: only the solver entry point is re-exported.
__all__ = [
    "FPK_solver"
]
8070ca9d70fe00ca55a340a59c8f92988b806d9b | 1,427 | py | Python | style/predict/servable/base.py | imagination-ai/kerem-side-projects-monorepo | 3d9da9d57f305ac2d6a03bab3787acfbee7269ee | [
"MIT"
] | null | null | null | style/predict/servable/base.py | imagination-ai/kerem-side-projects-monorepo | 3d9da9d57f305ac2d6a03bab3787acfbee7269ee | [
"MIT"
] | 2 | 2022-01-20T15:46:39.000Z | 2022-02-16T20:51:47.000Z | style/predict/servable/base.py | imagination-ai/kerem-side-projects-monorepo | 3d9da9d57f305ac2d6a03bab3787acfbee7269ee | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
import dill
class BaseServable(ABC):
    """Abstract base for model servables: wraps a model and defines the
    export / load / inference contract subclasses must implement."""
    # File name used for the model's schema description.
    SCHEMA_SLUG = "schema.json"
    # Subclasses are expected to declare these class-level identifiers.
    MODEL_TYPE: str
    MODEL_VARIANT: str
    def __init__(self, model):
        """Store the wrapped model object (may be None for mocks)."""
        self.model = model
    @abstractmethod
    def export(self, path):
        """Serialize the wrapped model to *path*."""
        raise NotImplementedError
    @abstractmethod
    def run_inference(self, *args, **kwargs):
        """Run the model on the given input and return its prediction."""
        raise NotImplementedError
    @classmethod
    @abstractmethod
    def load(cls, path):
        """Build a servable from a model previously written by export()."""
        raise NotImplementedError
class SklearnBasedClassifierServable(BaseServable):
    """Servable wrapping a scikit-learn-style text classifier persisted with dill.

    The wrapped model must provide predict(), and for the multiclass helper
    also predict_proba() and classes_.
    """

    MODEL_VARIANT = "sklearn-classification"

    def export(self, path):
        """Serialize the wrapped model to *path* with dill."""
        with open(path, "wb") as f:
            dill.dump(self.model, f)

    def run_inference(self, texts):
        """Return the predicted label for a single text.

        The model's predict() expects a batch, so the input is wrapped in a
        list and the first (only) prediction is returned.
        """
        # Fix: dropped a leftover debug print of the prediction.
        prediction = self.model.predict([texts])
        return prediction[0]

    def run_inference_multiclass(self, texts):
        """Return (label, probability) pairs sorted by descending probability."""
        probabilities = self.model.predict_proba([texts]).tolist()[0]
        labels = self.model.classes_
        return sorted(zip(labels, probabilities), reverse=True, key=lambda pair: pair[1])

    @classmethod
    def load(cls, path):
        """Deserialize a servable previously written by export().

        Fix: the original left the file handle open (dill.load(open(...)));
        a with-block closes it deterministically.
        """
        with open(path, "rb") as f:
            return cls(model=dill.load(f))
class MockServable(BaseServable):
    """Stub servable for tests: no real model, canned predictions."""

    def run_inference(self, texts):
        """Ignore the input and return a fixed label->score mapping."""
        canned_scores = {"Bon jovi": 0.15, "Kerem": 0.55, "Osman": 0.30}
        return canned_scores

    @classmethod
    def load(cls, path):
        """Ignore *path* and build a model-less instance."""
        return cls(model=None)

    def export(self, path):
        """Exporting a mock is a no-op."""
        return None
| 23.393443 | 76 | 0.641205 | 1,369 | 0.959355 | 0 | 0 | 422 | 0.295725 | 0 | 0 | 69 | 0.048353 |
80723d94b3de6b981e622070fc675bae6d441510 | 4,108 | py | Python | bot/Bot.py | Facco98/TwitchBotPy | 27b4fb0f76542db68621b52ada316bf00fbf68d2 | [
"MIT"
] | 1 | 2020-05-14T02:14:10.000Z | 2020-05-14T02:14:10.000Z | bot/Bot.py | Facco98/TwitchBotPy | 27b4fb0f76542db68621b52ada316bf00fbf68d2 | [
"MIT"
] | null | null | null | bot/Bot.py | Facco98/TwitchBotPy | 27b4fb0f76542db68621b52ada316bf00fbf68d2 | [
"MIT"
] | null | null | null | import websocket
from threading import Thread
from bot.Command import Command
import time
class Bot:
    """Minimal IRC-over-websocket chat bot with a background listener thread."""

    def __init__(self, username, password, host):
        self.__commands = dict()  # command name -> Command instance
        self._username = username
        self._password = password
        self.__host = host
        self.__threadStarted = False
        self.__thread = None

    def connect(self):
        """Open the websocket connection to the configured host."""
        self.__websocket = websocket.create_connection(self.__host)

    def join(self, channel):
        """Join an IRC channel (*channel* is the name without the leading '#')."""
        self.__websocket.send("JOIN #" + channel)

    def send(self, msg):
        """Send a raw protocol line."""
        self.__websocket.send(msg)

    def send_message_to(self, channel, message=""):
        """Send a chat message to a channel (name without the leading '#')."""
        self.__websocket.send("PRIVMSG #" + channel + " :" + message)

    def start_listening(self, callback):
        """Start a daemon thread feeding every received message to *callback*.

        Does nothing if a listener thread is already running.
        """
        try:
            if not self.__threadStarted:
                self.__thread = Thread(target=self.__listen_function__, args=(callback,))
                # Fix: the original assigned a misspelled 'deamon' attribute,
                # which had no effect; setting 'daemon' lets the interpreter
                # exit even if the listener is still blocked on recv().
                self.__thread.daemon = True
                self.__threadStarted = True
                self.__thread.start()
        except Exception:
            # Best effort: a failure to start the thread leaves the bot usable.
            pass

    def __listen_function__(self, callback):
        """Thread body: pump received messages into *callback* until stopped."""
        try:
            while self.__threadStarted:
                received = self.__websocket.recv()
                callback(received)
        except Exception:
            # Socket errors (including the connection being closed) end the loop.
            pass

    def stop_listening(self):
        """Ask the listener thread to stop and forget the thread object."""
        self.__threadStarted = False
        self.__thread = None

    def disconnect(self):
        """Stop listening (if running) and close the websocket."""
        if self.__threadStarted:
            self.stop_listening()
        self.__websocket.close()

    def add_command(self, command):
        """Register a Command under its name; raise ValueError for other types."""
        if isinstance(command, Command):
            self.__commands[command.name()] = command
        else:
            raise ValueError("\"command\" must be an instance of class Command")

    def responds_to(self, cmd):
        """Return True if *cmd* was registered via add_command()."""
        return cmd in self.__commands

    def execute_command(self, cmd, params):
        """Run a registered command with *params*; unknown commands are ignored."""
        if self.responds_to(cmd):
            self.__commands[cmd].execute(params)
class TwitchBot(Bot):
    """Twitch chat bot: handles login, PING/PONG keep-alives and dispatching
    of chat messages and '!'-prefixed commands."""

    def __init__(self, username, password):
        super().__init__(username, password, "ws://irc-ws.chat.twitch.tv:80")
        # Hooks that users of the class may replace with their own callables.
        self.on_message = self.__default_on_message
        self.on_command = self.__default_on_command
        self.unknown_command = self.__default_unknown_command

    def connect(self, channels=None):
        """Connect, authenticate, and join *channels* (names without '#').

        Fix: the original used a mutable default argument (channels=[]);
        None behaves identically for callers.
        """
        super().connect()
        super().send("PASS " + self._password)
        super().send("NICK " + self._username)
        for channel in channels or []:
            self.join(channel)

    def start_listening(self, callback=None):
        """Start the listener; defaults to this bot's own dispatch()."""
        if callback is None:
            super().start_listening(self.dispatch)
        else:
            super().start_listening(callback)

    def dispatch(self, msg):
        """Route one raw IRC line: answer PINGs, otherwise parse sender,
        channel and content and hand off to on_command/on_message."""
        if msg == "PING :tmi.twitch.tv":
            super().send("PONG :tmi.twitch.tv")
        else:
            try:
                sender_end = msg.index("!")
                who = msg[1:sender_end]
                channel_start = msg.index("#")
                channel_end = msg.index(" :")
                canale = msg[channel_start + 1:channel_end]
                content = msg[channel_end + 2:]
                if content.startswith("!"):
                    cmd, other = self.__parse_command(content + " ")
                    self.on_command(cmd.strip(), other.strip(), who, canale)
                else:
                    self.on_message(content.strip(), who, canale)
            except Exception:
                # Non-PRIVMSG lines (server notices etc.) are ignored.
                pass

    def __parse_command(self, text):
        """Split '!cmd rest ' into ('cmd', 'rest').

        *text* is guaranteed by dispatch() to contain a space.  Fix: the
        original wrapped this in a try/except that silently returned None,
        which would have crashed the caller's tuple unpack; neither find()
        nor slicing can raise here, so the guard is gone.
        """
        boundary = text.find(" ")
        return text[1:boundary], text[boundary + 1:]

    def __default_on_message(self, msg, who, channel):
        """Default message hook: do nothing."""
        pass

    def __default_on_command(self, cmd, other, who, channel):
        """Default command hook: run registered commands, else report unknown."""
        if super().responds_to(cmd):
            super().execute_command(cmd, [other, who, channel])
        else:
            self.unknown_command(cmd, who, channel)

    def __default_unknown_command(self, cmd, who, channel):
        """Tell *who* in *channel* that the command is unknown.

        Fixes: method name typo ('defualt') corrected, and the channel is
        passed bare because send_message_to() already prefixes '#' (the
        original produced '##channel' and the reply never arrived).
        """
        super().send_message_to(channel, "@" + who + ", unknown command")
| 29.342857 | 89 | 0.579357 | 4,010 | 0.976144 | 0 | 0 | 0 | 0 | 0 | 0 | 206 | 0.050146 |
8072fc6a4d2ad15263a6a7c0a5c6bd8b4fe8169a | 790 | py | Python | test_python_toolbox/test_cheat_hashing.py | hboshnak/python_toolbox | cb9ef64b48f1d03275484d707dc5079b6701ad0c | [
"MIT"
] | 119 | 2015-02-05T17:59:47.000Z | 2022-02-21T22:43:40.000Z | test_python_toolbox/test_cheat_hashing.py | hboshnak/python_toolbox | cb9ef64b48f1d03275484d707dc5079b6701ad0c | [
"MIT"
] | 4 | 2019-04-24T14:01:14.000Z | 2020-05-21T12:03:29.000Z | test_python_toolbox/test_cheat_hashing.py | hboshnak/python_toolbox | cb9ef64b48f1d03275484d707dc5079b6701ad0c | [
"MIT"
] | 14 | 2015-03-30T06:30:42.000Z | 2021-12-24T23:45:11.000Z | # Copyright 2009-2017 Ram Rachum.
# This program is distributed under the MIT license.
'''Testing module for `python_toolbox.abc_tools.AbstractStaticMethod`.'''
import copy
from python_toolbox.cheat_hashing import cheat_hash
def test_cheat_hash():
    '''Test `cheat_hash` on various objects.'''
    samples = [
        1,
        7,
        4.5,
        [1, 2, 3.4],
        (1, 2, 3.4),
        {1: 2, 3: 4.5},
        {1, 2, 3.4},
        [1, [1, 2], 3],
        [1, {frozenset((1, 2)): 'meow'}, 3],
        sum,
        None,
        (None, {None: None}),
    ]
    # The hash must be stable across calls and identical for deep copies.
    for original, clone in zip(samples, copy.deepcopy(samples)):
        first = cheat_hash(original)
        second = cheat_hash(original)
        third = cheat_hash(clone)
        fourth = cheat_hash(clone)
        assert first == second == third == fourth
| 23.235294 | 73 | 0.562025 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 207 | 0.262025 |
807387b7956f8f032bd74789931a717ad7a739aa | 2,374 | py | Python | TikiBot/dispensing_screen.py | schuetzi99/TikiBot | cf940571af897aec048b1926b0a0b6d00997b032 | [
"BSD-2-Clause"
] | null | null | null | TikiBot/dispensing_screen.py | schuetzi99/TikiBot | cf940571af897aec048b1926b0a0b6d00997b032 | [
"BSD-2-Clause"
] | null | null | null | TikiBot/dispensing_screen.py | schuetzi99/TikiBot | cf940571af897aec048b1926b0a0b6d00997b032 | [
"BSD-2-Clause"
] | null | null | null | try: # Python 2
from Tkinter import * # noqa
except ImportError: # Python 3
from tkinter import * # noqa
import time
from rectbutton import RectButton
from serial_connection import SerialConnection
# Interval between dispensing-progress updates, in milliseconds.
UPDATE_MS = 20
# Minimum interval between display refreshes, in milliseconds
# (only referenced from commented-out throttling code below).
DISPLAY_MS = 125
class DispensingScreen(Frame):
    """Tkinter screen shown while a recipe is being dispensed: displays
    per-ingredient progress and offers a cancel ("Abbruch") button."""
    def __init__(self, master, recipe, amount):
        """Start dispensing *amount* of *recipe* and schedule screen updates."""
        super(DispensingScreen, self).__init__(master)
        self.master = master
        self.ser = SerialConnection()
        self.recipe = recipe
        self.last_disp = 0.0  # last display-refresh timestamp (unused; see commented code)
        self.desc = Text(self, relief=FLAT, wrap=NONE, state=DISABLED)
        backbtn = RectButton(self, text="Abbruch", command=self.handle_button_back)
        self.bgcolor = master.bgcolor
        self.configure(bg=self.bgcolor)
        self.desc.grid(column=0, row=0, sticky=N+E+W+S)
        backbtn.grid(column=0, row=1, padx=10, pady=10, sticky=E+W+S)
        self.grid_columnconfigure(0, weight=1)
        self.grid_rowconfigure(0, weight=1)
        recipe.startDispensing(amount)
        # self.pid holds the pending after() callback id so it can be cancelled.
        self.pid = self.after(UPDATE_MS, self.update_screen)
    def update_screen(self):
        """Periodic tick: advance dispensing, redraw progress, reschedule."""
        self.pid = None
        recipe = self.recipe
        recipe.updateDispensing()
        #now = time.time() * 1000.0
        #if now - self.last_disp >= 1:
        #if now - self.last_disp >= DISPLAY_MS:
        #self.last_disp = now
        # The Text widget is kept DISABLED except while being rewritten.
        self.desc.config(state=NORMAL)
        self.desc.delete(0.0, END)
        self.desc.tag_config("header", background="#077", foreground="white")
        self.desc.tag_config("ingr", lmargin1=10, lmargin2=20)
        self.desc.tag_config("percent", foreground="#c44")
        self.desc.insert(END, "Dispensing: %s\n" % recipe.getName(), "header")
        for ingr in recipe.dispensing:
            self.desc.insert(END, ingr.readableDesc(metric=self.master.use_metric), "ingr")
            self.desc.insert(END, " ")
            self.desc.insert(END, "%.0f%%\n" % ingr.percentDone(), 'percent')
        self.desc.config(state=DISABLED)
        self.master.update()
        if recipe.doneDispensing():
            # Finished: persist state and return to the top-level screen.
            self.master.save_configs()
            self.master.screen_pop_to_top()
        else:
            self.pid = self.after(UPDATE_MS, self.update_screen)
    def handle_button_back(self):
        """Cancel button: stop the update loop, abort dispensing, go back."""
        if self.pid != None:
            self.after_cancel(self.pid)
        self.recipe.cancelDispensing()
        self.master.screen_pop()
8078d7a28563705ea00176138060393918bcd846 | 219 | py | Python | 0.1/game/maptest.py | icefoxen/games-drewfe | 7606330b89e4ad37e98637e79af2c058b18112d7 | [
"MIT"
] | null | null | null | 0.1/game/maptest.py | icefoxen/games-drewfe | 7606330b89e4ad37e98637e79af2c058b18112d7 | [
"MIT"
] | null | null | null | 0.1/game/maptest.py | icefoxen/games-drewfe | 7606330b89e4ad37e98637e79af2c058b18112d7 | [
"MIT"
] | null | null | null | import pygame
from pygame.locals import *
from map import *
# Smoke test: load a map file and draw it once into a 400x300 window.
a = Map( 'test.map' )
scr = pygame.display.set_mode( (400, 300) )
a.draw( scr, 0, 0, 400, 300 )
pygame.display.flip()
#a.printTileset()
# Busy-wait to keep the window open (no event handling, so the window
# can only be killed externally).
while True:
    pass
| 15.642857 | 43 | 0.671233 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 0.123288 |
8078e354e9895b4d66d66de7d6661a57c613976b | 2,743 | py | Python | lib/n_queens_csp.py | ToniRV/Constraint-Satisfaction-Notebook | a3f0dbbc3b45cff8bec0fa087cdebffaf38d3a26 | [
"MIT"
] | 8 | 2018-11-20T16:35:15.000Z | 2021-04-24T09:15:01.000Z | lib/n_queens_csp.py | ToniRV/Constraint-Satisfaction-Notebook | a3f0dbbc3b45cff8bec0fa087cdebffaf38d3a26 | [
"MIT"
] | 14 | 2018-11-12T21:16:04.000Z | 2018-12-07T21:00:01.000Z | lib/n_queens_csp.py | ToniRV/Constraint-Satisfaction-Notebook | a3f0dbbc3b45cff8bec0fa087cdebffaf38d3a26 | [
"MIT"
] | 6 | 2019-07-02T09:04:57.000Z | 2022-01-20T15:09:04.000Z | from __future__ import print_function
import time
import copy
from lib.backtracking import backtracking_search
from lib.csp import CSP
def queen_constraint(A, a, B, b):
    """Constraint is satisfied (true) if A, B are really the same variable,
    or if they are not in the same row, down diagonal, or up diagonal."""
    if A == B:
        return True
    same_row = (a == b)
    same_down_diagonal = (A + a == B + b)
    same_up_diagonal = (A - a == B - b)
    return not (same_row or same_down_diagonal or same_up_diagonal)
def all_different(L):
    """ Utility function to check that all (hashable) values in the list are different """
    # Fix: the original started with a bare `isinstance(L, list)` expression
    # whose result was discarded (presumably an intended assertion) and then
    # built the set manually.  A set collapses duplicates, so the sizes match
    # exactly when every value is distinct.
    return len(set(L)) == len(L)
class NQueensCSP(CSP):
    """Make a CSP for the nQueens problem for search with backtracking """

    def __init__(self, n):
        """Initialize data structures for n Queens."""
        # One variable per column; a queen may initially sit in any row,
        # and every pair of queens constrains each other (self included).
        columns = list(range(n))
        row_domains = {column: list(range(n)) for column in columns}
        all_neighbors = {column: list(range(n)) for column in columns}
        CSP.__init__(self, columns, row_domains, all_neighbors, queen_constraint)

    def is_consistent(self, var, val, assignment):
        """ Check if the attempted var = val assignment is consistent with current assignment """
        # Work on a copy so the caller's assignment stays untouched.
        # Slow because we are copying, but perfect for pedagogical purposes.
        candidate = copy.deepcopy(assignment)
        if var != None and val != None:
            candidate[var] = val
        # Same-column conflicts are impossible by construction; check rows
        # and both diagonal directions.
        rows_ok = all_different(list(candidate.values()))
        down_diagonals = [column + row for column, row in candidate.items()]
        up_diagonals = [column - row for column, row in candidate.items()]
        return rows_ok and all_different(down_diagonals) and all_different(up_diagonals)
if __name__== "__main__":
    # Solve 15-queens and compare plain backtracking with forward checking.
    n_queens = NQueensCSP(15)
    # Backtracking without forward checking: print solution and wall time.
    start_time = time.time()
    solution = backtracking_search(n_queens, fc=False)
    end_time = time.time()
    print(solution)
    print(end_time - start_time)
    n_queens.display(solution)
    # Backtracking with forward checking (note: reuses the same CSP instance).
    start_time = time.time()
    solution = backtracking_search(n_queens, fc=True)
    end_time = time.time()
    print(solution)
    print(end_time - start_time)
    n_queens.display(solution)
807bbe6aaa16696f030c03d7692e99d63f213360 | 7,363 | py | Python | race_service/views/race_results.py | langrenn-sprint/race-service | a5f3ca8d381e8e4d81bae25cccee0cb47b64400b | [
"Apache-2.0"
] | null | null | null | race_service/views/race_results.py | langrenn-sprint/race-service | a5f3ca8d381e8e4d81bae25cccee0cb47b64400b | [
"Apache-2.0"
] | 62 | 2021-09-14T18:47:38.000Z | 2022-03-21T22:14:20.000Z | race_service/views/race_results.py | langrenn-sprint/race-service | a5f3ca8d381e8e4d81bae25cccee0cb47b64400b | [
"Apache-2.0"
] | null | null | null | """Resource module for race_results resources."""
import json
import logging
import os
from typing import List, Union
from aiohttp.web import (
HTTPNotFound,
HTTPUnprocessableEntity,
Response,
View,
)
from dotenv import load_dotenv
from race_service.adapters import UsersAdapter
from race_service.models import (
IndividualSprintRace,
IntervalStartRace,
RaceResult,
TimeEvent,
)
from race_service.services import (
IllegalValueException,
RaceNotFoundException,
RaceResultNotFoundException,
RaceResultsService,
RacesService,
TimeEventsService,
)
from .utils import extract_token_from_request
# Load environment overrides from a .env file, then derive the service's
# externally visible base URL (defaults suit local development).
load_dotenv()
HOST_SERVER = os.getenv("HOST_SERVER", "localhost")
HOST_PORT = os.getenv("HOST_PORT", "8080")
BASE_URL = f"http://{HOST_SERVER}:{HOST_PORT}"
class RaceResultsView(View):
    """Class representing race_results resource."""
    async def get(self) -> Response:
        """Return all race-results for a race, optionally filtered by the
        'timingPoint' query parameter, as a JSON array."""
        db = self.request.app["db"]
        race_id = self.request.match_info["raceId"]
        if "timingPoint" in self.request.rel_url.query:
            timing_point = self.request.rel_url.query["timingPoint"]
            race_results = (
                await RaceResultsService.get_race_results_by_race_id_and_timing_point(
                    db, race_id, timing_point
                )
            )
        else:
            race_results = await RaceResultsService.get_race_results_by_race_id(
                db, race_id
            )
        # We expand references to time-events in race-results ranking-sequence:
        for race_result in race_results:
            time_events: List[TimeEvent] = []
            time_events_sorted: List[TimeEvent] = []
            for time_event_id in race_result.ranking_sequence:
                time_event = await TimeEventsService.get_time_event_by_id(
                    db, time_event_id
                )
                time_events.append(time_event)
            # We sort the time-events on rank; the key tuple orders entries
            # with a real, non-empty rank before those without one.
            time_events_sorted = sorted(
                time_events,
                key=lambda k: (
                    k.rank is not None,
                    k.rank != "",
                    k.rank,
                ),
                reverse=False,
            )
            race_result.ranking_sequence = time_events_sorted  # type: ignore
        # NOTE(review): 'list' shadows the builtin here; consider renaming.
        list = []
        for race_result in race_results:
            list.append(race_result.to_dict())
        body = json.dumps(list, default=str, ensure_ascii=False)
        return Response(status=200, body=body, content_type="application/json")
class RaceResultView(View):
    """Class representing a single race_result resource."""

    async def get(self) -> Response:
        """Return one race-result by id with its time-events expanded,
        or 404 if it does not exist."""
        db = self.request.app["db"]
        race_result_id = self.request.match_info["raceResultId"]
        logging.debug(f"Got get request for race_result {race_result_id}")
        try:
            race_result = await RaceResultsService.get_race_result_by_id(
                db, race_result_id
            )
            # We expand references to time-events in race-result's ranking-sequence:
            time_events: List[TimeEvent] = []
            for time_event_id in race_result.ranking_sequence:
                time_event = await TimeEventsService.get_time_event_by_id(
                    db, time_event_id
                )
                time_events.append(time_event)
            # We sort the time-events on rank; entries with a real, non-empty
            # rank sort before those without one.
            time_events_sorted = sorted(
                time_events,
                key=lambda k: (
                    k.rank is not None,
                    k.rank != "",
                    k.rank,
                ),
                reverse=False,
            )
            race_result.ranking_sequence = time_events_sorted  # type: ignore
        except RaceResultNotFoundException as e:
            raise HTTPNotFound(reason=str(e)) from e
        logging.debug(f"Got race_result: {race_result}")
        body = race_result.to_json()
        return Response(status=200, body=body, content_type="application/json")

    async def put(self) -> Response:
        """Replace a race-result; requires an admin/race-result-admin token.

        Returns 204 on success, 404 for an unknown id and 422 for an
        invalid body.
        """
        db = self.request.app["db"]
        token = extract_token_from_request(self.request)
        try:
            await UsersAdapter.authorize(token, roles=["admin", "race-result-admin"])
        except Exception as e:
            raise e from e
        race_result_id = self.request.match_info["raceResultId"]
        # Fix: the original read and logged the request body twice; a single
        # read is sufficient (and avoids relying on aiohttp's payload cache).
        body = await self.request.json()
        logging.debug(
            f"Got request-body {body} for {race_result_id} of type {type(body)}"
        )
        try:
            race_result = RaceResult.from_dict(body)
        except KeyError as e:
            raise HTTPUnprocessableEntity(
                reason=f"Mandatory property {e.args[0]} is missing."
            ) from e
        try:
            await RaceResultsService.update_race_result(db, race_result_id, race_result)
        except IllegalValueException as e:
            raise HTTPUnprocessableEntity(reason=str(e)) from e
        except RaceResultNotFoundException as e:
            raise HTTPNotFound(reason=str(e)) from e
        return Response(status=204)

    async def delete(self) -> Response:
        """Delete a race-result and detach it from its owning race."""
        db = self.request.app["db"]
        token = extract_token_from_request(self.request)
        try:
            await UsersAdapter.authorize(token, roles=["admin", "race-result-admin"])
        except Exception as e:
            raise e from e
        race_result_for_deletion_id = self.request.match_info["raceResultId"]
        logging.debug(
            f"Got delete request for race_result {race_result_for_deletion_id}"
        )
        try:
            race_result: RaceResult = await RaceResultsService.get_race_result_by_id(
                db, race_result_for_deletion_id
            )
            # We need to remove the race-result from the race containing the race-result:
            try:
                race: Union[
                    IndividualSprintRace, IntervalStartRace
                ] = await RacesService.get_race_by_id(db, race_result.race_id)
            except RaceNotFoundException as e:
                raise HTTPNotFound(
                    reason=(
                        f"DB is inconsistent: cannot find race with id "
                        f"{race_result.race_id} of race-result with id {race_result.id}"
                    )
                ) from e
            del race.results[race_result.timing_point]
            await RacesService.update_race(db, race.id, race)
            # We can finally delete the race-result:
            await RaceResultsService.delete_race_result(db, race_result_for_deletion_id)
        except RaceResultNotFoundException as e:
            raise HTTPNotFound(reason=str(e)) from e
        return Response(status=204)
| 37.566327 | 90 | 0.580877 | 6,510 | 0.88415 | 0 | 0 | 0 | 0 | 6,309 | 0.856852 | 1,348 | 0.183078 |
807c5029f34b249c21ce5270b815763c5f2132a5 | 3,549 | py | Python | 01/hierarchy/company_management.py | MayaScarlet/runestone-pythonds3 | 9ac53482568b34e7272c122a5545a50da05b69fe | [
"MIT"
] | null | null | null | 01/hierarchy/company_management.py | MayaScarlet/runestone-pythonds3 | 9ac53482568b34e7272c122a5545a50da05b69fe | [
"MIT"
] | null | null | null | 01/hierarchy/company_management.py | MayaScarlet/runestone-pythonds3 | 9ac53482568b34e7272c122a5545a50da05b69fe | [
"MIT"
] | null | null | null | """
Class hierarchy for company management
"""
class Company:
    """A company identified by its name and location."""

    def __init__(self, company_name, location):
        self.company_name = company_name
        self.location = location

    def __str__(self):
        return f"Company: {self.company_name}, {self.location}"

    # Fix: the original defined `__repr` (missing trailing underscores),
    # which is not a special method, so repr() fell back to the default
    # object representation.
    def __repr__(self):
        return f"Company: {self.company_name}, {self.location}"
class Management(Company):
    """A company employee at a given management layer.

    NOTE(review): extra **kwargs are accepted but silently discarded, and
    the `position` method below is shadowed by the `self.position` string
    set in __init__, so it is unreachable on instances - confirm whether
    either was intentional.
    """
    def __init__(self, company_name, location, employee, position, management_type, **kwargs):
        self.management_type = management_type
        self.employee = employee
        self.position = position
        super().__init__(company_name, location)
    def __str__(self):
        return f"{self.employee}: {self.position} in {self.management_type} management at {self.company_name}"
    def __repr__(self):
        return f"{self.employee}: {self.position} in {self.management_type} management at {self.company_name}"
    def type(self):
        # Returns the management layer name (e.g. 'Top level').
        return f"{self.management_type}"
    def position(self):
        # Shadowed by the instance attribute of the same name (see class note).
        return f"{self.position}"
class TopLevelManagement(Management):
    """Top-level management positions (chairman, vice president, CEO)."""

    def __init__(self, company_name, location, employee, position, **kwargs):
        super().__init__(company_name, location, employee, position, management_type='Top level', **kwargs)

    # Fix: the factory methods passed arguments in the order
    # (company, employee, position, location) while __init__ expects
    # (company_name, location, employee, position); reordered to match.
    @classmethod
    def chairman(cls) -> 'TopLevelManagement':
        return cls('Xamarin Ltd', 'United States', 'John Doe', 'Chairman')

    @classmethod
    def vice_president(cls) -> 'TopLevelManagement':
        return cls('Xamarin Ltd', 'United States', 'Jane Doe', 'Vice President')

    @classmethod
    def ceo(cls) -> 'TopLevelManagement':
        return cls('Xamarin Ltd', 'United States', 'Jeanne Donne', 'CEO')
class MiddleManagement(Management):
    """Middle-management positions (general and regional managers)."""

    def __init__(self, company_name, location, employee, position, **kwargs):
        super().__init__(company_name, location, employee, position, management_type='Middle', **kwargs)

    # Fix: factory arguments reordered to match __init__
    # (company_name, location, employee, position).
    @classmethod
    def general_manager(cls) -> 'MiddleManagement':
        return cls('Xamarin Ltd', 'United States', 'Aran Diego', 'General Manager')

    @classmethod
    def regional_manager(cls) -> 'MiddleManagement':
        # Copy-paste fix: this factory previously produced a 'General Manager'.
        return cls('Xamarin Ltd', 'United States', 'Antuan Doe', 'Regional Manager')
class FirstLineManagement(Management):
    """First-line management positions (supervisor, office manager, team leader)."""

    def __init__(self, company_name, location, employee, position, **kwargs):
        super().__init__(company_name, location, employee, position, management_type='First line', **kwargs)

    # Fix: factory arguments reordered to match __init__
    # (company_name, location, employee, position).
    @classmethod
    def supervisor(cls) -> 'FirstLineManagement':
        return cls('Xamarin Ltd', 'United States', 'Sam Pauline', 'Supervisor')

    @classmethod
    def office_manager(cls) -> 'FirstLineManagement':
        return cls('Xamarin Ltd', 'United States', 'Sarah Jones', 'Office Manager')

    @classmethod
    def team_leader(cls) -> 'FirstLineManagement':
        return cls('Xamarin Ltd', 'United States', 'Amy Joe', 'Team Leader')
def main():
    """Demo: print the company and one employee from every management layer."""
    print(Company('Xamarin Ltd', 'United States'))
    print(TopLevelManagement.chairman())
    print(TopLevelManagement.ceo())
    print(MiddleManagement.general_manager())
    print(MiddleManagement.regional_manager())
    print(FirstLineManagement.supervisor())
    print(FirstLineManagement.office_manager())
    print(FirstLineManagement.team_leader())


if __name__ == '__main__':
    main()
| 31.6875 | 110 | 0.688363 | 2,837 | 0.79938 | 0 | 0 | 1,137 | 0.320372 | 0 | 0 | 1,026 | 0.289096 |
807c60f448466d06587af2768eab735c6b00629e | 831 | py | Python | config.example.py | isaacnoboa/balaguer_bot | 1b7d61db7ebfc1b9067e6ac1762b077ed259ecb8 | [
"MIT"
] | null | null | null | config.example.py | isaacnoboa/balaguer_bot | 1b7d61db7ebfc1b9067e6ac1762b077ed259ecb8 | [
"MIT"
] | null | null | null | config.example.py | isaacnoboa/balaguer_bot | 1b7d61db7ebfc1b9067e6ac1762b077ed259ecb8 | [
"MIT"
] | null | null | null | # Make sure to rename this file as "config.py" before running the bot.
# Enable verbose/debug output from the bot.
verbose=True
# Bot API token (placeholder - replace with your real token; keep it secret).
api_token='0000000000:XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
# Enter the user ID and a readable name for each user in your group.
# NOTE: the zero placeholders below are identical, so this example dict
# collapses to a single entry until real IDs are filled in.
# TODO make balaguer automatically collect user IDs.
# But that's only useful if the bot actually gathers widespread usage.
all_users={
    000000000: 'Readable Name',
    000000000: 'Readable Name',
    000000000: 'Readable Name',
    000000000: 'Readable Name',
}
# A list containing the user IDs of who can access the admin features of the bot.
admins = [000000000,
          000000000,
          000000000,
          000000000]
# A list of the groups where the bot is allowed to operate (usually the main group and an admin test group)
# Group chat IDs are negative numbers.
approved_groups = [-000000000,
                   -000000000]
807f6be89f92fe14527544c6ac0f3f6e3bb7e50f | 1,066 | py | Python | server/src/service/image/i_image_controller_service.py | konrad2508/picgal | 7f7822a02145fd2efa697e1c7750af9af680a3da | [
"MIT"
] | 4 | 2021-12-31T10:06:34.000Z | 2022-01-16T16:34:50.000Z | server/src/service/image/i_image_controller_service.py | konrad2508/picgal | 7f7822a02145fd2efa697e1c7750af9af680a3da | [
"MIT"
] | null | null | null | server/src/service/image/i_image_controller_service.py | konrad2508/picgal | 7f7822a02145fd2efa697e1c7750af9af680a3da | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
from model.image.data.count_data import CountData
from model.image.data.image_data import ImageData
from model.image.data.tag_data import TagData
from model.image.data.virtual_tag_data import VirtualTagData
from model.image.request.image_modification_request import ImageModificationRequest
class IImageControllerService(ABC):
    """Abstract interface for the image controller layer: image queries,
    modification, tag listing and media file-path resolution."""
    # Return one page of images matching the tag filter (None = unfiltered).
    @abstractmethod
    def get_infos(self, image_url: str, preview_url: str, sample_url: str, tags: str | None, page: int) -> list[ImageData]: ...
    # Apply the given modifications to image *id* and return the updated data.
    @abstractmethod
    def modify_info(self, image_url: str, preview_url: str, sample_url: str, id: int, modifications: ImageModificationRequest) -> ImageData: ...
    # Count images matching the tag filter (None = unfiltered).
    @abstractmethod
    def get_infos_count(self, tags: str | None) -> CountData: ...
    # List all known tags, plain and virtual.
    @abstractmethod
    def get_tags(self) -> list[TagData | VirtualTagData]: ...
    # Resolve the filesystem path of the original image.
    @abstractmethod
    def get_image_path(self, id: int) -> str: ...
    # Resolve the filesystem path of the preview rendition.
    @abstractmethod
    def get_preview_path(self, id: int) -> str: ...
    # Resolve the filesystem path of the sample rendition.
    @abstractmethod
    def get_sample_path(self, id: int) -> str: ...
807f973112c1400fd4998207bea6ad287765ac99 | 2,131 | py | Python | Common/if_else.py | Heisenberg710/Python_Crash_Course | 4303f1a54c430e9897cccef6c5fff64a655e2972 | [
"Apache-2.0"
] | null | null | null | Common/if_else.py | Heisenberg710/Python_Crash_Course | 4303f1a54c430e9897cccef6c5fff64a655e2972 | [
"Apache-2.0"
] | null | null | null | Common/if_else.py | Heisenberg710/Python_Crash_Course | 4303f1a54c430e9897cccef6c5fff64a655e2972 | [
"Apache-2.0"
] | null | null | null | # if语句
games = ['CS GO', 'wow', 'deathStranding']
for game in games:
    if game == 'wow': # equality is tested with '=='
        print(game.upper())
# Checking for equality
sport = 'football'
if sport == 'FOOTBALL':
    print('yes')
else:
    print('No') # prints No: different case is not the same string - lowercase first before comparing
for game in games:
    if game.lower() == 'cs go':
        print('是cs go了')
# Checking for inequality
for game in games:
    if game != 'wow':
        print('该游戏不是wow,该游戏是' + game)
    else:
        print('该游戏是wow')
# Comparing numbers
ages = [15, 31, 22, 18]
for age in ages:
    if age >= 18:
        print('已成年')
    else:
        print('未成年')
# Use 'and' to check multiple conditions at once
i = 17
j = 21
if i > 18 and j > 18:
    print('\n两者都已成年')
else:
    print('\n有未成年混入其中')
# 'or' also checks multiple conditions - it is True when either one holds, and False only when both fail
i = 17
j = 12
if i > 18 or j > 18:
    print('\n两者里有成年人')
else:
    print('\n都是未成年')
# Use 'in' to check whether a particular value is present
if 'wow' in games:
    print('\nwow已经在游戏库中')
else:
    games.append('OverWatch')
    print('\nwow不在库中,现已经添加进去')
if 'OverWatch' in games:
    print('\n守望先锋已经在游戏库中')
else:
    games.append('OverWatch')
    print('\nOverWatch不在库中,现已经添加进去')
# The if-elif-else structure
age = 4
if age <= 4:
    price = 0
elif 4 < age <= 18:
    price = 25
else:
    price = 50
print('您的票价为:' + str(price) + '元,谢谢参观!')
# Multiple elif blocks
age = 31
if age <= 4:
    price = 0
elif 4 < age <= 18:
    price = 5
elif 18 < age < 65:
    price = 10
elif 65 <= age:
    price = 5
print('\n购买票价为:' + str(price) + '元 谢谢参观!')
ob = ['雕哥', '宝哥', '龙神', '胖头', '大狗', '大Mu', '核桃', '谢彬', '马甲', '566']
for player in ob:
    if player == '谢彬':
        print('谢彬是谁???')
    elif player == '胖头':
        print('法国士兵!!!')
    else:
        print(player + 'nb!!!')
# Checking whether a list is empty (an empty list is falsy)
games = []
if games:
    for game in games:
        print('\n圣诞大折扣所要购买的游戏有:')
else:
    print('\n购物车中没有任何游戏、快去添加吧!')
# Using multiple lists
my_games = ['Dota2', 'CS GO', 'WOW', 'Over Watch', 'Death Stranding', 'Cyberpunk2077', 'Dark Dungeon']
fri_games = ['Dota2', 'CS GO', 'WOW', 'lol']
for fri_game in fri_games:
    if fri_game in my_games:
        print('我们共同喜欢的游戏有:' + fri_game.upper())
    else:
        print('我不喜欢玩' + fri_game.upper() + '但是她好像蛮喜欢的')
807f9c9ab79882e8cda374e3c06329f34a95d56f | 1,299 | py | Python | src/favorites_crawler/itemloaders.py | RyouMon/FavoritesCrawler | c11750ea4094cd7e4d91a0dd79c9ee21a066c0ee | [
"MIT"
] | 2 | 2022-02-05T04:24:55.000Z | 2022-02-22T23:50:23.000Z | src/favorites_crawler/itemloaders.py | RyouMon/FavoritesCrawler | c11750ea4094cd7e4d91a0dd79c9ee21a066c0ee | [
"MIT"
] | 3 | 2022-02-22T13:35:29.000Z | 2022-02-28T13:29:56.000Z | src/favorites_crawler/itemloaders.py | RyouMon/FavoritesCrawler | c11750ea4094cd7e4d91a0dd79c9ee21a066c0ee | [
"MIT"
] | null | null | null | from itemloaders import ItemLoader
from itemloaders.processors import Join, Compose, MapCompose
from favorites_crawler import items
from favorites_crawler.processors import take_first, identity, get_nhentai_id, original_url_from_nhentai_thumb_url
from favorites_crawler.processors import replace_space_with_under_scope
class PixivIllustItemLoader(ItemLoader):
    """Pixiv Illust Loader"""
    default_item_class = items.PixivIllustItem
    default_output_processor = take_first
    # Keep all collected image URLs instead of collapsing to the first one.
    image_urls_out = identity
class YanderePostItemLoader(ItemLoader):
    """Yandere Post Loader"""
    default_item_class = items.YanderePostItem
    default_output_processor = take_first
    # Keep all collected image URLs instead of collapsing to the first one.
    image_urls_out = identity
class NHentaiGalleryItemLoader(ItemLoader):
    """NHentai Gallery Loader"""
    default_item_class = items.NHentaiGalleryItem
    default_output_processor = take_first
    # Extract the numeric gallery id from the first collected value.
    id_out = Compose(take_first, get_nhentai_id)
    # Title fragments are concatenated into a single string.
    title_out = Join('')
    # Thumbnail URLs are rewritten to their full-size originals.
    image_urls_out = MapCompose(original_url_from_nhentai_thumb_url)
    # Tag/character names use underscores instead of spaces.
    tags_out = MapCompose(replace_space_with_under_scope)
    characters_out = MapCompose(replace_space_with_under_scope)
class LemonPicPostItemLoader(ItemLoader):
    """LemonPic Post Loader"""
    default_item_class = items.LemonPicPostItem
    default_output_processor = take_first
    # Keep all collected image URLs and tags as lists.
    image_urls_out = identity
    tags_out = identity
| 30.928571 | 114 | 0.810624 | 967 | 0.744419 | 0 | 0 | 0 | 0 | 0 | 0 | 52 | 0.040031 |
8080e39246d1d1153fe7413a6638e093c9c74917 | 1,330 | py | Python | preprocessed_data/RGHS/Code/LabStretching.py | SaiKrishna1207/Underwater-Image-Segmentation | 78def27e577b10e6722c02807bdcfeb7ba53d760 | [
"MIT"
] | null | null | null | preprocessed_data/RGHS/Code/LabStretching.py | SaiKrishna1207/Underwater-Image-Segmentation | 78def27e577b10e6722c02807bdcfeb7ba53d760 | [
"MIT"
] | null | null | null | preprocessed_data/RGHS/Code/LabStretching.py | SaiKrishna1207/Underwater-Image-Segmentation | 78def27e577b10e6722c02807bdcfeb7ba53d760 | [
"MIT"
] | null | null | null | import cv2
from skimage.color import rgb2hsv,hsv2rgb
import numpy as np
from skimage.color import rgb2lab, lab2rgb
from global_StretchingL import global_stretching
from global_stretching_ab import global_Stretching_ab
def LABStretching(sceneRadiance):
    """Contrast-stretch an RGB image in CIELAB space.

    The input is clipped to [0, 255] and converted to L*a*b*; the L channel
    is stretched with global_stretching and the a/b channels with
    global_Stretching_ab, then the result is converted back to RGB scaled
    to the 0-255 range.

    Fix: the original contained an unreachable HSV-stretching variant after
    the return statement; that dead code has been removed.
    """
    sceneRadiance = np.clip(sceneRadiance, 0, 255)
    sceneRadiance = np.uint8(sceneRadiance)
    height = len(sceneRadiance)
    width = len(sceneRadiance[0])
    img_lab = rgb2lab(sceneRadiance)
    L, a, b = cv2.split(img_lab)
    img_L_stretching = global_stretching(L, height, width)
    img_a_stretching = global_Stretching_ab(a, height, width)
    img_b_stretching = global_Stretching_ab(b, height, width)
    labArray = np.zeros((height, width, 3), 'float64')
    labArray[:, :, 0] = img_L_stretching
    labArray[:, :, 1] = img_a_stretching
    labArray[:, :, 2] = img_b_stretching
    img_rgb = lab2rgb(labArray) * 255
    return img_rgb
80866b9b24652869ee934797eac93607bc5366aa | 386 | py | Python | cursecreatortests.py | kushtrimh/curse-of-tenebrae | 374ca8684debb686a651c216e2be78f449b983de | [
"BSD-2-Clause"
] | null | null | null | cursecreatortests.py | kushtrimh/curse-of-tenebrae | 374ca8684debb686a651c216e2be78f449b983de | [
"BSD-2-Clause"
] | null | null | null | cursecreatortests.py | kushtrimh/curse-of-tenebrae | 374ca8684debb686a651c216e2be78f449b983de | [
"BSD-2-Clause"
] | null | null | null | import unittest
import mock
import Tkinter
from cursecreator import Application
class TestNPCCreator(unittest.TestCase):
    """Tests for the curse-creator Application's attribute_fixer()."""
    def setUp(self):
        # Each test gets a fresh Tk root and Application instance.
        root = Tkinter.Tk()
        self.app = Application(root)
    def test_attribute_fixer(self):
        # 'health' is accepted as a fixable attribute; 'banana' is not.
        self.assertTrue(self.app.attribute_fixer("health", 0))
        self.assertFalse(self.app.attribute_fixer("banana", 0))
if __name__ == "__main__":
unittest.main() | 24.125 | 57 | 0.766839 | 260 | 0.673575 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 0.067358 |
8086a65cb78ca3eae82df3569f6ecdb2885f115e | 204 | py | Python | tools/check_encrypted_hash.py | airladon/ThisIGet | e54058056ed593ff1097ef4505a5ce97ea09d94b | [
"BSD-3-Clause"
] | 5 | 2020-06-19T22:06:16.000Z | 2022-02-06T17:13:14.000Z | tools/check_encrypted_hash.py | airladon/ThisIGet | e54058056ed593ff1097ef4505a5ce97ea09d94b | [
"BSD-3-Clause"
] | 3 | 2021-03-09T00:43:58.000Z | 2021-06-15T17:44:22.000Z | tools/check_encrypted_hash.py | airladon/ThisIGet | e54058056ed593ff1097ef4505a5ce97ea09d94b | [
"BSD-3-Clause"
] | null | null | null | import sys
# Make the application package importable when the script runs from the
# repository root.
sys.path.insert(0, './app/app')
from tools import decrypt, check_hash # noqa
# sys.argv[1] = plain text
# sys.argv[2] = hash to compare
# Decrypt the supplied hash and report whether it matches the plain text.
print(check_hash(sys.argv[1], decrypt(sys.argv[2])))
| 25.5 | 52 | 0.691176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 74 | 0.362745 |
808721236b1e32630e399cfed97544fb1b0949bc | 813 | py | Python | external/modbus_tk/modbus_tk/__init__.py | iconnect-iot/intel-device-resource-mgt-lib | 3be2f7342b6f79836439e9712e2a36939c986760 | [
"Apache-2.0"
] | 2 | 2020-03-26T07:23:09.000Z | 2020-04-19T14:20:42.000Z | external/modbus_tk/modbus_tk/__init__.py | iconnect-iot/intel-device-resource-mgt-lib | 3be2f7342b6f79836439e9712e2a36939c986760 | [
"Apache-2.0"
] | 1 | 2020-06-02T04:32:15.000Z | 2020-06-02T04:32:34.000Z | external/modbus_tk/modbus_tk/__init__.py | iconnect-iot/intel-device-resource-mgt-lib | 3be2f7342b6f79836439e9712e2a36939c986760 | [
"Apache-2.0"
] | 15 | 2018-04-06T11:43:38.000Z | 2022-02-07T03:25:43.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Modbus TestKit: Implementation of Modbus protocol in python
(C)2009 - Luc Jean - luc.jean@gmail.com
(C)2009 - Apidev - http://www.apidev.fr
This is distributed under GNU LGPL license, see license.txt
Make possible to write modbus TCP and RTU master and slave for testing purpose
Modbus TestKit is different from pymodbus which is another implementation of
the modbus stack in python
contributors:
----------------------------------
* OrangeTux
* denisstogl
* MELabs
* idahogray
* riaan.doorduin
* tor.sjowall
* smachin1000
* GadgetSteve
* dhoomakethu
* zerox1212
* ffreckle
* Matthew West
Please let us know if your name is missing!
"""
# Package version string.
VERSION = '0.5.4'
import logging
# Shared logger used throughout the modbus_tk package.
LOGGER = logging.getLogger("modbus_tk")
| 21.394737 | 80 | 0.675277 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 747 | 0.918819 |
80892851d8acc79f94eb86a198a24af8261d25bc | 2,972 | py | Python | utils/scripts/visualize_allocations.py | dmoham1476/glow | 3de58297c0cd1ede1854e6333af212ea51605aaa | [
"Apache-2.0"
] | null | null | null | utils/scripts/visualize_allocations.py | dmoham1476/glow | 3de58297c0cd1ede1854e6333af212ea51605aaa | [
"Apache-2.0"
] | null | null | null | utils/scripts/visualize_allocations.py | dmoham1476/glow | 3de58297c0cd1ede1854e6333af212ea51605aaa | [
"Apache-2.0"
] | 1 | 2018-10-07T04:27:31.000Z | 2018-10-07T04:27:31.000Z | #!/usr/bin/env python
import sys
from PIL import Image
# This script is used to visualize memory allocations in the Glow compiler.
#
# Usage: ./visualize.py dump.txt
#
# The script will dump a sequence of bitmap files that can be combined into a
# video. Example: heap100123.bmp, heap heap100124.bmp, heap100125.bmp ... )
#
# On mac and linux this command will generate a gif file:
# convert -delay 10 -loop 0 *.bmp video.gif
#
# The input file should contain a list of allocation/deallocation commands.
# Allocation commands (marked with the letter 'a') report the start address and
# the size of the buffer, and the deallocation commands (marked with the letter
# 'd') report the address of the buffer. You can generate these command lists
# by inserting printf calls into the Glow memory allocator.
#
# Example input:
# a 348864 20000
# a 368896 20000
# a 388928 20000
# a 408960 200000
# d 388928
# d 368896
# d 348864
# Read the whole allocation-command dump named on the command line.
content = open(sys.argv[1]).read()
lines = content.split('\n')
# Canvas geometry: addresses are laid out row-major on a
# canvas_size x canvas_size grid and drawn as pixelsize x pixelsize
# "fat pixels" (see setPixel below).
canvas_size = 512
pixelsize = 8
img = Image.new("RGB", (canvas_size, canvas_size), "black")
pixels = img.load()
# Use this number to assign file names to frames in the video.
filename_counter = 10000000
# Maps from address to size
sizes={}
# Palette cycled through by getColor() for newly allocated buffers.
color_index = 0
colors=[(218, 112, 214), (255, 182, 193), (250, 235, 215), (255, 250, 205),
(210, 105, 30), (210, 180, 140), (188, 143, 143), (255, 240, 245),
(230, 230, 250), (255, 255, 240)]
def getColor():
    """Return the next palette color, cycling through ``colors`` forever."""
    global color_index
    color_index = color_index + 1
    return colors[color_index % len(colors)]
def setPixel(addr, color):
    """Paint the fat pixel for byte address *addr* with *color*.

    Addresses outside the canvas, or not aligned to the block size,
    are silently ignored.
    """
    if addr >= canvas_size * canvas_size:
        return
    if addr % pixelsize != 0:
        return
    # Map the linear address to fat-pixel (column, row) coordinates.
    col = int((addr % canvas_size) / pixelsize)
    row = int((addr / canvas_size) / pixelsize)
    for dx in range(pixelsize):
        for dy in range(pixelsize):
            pixels[col * pixelsize + dx, row * pixelsize + dy] = color
def saveFrame():
    """Write the current canvas as the next numbered bitmap frame."""
    global filename_counter
    filename_counter = filename_counter + 1
    img.save("heap" + str(filename_counter) + ".bmp")
# Replay the command stream. Each event produces two frames: a bright
# "flash" frame followed by the steady-state frame.
for line in lines:
    tokens = line.split()
    if len(tokens) < 1:
        # Stop at the first empty line (end of the dump).
        break
    print(tokens)
    if tokens[0] == 'a':
        start = int(tokens[1])
        size = int(tokens[2])
        sizes[start] = size
        if start + size >= canvas_size * canvas_size:
            continue
        # Flash white, then settle on a palette color.
        for offset in range(size):
            setPixel(start + offset, (255, 255, 255))
        saveFrame()
        steady_color = getColor()
        for offset in range(size):
            setPixel(start + offset, steady_color)
        saveFrame()
    if tokens[0] == 'd':
        start = int(tokens[1])
        size = sizes[start]
        if start + size >= canvas_size * canvas_size:
            continue
        # Flash dark red, then dim to the "previously allocated" shade.
        for offset in range(size):
            setPixel(start + offset, (128, 0, 0))
        saveFrame()
        for offset in range(size):
            setPixel(start + offset, (15, 15, 15))
        saveFrame()
| 29.137255 | 80 | 0.637281 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,176 | 0.395693 |
808b08abd3c47131069b55ad41fa61232066f350 | 6,083 | py | Python | src/c3nav/control/models.py | NoMoKeTo/c3nav-new | f696da7c9c875aad0505b589e7fab7c58105b0a6 | [
"Apache-2.0"
] | null | null | null | src/c3nav/control/models.py | NoMoKeTo/c3nav-new | f696da7c9c875aad0505b589e7fab7c58105b0a6 | [
"Apache-2.0"
] | null | null | null | src/c3nav/control/models.py | NoMoKeTo/c3nav-new | f696da7c9c875aad0505b589e7fab7c58105b0a6 | [
"Apache-2.0"
] | null | null | null | from contextlib import contextmanager
from typing import Dict
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.db import models, transaction
from django.utils.functional import cached_property, lazy
from django.utils.translation import gettext_lazy as _
from c3nav.mapdata.models import Space
class UserPermissions(models.Model):
    """
    Per-user permission flags for the control panel and editor,
    cached for 900 seconds under ``control:permissions:<pk>``.
    """
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, primary_key=True)

    review_changesets = models.BooleanField(default=False, verbose_name=_('can review changesets'))
    direct_edit = models.BooleanField(default=False, verbose_name=_('can activate direct editing'))
    max_changeset_changes = models.PositiveSmallIntegerField(default=10, verbose_name=_('max changes per changeset'))
    editor_access = models.BooleanField(default=False, verbose_name=_('can always access editor'))
    base_mapdata_access = models.BooleanField(default=False, verbose_name=_('can always access base map data'))
    manage_map_updates = models.BooleanField(default=False, verbose_name=_('manage map updates'))
    control_panel = models.BooleanField(default=False, verbose_name=_('can access control panel'))
    grant_permissions = models.BooleanField(default=False, verbose_name=_('can grant control permissions'))
    manage_announcements = models.BooleanField(default=False, verbose_name=_('manage announcements'))
    grant_all_access = models.BooleanField(default=False, verbose_name=_('can grant access to everything'))
    grant_space_access = models.BooleanField(default=False, verbose_name=_('can grant space access'))
    review_all_reports = models.BooleanField(default=False, verbose_name=_('can review all reports'))
    review_group_reports = models.ManyToManyField('mapdata.LocationGroup', blank=True,
                                                  limit_choices_to={'access_restriction': None},
                                                  verbose_name=_('can review reports belonging to'))
    api_secret = models.CharField(null=True, blank=True, max_length=64, verbose_name=_('API secret'))

    class Meta:
        verbose_name = _('User Permissions')
        verbose_name_plural = _('User Permissions')
        default_related_name = 'permissions'

    def __init__(self, *args, initial=False, **kwargs):
        super().__init__(*args, **kwargs)
        # Freshly created permissions for a superuser default every
        # boolean flag to True.
        if initial and self.user_id and self.user.is_superuser:
            for field in UserPermissions._meta.get_fields():
                if isinstance(field, models.BooleanField):
                    setattr(self, field.name, True)

    @staticmethod
    def get_cache_key(pk):
        # Cache key for the permissions of the user with primary key *pk*.
        return 'control:permissions:%d' % pk

    @cached_property
    def review_group_ids(self):
        # Unsaved instances cannot touch the M2M manager.
        if self.pk is None:
            return ()
        return tuple(self.review_group_reports.values_list('pk', flat=True))

    @cached_property
    def can_review_reports(self):
        return self.review_all_reports or self.review_group_ids

    @classmethod
    @contextmanager
    def lock(cls, pk):
        # Row-level lock on the user while the caller mutates permissions.
        with transaction.atomic():
            User.objects.filter(pk=pk).select_for_update()
            yield

    @classmethod
    def get_for_user(cls, user, force=False) -> 'UserPermissions':
        """Fetch the permissions for *user*, via the cache unless *force*."""
        if not user.is_authenticated:
            return cls()
        cache_key = cls.get_cache_key(user.pk)
        cached = None
        if not force:
            cached = cache.get(cache_key, None)
            # Discard stale cache entries that predate newly added fields.
            for field in cls._meta.get_fields():
                if not hasattr(cached, field.attname):
                    cached = None
                    break
        if cached:
            return cached
        with cls.lock(user.pk):
            result = cls.objects.filter(pk=user.pk).first()
            if not result:
                result = cls(user=user, initial=True)
            # Materialize the cached property before storing the instance.
            # noinspection PyStatementEffect
            result.review_group_ids
            cache.set(cache_key, result, 900)
        return result

    def save(self, *args, **kwargs):
        with self.lock(self.user_id):
            super().save(*args, **kwargs)
            cache_key = self.get_cache_key(self.pk)
            cache.set(cache_key, self, 900)

    @property
    def can_access_base_mapdata(self):
        return settings.PUBLIC_BASE_MAPDATA or self.base_mapdata_access
# Lazily evaluated variant: the permission lookup is deferred until the
# value is first accessed.
get_permissions_for_user_lazy = lazy(UserPermissions.get_for_user, UserPermissions)
class UserSpaceAccess(models.Model):
    """
    Grants a user (optionally editable) access to a single space.
    """
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    space = models.ForeignKey(Space, on_delete=models.CASCADE)
    can_edit = models.BooleanField(_('can edit'), default=False)

    class Meta:
        verbose_name = _('user space access')
        verbose_name_plural = _('user space accesses')
        default_related_name = 'spaceaccesses'
        unique_together = (('user', 'space'))

    @staticmethod
    def get_cache_key(pk):
        # Keyed by user primary key, mirroring UserPermissions.get_cache_key.
        return 'control:spaceaccesses:%d' % pk

    @classmethod
    def get_for_user(cls, user, force=False) -> Dict[int, bool]:
        """Return a mapping of space id -> can_edit for *user* (cached)."""
        if not user.is_authenticated:
            return {}
        cache_key = cls.get_cache_key(user.pk)
        cached = None
        if not force:
            cached = cache.get(cache_key, None)
            # NOTE(review): this field check mirrors UserPermissions, but the
            # cached value here is a plain dict, so it never passes hasattr()
            # and the cache is effectively bypassed — verify intent.
            for field in cls._meta.get_fields():
                if not hasattr(cached, field.attname):
                    cached = None
                    break
        if cached:
            return cached
        with UserPermissions.lock(user.pk):
            result = dict(cls.objects.filter(user=user).values_list('space_id', 'can_edit'))
            cache.set(cache_key, result, 900)
        return result

    def save(self, *args, **kwargs):
        with UserPermissions.lock(self.user_id):
            UserPermissions.objects.filter(user_id=self.user_id).select_for_update()
            super().save(*args, **kwargs)
            cache_key = self.get_cache_key(self.user_id)
            cache.delete(cache_key)
| 39.75817 | 117 | 0.662173 | 5,620 | 0.923886 | 130 | 0.021371 | 2,250 | 0.369883 | 0 | 0 | 719 | 0.118198 |
808b93fe478dc64ffdbfa77f3e6535748754649c | 1,296 | py | Python | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/EGL/MESA/drm_image.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/EGL/MESA/drm_image.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/EGL/MESA/drm_image.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | '''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.EGL import _types as _cs
# End users want this...
from OpenGL.raw.EGL._types import *
from OpenGL.raw.EGL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'EGL_MESA_drm_image'
def _f(function):
    """Bind *function* as an entry point of the EGL_MESA_drm_image extension."""
    return _p.createFunction(
        function, _p.PLATFORM.EGL, 'EGL_MESA_drm_image',
        error_checker=_errors._error_checker)
# Enumerant values defined by the EGL_MESA_drm_image extension.
EGL_DRM_BUFFER_FORMAT_ARGB32_MESA=_C('EGL_DRM_BUFFER_FORMAT_ARGB32_MESA',0x31D2)
EGL_DRM_BUFFER_FORMAT_MESA=_C('EGL_DRM_BUFFER_FORMAT_MESA',0x31D0)
EGL_DRM_BUFFER_MESA=_C('EGL_DRM_BUFFER_MESA',0x31D3)
EGL_DRM_BUFFER_STRIDE_MESA=_C('EGL_DRM_BUFFER_STRIDE_MESA',0x31D4)
EGL_DRM_BUFFER_USE_MESA=_C('EGL_DRM_BUFFER_USE_MESA',0x31D1)
EGL_DRM_BUFFER_USE_SCANOUT_MESA=_C('EGL_DRM_BUFFER_USE_SCANOUT_MESA',0x00000001)
EGL_DRM_BUFFER_USE_SHARE_MESA=_C('EGL_DRM_BUFFER_USE_SHARE_MESA',0x00000002)
# Stub signature only: the @_f decorator replaces the body with the real
# platform entry point for the EGL_MESA_drm_image extension.
@_f
@_p.types(_cs.EGLImageKHR,_cs.EGLDisplay,arrays.GLintArray)
def eglCreateDRMImageMESA(dpy,attrib_list):pass
# Stub signature only: the @_f decorator replaces the body with the real
# platform entry point for the EGL_MESA_drm_image extension.
@_f
@_p.types(_cs.EGLBoolean,_cs.EGLDisplay,_cs.EGLImageKHR,arrays.GLintArray,arrays.GLintArray,arrays.GLintArray)
def eglExportDRMImageMESA(dpy,image,name,handle,stride):pass
| 48 | 114 | 0.828704 | 0 | 0 | 0 | 0 | 290 | 0.223765 | 0 | 0 | 350 | 0.270062 |
808d4a310cf29c6e0b82a31c24c4054f29726254 | 7,866 | py | Python | pysnmp/hlapi/v1arch/asyncore/ntforg.py | RKinsey/pysnmp | 96b5cf31e2f5d19f34d0dd1075014c488f6a5789 | [
"BSD-2-Clause"
] | 492 | 2016-03-13T11:03:13.000Z | 2022-03-21T02:52:57.000Z | pysnmp/hlapi/v1arch/asyncore/ntforg.py | bartomo/pysnmp | becd15c79c9a6b5696928ecd50bf5cca8b1770a1 | [
"BSD-2-Clause"
] | 372 | 2016-03-29T22:42:05.000Z | 2022-03-26T10:28:25.000Z | pysnmp/hlapi/v1arch/asyncore/ntforg.py | bartomo/pysnmp | becd15c79c9a6b5696928ecd50bf5cca8b1770a1 | [
"BSD-2-Clause"
] | 197 | 2016-03-13T11:01:54.000Z | 2022-03-07T19:52:15.000Z | #
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore import *
from pysnmp.hlapi.varbinds import *
from pysnmp.smi.rfc1902 import *
from pysnmp.proto.api import v2c
from pysnmp.proto.proxy import rfc2576
from pysnmp import error
__all__ = ['sendNotification']
VB_PROCESSOR = NotificationOriginatorVarBinds()
def sendNotification(snmpDispatcher, authData, transportTarget,
                     notifyType, *varBinds, **options):
    """Send an SNMP TRAP or INFORM notification.

    Builds an SNMP notification PDU (:RFC:`1905#section-4.2.6`) from
    *varBinds* and schedules its transmission through *snmpDispatcher*.

    Parameters
    ----------
    snmpDispatcher: :py:class:`~pysnmp.hlapi.v1arch.asyncore.SnmpDispatcher`
        Asyncore-based asynchronous event loop and associated state.
    authData: :py:class:`~pysnmp.hlapi.CommunityData` or :py:class:`~pysnmp.hlapi.UsmUserData`
        SNMP credentials.
    transportTarget: :py:class:`~pysnmp.hlapi.asyncore.UdpTransportTarget` or
        :py:class:`~pysnmp.hlapi.asyncore.Udp6TransportTarget`
        Transport type along with SNMP peer address.
    notifyType: str
        Either *trap* or *inform*.
    \*varBinds:
        OID-value tuples, :py:class:`~pysnmp.smi.rfc1902.ObjectType` or
        :py:class:`~pysnmp.smi.rfc1902.NotificationType` instances. The
        mandatory SNMPv2-MIB::sysUpTime.0 and SNMPv2-SMI::snmpTrapOID.0
        bindings are added or repositioned automatically when missing or
        out of place.

    Other Parameters
    ----------------
    \*\*options:
        * `lookupMib` - resolve MIB names (default `False`)
        * `cbFun` (callable) - invoked with the INFORM response or error
        * `cbCtx` (object) - opaque context passed through to `cbFun`

    Returns
    -------
    sendRequestHandle: int
        Unique request identifier for *INFORM* requests; `None` for
        *TRAP* notifications.

    Raises
    ------
    PySnmpError
        Or a derivative, when the SNMP operation fails.
    """
    sysUpTime = v2c.apiTrapPDU.sysUpTime
    snmpTrapOID = v2c.apiTrapPDU.snmpTrapOID

    def _ensureVarBinds(varBinds):
        # Enforce the mandatory layout: varBinds[0] is sysUpTime,
        # varBinds[1] is snmpTrapOID.
        if not varBinds or varBinds[0][0] != sysUpTime:
            varBinds.insert(0, (v2c.ObjectIdentifier(sysUpTime), v2c.TimeTicks(0)))
        # Reposition sysUpTime if it occurs later in the list.
        for idx, varBind in enumerate(varBinds[1:]):
            if varBind[0] == sysUpTime:
                varBinds[0] = varBind
                del varBinds[idx + 1]
                break
        if len(varBinds) < 2:
            raise error.PySnmpError('SNMP notification PDU requires '
                                    'SNMPv2-MIB::snmpTrapOID.0 to be present')
        # Reposition snmpTrapOID if it occurs later in the list.
        for idx, varBind in enumerate(varBinds[2:]):
            if varBind[0] == snmpTrapOID:
                del varBinds[idx + 2]
                if varBinds[1][0] == snmpTrapOID:
                    varBinds[1] = varBind
                else:
                    varBinds.insert(1, varBind)
                break
        # snmpTrapOID cannot be synthesized - fail if it is still absent.
        if varBinds[1][0] != snmpTrapOID:
            raise error.PySnmpError('SNMP notification PDU requires '
                                    'SNMPv2-MIB::snmpTrapOID.0 to be present')
        return varBinds

    def _cbFun(snmpDispatcher, stateHandle, errorIndication, rspPdu, _cbCtx):
        # Dispatcher-level callback: forward the INFORM response (or the
        # error) to the user callback, and re-send if it returns new
        # variable-bindings.
        if not cbFun:
            return
        if errorIndication:
            cbFun(errorIndication, v2c.Integer(0), v2c.Integer(0), None,
                  cbCtx=cbCtx, snmpDispatcher=snmpDispatcher, stateHandle=stateHandle)
            return
        errorStatus = v2c.apiTrapPDU.getErrorStatus(rspPdu)
        errorIndex = v2c.apiTrapPDU.getErrorIndex(rspPdu)
        varBinds = v2c.apiTrapPDU.getVarBinds(rspPdu)
        if lookupMib:
            varBinds = VB_PROCESSOR.unmakeVarBinds(snmpDispatcher.cache, varBinds)
        nextStateHandle = v2c.getNextRequestID()
        nextVarBinds = cbFun(errorIndication, errorStatus, errorIndex, varBinds,
                             cbCtx=cbCtx,
                             snmpDispatcher=snmpDispatcher,
                             stateHandle=stateHandle,
                             nextStateHandle=nextStateHandle)
        if not nextVarBinds:
            return
        v2c.apiTrapPDU.setRequestID(reqPdu, nextStateHandle)
        v2c.apiTrapPDU.setVarBinds(reqPdu, _ensureVarBinds(nextVarBinds))
        return snmpDispatcher.sendPdu(authData, transportTarget, reqPdu, cbFun=_cbFun)

    lookupMib, cbFun, cbCtx = [options.get(x) for x in ('lookupMib', 'cbFun', 'cbCtx')]

    if lookupMib:
        varBinds = VB_PROCESSOR.makeVarBinds(snmpDispatcher.cache, varBinds)

    if notifyType == 'trap':
        reqPdu = v2c.TrapPDU()
    else:
        reqPdu = v2c.InformRequestPDU()

    v2c.apiTrapPDU.setDefaults(reqPdu)
    v2c.apiTrapPDU.setVarBinds(reqPdu, varBinds)
    varBinds = v2c.apiTrapPDU.getVarBinds(reqPdu)
    v2c.apiTrapPDU.setVarBinds(reqPdu, _ensureVarBinds(varBinds))

    # SNMPv1 peers need the v2c PDU translated down (RFC 2576).
    if authData.mpModel == 0:
        reqPdu = rfc2576.v2ToV1(reqPdu)

    return snmpDispatcher.sendPdu(authData, transportTarget, reqPdu, cbFun=_cbFun)
| 36.929577 | 147 | 0.65281 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,505 | 0.572718 |
808dfedaabff9b3164170b6fb76d7eb702131fc0 | 2,066 | py | Python | Temperatura.py | russeleiser/ProjectTemperature | d1b07fe253f2e916a23fbd94471e2e053eac4f64 | [
"Apache-2.0"
] | null | null | null | Temperatura.py | russeleiser/ProjectTemperature | d1b07fe253f2e916a23fbd94471e2e053eac4f64 | [
"Apache-2.0"
] | null | null | null | Temperatura.py | russeleiser/ProjectTemperature | d1b07fe253f2e916a23fbd94471e2e053eac4f64 | [
"Apache-2.0"
] | null | null | null | print ("--------------------------------------------------------")
print ("-------------Bienvenido a la Temperatura----------------")
print ("--------------------------------------------------------\n")
x = 0
while x < 10000:
print ("Elige tus opciones:")
print ("(1) Convertir de °C a °F")
print ("(2) Convertir de °C a °K")
print ("(3) Convertir de °F a °C")
print ("(4) Convertir de °F a °K")
print ("(5) Convertir de °K a °C")
print ("(6) Convertir de °K a °F\n")
op = int(input("Elige una opcion de acuerdo a su numero: "))
if op ==1:
r11 = int(input("Escriba el numero que quiere convertir: "))
var11 = r11
var12 = var11 * 1.8
var13 = var12 + 32
print ("°F =", var11, "x 1.8 + 32\n°F =", var12,"+ 32\n°F =", var13,"\n")
elif op ==2:
r12 = int(input("Escriba el numero que quiere convertir: "))
var21 = r12
var22 = var21 + 273
print ("°K =", var21, "-273\n°K =", var22,"\n")
elif op ==3:
r13 = int(input("Escriba el numero que quiere convertir: "))
var31 = r13
var32 = var31 - 32
var33 = var32 / 1.8
print ("°C = (", var31,"- 32 ) / 1.8\n°C =", var32, "/ 1.8\n°C =", var33, "\n")
elif op ==4:
r14 = int(input("Escriba el numero que quiere convertir: "))
var41 = r14
var42 = var41 - 32
bla = 0.5 * var42
var43 = bla + 273
print ("°K = 5/9 (", var41,"- 32 ) + 273\n°K = 5/9", var42, "+ 273\n°K = 0.5 x", var42,"+ 273\n°K =",var43, "\n")
elif op ==5:
r15 = int(input("Escriba el numero que quiere convertir: "))
var51 = r15
var52 = var51 - 273
print ("El resultado es: ", var52)
elif op ==6:
r16 = int(input("Escriba el numero que quiere convertir: "))
var61 = r16
var62 = var61 - 273
num = 1.8 * var62
var63 = num + 32
print ("El resultado es: ", var63)
else:
print ("Esta opcion no existe, ingrese una nueva opcion...")
x = x + 1
| 31.784615 | 121 | 0.466602 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 943 | 0.451196 |
808e7027c492b712a045440e3d09d2fd697f8aca | 1,875 | py | Python | scripts/pretrain_n_times.py | gabrielasuchopar/info-nas | a786015c9d876d73d24dc6e7e07b36fb625f8861 | [
"Apache-2.0"
] | null | null | null | scripts/pretrain_n_times.py | gabrielasuchopar/info-nas | a786015c9d876d73d24dc6e7e07b36fb625f8861 | [
"Apache-2.0"
] | null | null | null | scripts/pretrain_n_times.py | gabrielasuchopar/info-nas | a786015c9d876d73d24dc6e7e07b36fb625f8861 | [
"Apache-2.0"
] | null | null | null | import random
import click
import json
import os
import numpy as np
import torch
from info_nas.config import local_dataset_cfg, load_json_cfg
from info_nas.datasets.networks.pretrained import pretrain_network_dataset
from nasbench import api
from nasbench_pytorch.datasets.cifar10 import prepare_dataset
from scripts.utils import mkdir_if_not_exists
@click.command()
@click.argument('hash')
@click.option('--nasbench_path', default='../data/nasbench_only108.tfrecord')
@click.option('--config_path', default='../configs/pretrain_config.json')
@click.option('--out_dir', default='.')
@click.option('--root', default='../data/cifar/')
@click.option('--seed', default=1)
@click.option('--n_seeds', default=10)
@click.option('--device', default='cuda')
def main(hash, nasbench_path, config_path, out_dir, root, seed, n_seeds, device):
    """Pretrain the network identified by HASH once per seed in 0..n_seeds-1."""
    device = torch.device(device)

    out_dir = os.path.join(out_dir, f"out_{hash}/")
    mkdir_if_not_exists(out_dir)

    # Load the pretraining config, falling back to the built-in defaults.
    if not len(config_path) or config_path is None:
        config = local_dataset_cfg
    else:
        config = load_json_cfg(config_path)

    # Save the config next to the outputs for reference.
    config_name = os.path.basename(config_path) if config_path is not None else 'config.json'
    with open(os.path.join(out_dir, config_name), 'w+') as f:
        json.dump(config, f, indent=' ')

    nasbench = api.NASBench(nasbench_path)

    random.seed(seed)
    torch.manual_seed(seed)
    dataset = prepare_dataset(root=root, random_state=seed, **config['cifar-10'])

    for i in range(n_seeds):
        # Reseed everything so each run is independently reproducible.
        np.random.seed(i)
        torch.manual_seed(i)
        random.seed(i)

        out_path_i = os.path.join(out_dir, str(i))
        pretrain_network_dataset([hash], nasbench, dataset, device=device, dir_path=out_path_i,
                                 **config['pretrain'])


if __name__ == "__main__":
    main()
| 30.241935 | 95 | 0.6992 | 0 | 0 | 0 | 0 | 1,479 | 0.7888 | 0 | 0 | 283 | 0.150933 |
808f116d87b2c8fed3cb8bf2929ebbf77954dd8e | 5,574 | py | Python | python/dazl/pretty/_module_builder.py | digital-asset/dazl-client | 5d54edaea26d7704cc8d73e5945b37ed2806265b | [
"Apache-2.0"
] | 8 | 2019-09-08T09:41:03.000Z | 2022-02-19T12:54:30.000Z | python/dazl/pretty/_module_builder.py | digital-asset/dazl-client | 5d54edaea26d7704cc8d73e5945b37ed2806265b | [
"Apache-2.0"
] | 55 | 2019-05-30T23:00:31.000Z | 2022-01-24T01:51:32.000Z | python/dazl/pretty/_module_builder.py | digital-asset/dazl-client | 5d54edaea26d7704cc8d73e5945b37ed2806265b | [
"Apache-2.0"
] | 9 | 2019-06-30T18:15:27.000Z | 2021-12-03T10:15:27.000Z | # Copyright (c) 2017-2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
from typing import Dict, List, Sequence
from ..damlast.daml_lf_1 import DottedName, Module
from ..damlast.util import module_name, package_ref
from ..model.types import ModuleRef
class ModuleHierarchy:
    """A trie of Daml-LF modules keyed by dotted-name components.

    Iterating yields ``(event, module_ref, module)`` triples, where
    *event* is :attr:`START`, :attr:`ITEM` or :attr:`END`, suitable for
    rendering nested module scopes.
    """

    START = "START"
    ITEM = "ITEM"
    END = "END"

    def __init__(self, package_id, module_name: "Sequence[str]" = ()):
        self.ref = ModuleRef(package_id, DottedName(module_name))
        self._items = dict()  # type: Dict[str, ModuleHierarchy]
        self._modules = list()  # type: List[Module]

    def __getitem__(self, item: str):
        """
        Retrieve the sub-:class:`ModuleHierarchy` prefixed by a name,
        creating it on first access.

        :param item: The next dotted-name component.
        :return: The child :class:`ModuleHierarchy` node.
        """
        mb = self._items.get(item)
        if mb is None:
            mn = module_name(self.ref)
            # TODO: Revisit in dazl 7.0.0 when the internal structures of ModuleRef and DottedName change
            mb = ModuleHierarchy(
                package_ref(self.ref), (f"{mn}.{item}" if mn else str(mn)).split(".")
            )
            self._items[item] = mb
        return mb

    def add_module(self, module: "Module") -> None:
        # Walk/create the trie path named by the module's dotted name and
        # attach the module at the leaf node.
        obj = self
        components = str(module.name).split(".")
        for component in components:
            obj = obj[component]
        obj._modules.append(module)

    def __iter__(self):
        # Depth-first traversal: child scopes first, then this node's own
        # modules, bracketed by START/END events for this scope.
        yield ModuleHierarchy.START, self.ref, None
        for child in self._items.values():
            yield from child
        for mod in self._modules:
            yield ModuleHierarchy.ITEM, self.ref, mod
        yield ModuleHierarchy.END, self.ref, None

    def __repr__(self):
        # Fixed stale name: this class was renamed from ModuleBuilder but
        # the repr still reported the old name.
        return f"{type(self).__name__}(ref={self.ref}, items={self._items})"
| 48.469565 | 161 | 0.502512 | 5,248 | 0.941514 | 281 | 0.050413 | 0 | 0 | 0 | 0 | 3,974 | 0.712953 |
808f6c2c11d842f902a2f33f31bb54427134e845 | 2,097 | py | Python | test/Test.py | AmeCode/DyMatByJoerg-Raedler | 9adb99ad6baa12b9f6c3bdb9a5fe76720f4c2de0 | [
"BSD-2-Clause"
] | 8 | 2016-05-18T16:08:07.000Z | 2019-12-09T15:58:07.000Z | test/Test.py | AmeCode/DyMatByJoerg-Raedler | 9adb99ad6baa12b9f6c3bdb9a5fe76720f4c2de0 | [
"BSD-2-Clause"
] | 1 | 2021-11-16T17:06:19.000Z | 2021-11-18T13:49:30.000Z | test/Test.py | AmeCode/DyMatByJoerg-Raedler | 9adb99ad6baa12b9f6c3bdb9a5fe76720f4c2de0 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2011, Joerg Raedler (Berlin, Germany)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this list
# of conditions and the following disclaimer. Redistributions in binary form must
# reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Smoke test for DyMat: for each sample .mat result file, pick up to 30
# random variables and try every registered export format, printing any
# error an exporter raises instead of aborting the whole run.
import DyMat, DyMat.Export, random
files = ('DoublePendulum_Dymola-7.4.mat',
    'DoublePendulum_OpenModelica-1.8.mat',
    'DoublePendulum_Dymola-2012.mat',
    'DoublePendulum_Dymola-2012-SaveAs.mat',
    'DoublePendulum_Dymola-2012-SaveAsPlotted.mat')
# All export format names registered in DyMat.Export.
formats = DyMat.Export.formats.keys()
for fi in files:
    # open file
    df = DyMat.DyMatFile(fi)
    # pick a maximum of 30 random variable names
    # (note: df.names() is called a second time below for the sample source)
    n = df.names()
    x = min(len(n), 30)
    va = random.sample(df.names(), x)
    print(va)
    # do export
    for fo in formats:
        print('Exporting %s to %s' % (fi, fo))
        try:
            DyMat.Export.export(fo, df, va)
        except Exception as e:
            # Best-effort: report the failure and continue with other formats.
            print(e)
| 38.833333 | 82 | 0.723891 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,614 | 0.769671 |
8090b7d8a4222966bc351912bad35d98958fbb28 | 11,473 | py | Python | httptesting/main.py | HttpTesting/pyhttp | 5fd9deddbd920b905297ec64215d502d41d48d2c | [
"Apache-2.0"
] | 6 | 2019-08-09T06:50:50.000Z | 2021-01-29T02:13:58.000Z | httptesting/main.py | HttpTesting/pyhttp | 5fd9deddbd920b905297ec64215d502d41d48d2c | [
"Apache-2.0"
] | 1 | 2019-08-16T04:58:43.000Z | 2019-08-16T04:58:43.000Z | httptesting/main.py | HttpTesting/pyhttp | 5fd9deddbd920b905297ec64215d502d41d48d2c | [
"Apache-2.0"
] | 3 | 2020-11-26T00:58:22.000Z | 2021-02-21T14:46:39.000Z | # ########################################################
# 将根目录加入sys.path中,解决命令行找不到包的问题
import sys
import os
curPath = os.path.abspath(os.path.dirname(__file__))
rootPath = os.path.split(curPath)[0]
sys.path.insert(0, rootPath)
# ########################################################
import shutil
import time
from httptesting.library import gl
from httptesting.library import scripts
from httptesting.library.parse import parse_string_value
from httptesting.library.scripts import (write_file,
read_file,
remove_file,
update_yam_content
)
from httptesting.library.config import conf
from httptesting.library.emailstmp import EmailClass
from httptesting.library.falsework import create_falsework
from httptesting.library.har import ConvertHarToYAML
from httptesting import __version__
import argparse
########################################################################
"""
Command line mode.
"""
def _parse_config(config):
    """Handle the ``-conf/--config`` command-line arguments.

    Supported forms:
      * ``set``           -- open the YAML config file for manual editing.
      * ``set key=value`` -- update one key in the YAML config file.
      * ``get key``       -- print the current value of one key.

    Args:
        config: list of argument tokens from argparse (may be empty).
    """
    if config:
        # 'set' with no key=value: open the config file itself for editing.
        if config[0] == 'set' and len(config) == 1:
            try:
                os.system(gl.configFile)
            except (KeyboardInterrupt, SystemExit):
                print("已终止执行.")
        elif config[0] == 'set' and len(config) == 2 and '=' in config[1]:
            # 'set key=value': split only on the first '=' so values that
            # themselves contain '=' are preserved intact.
            cf = config[1].split("=", 1)
            update_yam_content(gl.configFile, cf[0], parse_string_value(cf[1]))
        elif config[0] == 'get' and len(config) == 2 and '=' not in config[1]:
            # 'get key': read the whole config and print the requested key.
            content = conf.get_yaml_field(gl.configFile)
            try:
                print(content[config[1]])
            except KeyError as ex:
                print('[KeyError]: {}'.format(ex))
        else:
            print('Unknown command: {}'.format(config[0]))
def _convert_case_to_yaml(vert):
    """Generate a case template from the given YAML file (``-c`` option)."""
    if not vert:
        return
    target = os.path.join(os.getcwd(), str(vert).strip())
    scripts.generate_case_tmpl(target)
def _convert_httphar_to_yaml(har):
    """Convert a HTTP archive (*.har) file to a YAML case (``-har`` option)."""
    if not har:
        return
    case_dict = ConvertHarToYAML.convert_har_to_ht(har)
    ConvertHarToYAML.write_case_to_yaml('', case_dict)
def _false_work(start_project):
    """Create a new project scaffold (``-sp/--startproject`` option)."""
    if start_project:
        target_dir = os.path.join(os.getcwd(), start_project)
        create_falsework(target_dir)
def _get_file_yaml(case_file):
    """Record the YAML case file(s) given via ``-f`` into the cache file.

    Multiple file arguments are joined with the '&#' marker before being
    resolved against the current working directory.

    Returns:
        True when a cache entry was written, False when no files were given.
    """
    if not case_file:
        return False
    joined = '&#'.join(case_file)
    entries = [os.path.join(os.getcwd(), joined)]
    # The loader reads the case list back from this semicolon-separated cache.
    cache_file = os.path.join(gl.loadcasePath, ".am_cache", 'yaml.cache')
    write_file(cache_file, 'w', ';'.join(entries))
    return True
def _get_dirs_case_yaml(case_dir):
    """Collect YAML case files under ``case_dir`` and cache their paths.

    Args:
        case_dir: directory given via ``-d`` (absolute or relative).

    Returns:
        True when a directory was given (the cache file is written even if
        no files matched, preserving the original behavior), False otherwise.
    """
    if not case_dir:
        return False
    temp_list = []
    # Hoisted out of the loop: the base directory does not change per file.
    base_dir = os.path.join(os.getcwd(), case_dir)
    for root, dirs, files in os.walk(case_dir):
        for f in files:
            # NOTE(review): matches any file name *containing* 'yaml' and
            # always joins against the top-level directory, ignoring `root`;
            # files in nested sub-directories would resolve to a wrong path.
            # Preserved as-is -- confirm intended behavior before changing.
            if 'yaml' in f:
                temp_list.append(os.path.join(base_dir, f))
    # Cache directory for the collected case list.
    cache_dir = os.path.join(gl.loadcasePath, ".am_cache")
    # Write the collected absolute paths, semicolon-separated, to the cache.
    write_file(
        os.path.join(cache_dir, 'yaml.cache'),
        'w',
        ';'.join(temp_list)
    )
    return True
def run_min():
    """Command-line entry point: parse arguments and run the interface tests.

    Parses -v/-f/-d/-sp/-conf/-har/-c, dispatches each option to its
    handler function, then starts test execution via RunTestCase.invoke().
    Returns nothing.
    """
    # Takes the current path of the command line
    cur_dir = os.getcwd()
    os.chdir(cur_dir)
    parse = argparse.ArgumentParser(
        description='httptesting HTTP(s) interface testing framework.',
        prog='httptesting'
    )
    parse.add_argument(
        "-v",
        "--version",
        action='version',
        version="%(prog)s {}".format(__version__),
        help='Framework version.'
    )
    parse.add_argument(
        "-f",
        "--file",
        nargs='+',
        default='',
        help='The file path; File absolute or relative path.'
    )
    parse.add_argument(
        "-d",
        "--dir",
        default='',
        help='The folder path; folder absolute or relative path.'
    )
    parse.add_argument(
        "-sp",
        "--startproject",
        default='',
        help='Generate test case templates.'
    )
    parse.add_argument(
        "-conf",
        "--config",
        nargs="+",
        default='',
        help='Basic setting of framework.'
    )
    parse.add_argument(
        "-har",
        default='',
        help='Convert the har files to YAML. har file is *.har'
    )
    parse.add_argument(
        "-c",
        "--convert",
        default='',
        help='Convert the har files to YAML. YAML file is *.yaml'
    )
    # Command line arguments are assigned to variables.
    args = parse.parse_args()
    case_file = args.file
    case_dir = args.dir
    start_project = args.startproject
    config = args.config
    har = args.har
    vert = args.convert
    # convert YAML.
    _convert_case_to_yaml(vert)
    # Convert har files to YAML.
    _convert_httphar_to_yaml(har)
    # Setting global var.
    _parse_config(config)
    # False work.
    _false_work(start_project)
    # Write file absolute path to file.
    # Get the yaml file name and write to the queue.
    _get_file_yaml(case_file)
    _get_dirs_case_yaml(case_dir)
    # Began to call.
    RunTestCase.invoke()
#########################################################################
# Not in command mode --dir defaults to the testcase directory.
# Example:
# python3 main.py --dir=r"D:\test_project\project\cloud_fi_v2\testcase"
#########################################################################
class RunTestCase(object):
    """Assembles the pytest command line, runs the tests and reports results.

    Reads all switches (parallelism, reruns, repeat, debug, ...) from the
    framework's YAML config, executes pytest via ``os.system`` and sends
    the outcome via DingTalk and/or e-mail when enabled.
    """
    @classmethod
    def create_report_file(cls):
        """Create a timestamped report directory and return the report path."""
        cls.file_name = 'report.html'
        report_dir = os.path.join(
            os.path.join(os.getcwd(), 'report'),
            time.strftime('%Y%m%d_%H%M%S', time.localtime())
        )
        # Create the per-date test report folder.
        if not os.path.exists(report_dir):
            # os.mkdir(report_dir)
            os.makedirs(report_dir)
        # Determine the full path of the generated report.
        cls.filePath = os.path.join(report_dir, cls.file_name)
        return cls.filePath
    @staticmethod
    def copy_custom_function():
        """Copy the user's custom-function module into the case loader dir."""
        # Custom (user-defined) function support: extfunc.py, if present
        # in the working directory, is made available to the test loader.
        func = os.path.join(os.getcwd(), 'extfunc.py')
        target = os.path.join(gl.loadcasePath, 'extfunc.py')
        if os.path.exists(func):
            shutil.copy(func, target)
    @staticmethod
    def tmpl_msg(low_path):
        """Build the DingTalk result-summary message for report dir `low_path`."""
        # Compose the DingTalk template with the test results.
        config = conf.get_yaml_field(gl.configFile)
        # Externally reachable report base URL (ip + port).
        report_url = config['REPORT_URL']
        # DingTalk message title.
        content = config['DING_TITLE']
        # Read result data written during the run,
        # e.g. "3 tests; 2.23 seconds; 3 passed; 0 failed; 0 errors"
        file_result = os.path.join(gl.loadcasePath, 'result.cache')
        # Read the cached result line.
        result_content = read_file(file_result, 'r')
        # Remove file
        remove_file(file_result)
        res_list = result_content.split(";")
        # Format the DingTalk message (template text is user-facing Chinese).
        msg = """{}执行【已完成】:\n共{}个用例, 执行耗时{}秒, 通过{}, 失败{}, 错误{}, 通过率{}\n测试报告: {}/{}"""
        msg = msg.format(content, res_list[0], res_list[1],
                         res_list[2], res_list[3], res_list[4],
                         res_list[5], report_url, low_path)
        return msg
    @staticmethod
    def run(path):
        """
        Execute the test and generate the test report file.
        Args:
            path: Report file absolute path.
        Return:
            There is no return.
        """
        config = conf.get_yaml_field(gl.configFile)
        exe_con = config['ENABLE_EXECUTION']
        exe_num = config['EXECUTION_NUM']
        rerun = config['ENABLE_RERUN']
        reruns_nums = config['RERUN_NUM']
        repeat = config['ENABLE_REPEAT']
        repeat_num = config['REPEAT_NUM']
        exec_mode = config['ENABLE_EXEC_MODE']
        debug_mode = config['ENABLE_DEBUG_MODE']
        last_failed = config['ENABLE_LAST_FAILED']
        failed_first = config['ENABLE_FAILED_FIRST']
        # custom function
        RunTestCase.copy_custom_function()
        # failed first (--ff); mutually exclusive with last-failed below.
        failed_first_args = (' --ff ' if failed_first else '') if not last_failed else ''
        # last failed (--lf); mutually exclusive with failed-first above.
        last_failed_args = (' --lf ' if last_failed else '') if not failed_first else ''
        # Enable repeat case.
        repeat_args = ' --count={} '.format(repeat_num) if repeat else ''
        # Enable CPU concurrency
        py_args = ' -n {} '.format(exe_num) if exe_con else ''
        # Enable failed retry
        reruns_args = ' --reruns {} '.format(reruns_nums) if rerun else ''
        # debug mode print debug info.
        debug = '' if debug_mode else '--tb=no'
        """
        Load the pytest framework,
        which must be written here or DDT will be loaded first.
        from httptesting.case import test_load_case
        """
        case_path = gl.loadcasePath
        # Output mode console or report.
        if exec_mode:
            cmd = 'cd {} && py.test -q -s {} {} {} {}'.format(
                case_path, reruns_args, 'test_load_case.py',
                repeat_args, debug
            )
        else:
            cmd = 'cd {} && py.test {} {} {} {} {} {} --html={} {} --self-contained-html'.format(
                case_path,
                py_args,
                reruns_args,
                last_failed_args,
                failed_first_args,
                'test_load_case.py',
                repeat_args,
                path,
                debug
            )
        try:
            os.system(cmd)
        except (KeyboardInterrupt, SystemExit):
            print('已终止执行.')
    @staticmethod
    def invoke():
        """
        Start executing tests generate test reports.
        :return: There is no.
        """
        # CONFIG: Read configuration information
        config = conf.get_yaml_field(gl.configFile)
        dd_enable = config['ENABLE_DDING']
        dd_token = config['DD_TOKEN']
        dd_url = config['DING_URL']
        email_enable = config['EMAIL_ENABLE']
        # END CONFIG.
        # Test report file name.
        time_str = time.strftime('%Y%m%d_%H%M%S', time.localtime())
        path = RunTestCase.create_report_file()
        # Start test the send pin message.
        if dd_enable:
            scripts.send_msg_dding(
                '{}:★开始API接口自动化测试★'.format(time_str),
                dd_token,
                dd_url
            )
        # Execute the test and send the test report.
        RunTestCase.run(path)
        if dd_enable:
            # Template message.
            # NOTE(review): splitting on '\\' assumes Windows-style paths;
            # on POSIX the separator is '/'. Confirm target platform.
            dir_list = path.split('\\')
            low_path = dir_list[len(dir_list) - 2]
            msg = RunTestCase.tmpl_msg(low_path)
            print(msg)
            scripts.send_msg_dding(msg, dd_token, dd_url)
        if email_enable:
            # Send test report to EMAIL.
            email = EmailClass()
            email.send(path)
# CLI entry point when this module is executed directly.
if __name__ == "__main__":
    run_min()
| 29.04557 | 97 | 0.546239 | 5,525 | 0.470173 | 0 | 0 | 5,469 | 0.465407 | 0 | 0 | 3,876 | 0.329844 |
8094ccd0dc06024e1e4323bc6b4fa0cfcef4fd31 | 346 | py | Python | setup.py | smok-serwis/longshot-python | 9671d60d77e12d2cb6bc2530d05f55d4bafa8e66 | [
"MIT"
] | null | null | null | setup.py | smok-serwis/longshot-python | 9671d60d77e12d2cb6bc2530d05f55d4bafa8e66 | [
"MIT"
] | null | null | null | setup.py | smok-serwis/longshot-python | 9671d60d77e12d2cb6bc2530d05f55d4bafa8e66 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from distutils.core import setup
setup(name='longshot',
version='0.1alpha',
description='SMOK client connectivity library',
author='smok-serwis.pl',
author_email='admin@smok.co',
url='https://github.com/smok-serwis/longshot-python',
packages=['longshot', 'longshot.persistence'],
) | 28.833333 | 59 | 0.66763 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 186 | 0.537572 |
80959fb4e1855167032241c2b29b45d563eacafd | 1,799 | py | Python | users/tests/test_delete_user.py | tgamauf/spritstat | 849526ec8dec46c57194d50ff3b32c16d0cb684a | [
"MIT"
] | 1 | 2022-01-30T10:50:14.000Z | 2022-01-30T10:50:14.000Z | users/tests/test_delete_user.py | tgamauf/spritstat | 849526ec8dec46c57194d50ff3b32c16d0cb684a | [
"MIT"
] | 47 | 2022-02-02T22:07:28.000Z | 2022-03-30T13:53:37.000Z | users/tests/test_delete_user.py | tgamauf/spritstat | 849526ec8dec46c57194d50ff3b32c16d0cb684a | [
"MIT"
] | null | null | null | from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from users.models import CustomUser
class TestDeleteUser(APITestCase):
    """API tests for the account-deletion endpoint ("account_delete").

    Verifies that DELETE removes the user when authenticated, is rejected
    when anonymous, and that all other HTTP methods return 405.
    """
    fixtures = ["user.json"]
    url: str
    user: CustomUser
    @classmethod
    def setUpTestData(cls):
        # Resolve the endpoint URL once and load the fixture user.
        cls.url = reverse("account_delete")
        cls.user = CustomUser.objects.get(email="test@test.at")
    def setUp(self):
        # Log in for every test except those named *_not_logged_in,
        # which deliberately exercise the anonymous path.
        if not self.id().endswith("_not_logged_in"):
            self.client.login(username=self.user.email, password="test")
    def test_ok(self):
        # Authenticated DELETE removes the account and returns 204.
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        with self.assertRaisesMessage(
            CustomUser.DoesNotExist, "CustomUser matching query does not exist."
        ):
            CustomUser.objects.get(id=self.user.id)
    def test_not_logged_in(self):
        # Anonymous DELETE is forbidden and must not remove the account.
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # Let's just try to get the user, which would fail if it is in fact
        # deleted.
        CustomUser.objects.get(id=self.user.id)
    def test_get(self):
        # Only DELETE is allowed on this endpoint.
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
    def test_post(self):
        response = self.client.post(self.url)
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
    def test_put(self):
        response = self.client.put(self.url)
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
    def test_patch(self):
        response = self.client.patch(self.url)
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
| 33.943396 | 82 | 0.696498 | 1,649 | 0.91662 | 0 | 0 | 148 | 0.082268 | 0 | 0 | 184 | 0.102279 |
8095bb749718d47f183223964eb45c85dd3110bd | 1,323 | py | Python | sphinxcontrib/needs/builder.py | tlovett/sphinxcontrib-needs | 41794403266deb6a4f7ec07bb8297abb0ddc57b1 | [
"MIT"
] | null | null | null | sphinxcontrib/needs/builder.py | tlovett/sphinxcontrib-needs | 41794403266deb6a4f7ec07bb8297abb0ddc57b1 | [
"MIT"
] | null | null | null | sphinxcontrib/needs/builder.py | tlovett/sphinxcontrib-needs | 41794403266deb6a4f7ec07bb8297abb0ddc57b1 | [
"MIT"
] | null | null | null | from sphinx.builders import Builder
from sphinxcontrib.needs.utils import NeedsList
import sphinx
from pkg_resources import parse_version
# Sphinx >= 1.6 ships its own logging wrapper; fall back to the stdlib
# logging module for older versions so `logging.getLogger` works either way.
sphinx_version = sphinx.__version__
if parse_version(sphinx_version) >= parse_version("1.6"):
    from sphinx.util import logging
else:
    import logging
class NeedsBuilder(Builder):
    """Sphinx builder that exports all collected needs to a JSON file.

    All per-document write hooks are no-ops; the actual export happens
    once in finish(), after the whole project has been read.
    """
    name = 'needs'
    format = 'json'
    file_suffix = '.txt'
    links_suffix = None
    def write_doc(self, docname, doctree):
        # No per-document output: everything is written in finish().
        pass
    def finish(self):
        """Merge every collected need into the JSON needs list and write it."""
        log = logging.getLogger(__name__)
        # env.need_all_needs is populated by the needs extension during reading.
        needs = self.env.need_all_needs
        config = self.env.config
        version = config.version
        needs_list = NeedsList(config, self.outdir, self.confdir)
        # Load any existing export so earlier versions' needs are kept.
        needs_list.load_json()
        for key, need in needs.items():
            needs_list.add_need(version, need)
        try:
            needs_list.write_json()
        except Exception as e:
            log.error("Error during writing json file: {0}".format(e))
        else:
            log.info("Needs successfully exported")
    def get_outdated_docs(self):
        # Nothing is considered outdated; this builder always re-exports.
        return ""
    def prepare_writing(self, docnames):
        pass
    def write_doc_serialized(self, docname, doctree):
        pass
    def cleanup(self):
        pass
    def get_target_uri(self, docname, typ=None):
        # This builder produces no document URIs.
        return ""
| 24.962264 | 70 | 0.646259 | 1,027 | 0.776266 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 0.071051 |
80969674b8cc9162e56c575ce0b3e77de0054708 | 194 | py | Python | coding/exceptions.py | ffaristocrat/coding | 5017ddba4b1b1e180f012bc36608e1e6b30b0447 | [
"MIT"
] | null | null | null | coding/exceptions.py | ffaristocrat/coding | 5017ddba4b1b1e180f012bc36608e1e6b30b0447 | [
"MIT"
] | null | null | null | coding/exceptions.py | ffaristocrat/coding | 5017ddba4b1b1e180f012bc36608e1e6b30b0447 | [
"MIT"
] | null | null | null | class GameException(Exception):
pass
class GameFlowException(GameException):
pass
class EndProgram(GameFlowException):
pass
class InvalidPlayException(GameException):
pass
| 12.933333 | 42 | 0.762887 | 184 | 0.948454 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
8097ac06bd41972aee8ea1d14c4c9dcc3e93c68f | 3,976 | py | Python | usercodex/plugins/powertools.py | ipindanger/Codex-z | 1cedebd4352e4adb914b40a219bbda752c9b39d7 | [
"BSD-3-Clause"
] | 2 | 2021-08-30T05:44:14.000Z | 2021-09-13T06:04:21.000Z | usercodex/plugins/powertools.py | ipindanger/Codex-z | 1cedebd4352e4adb914b40a219bbda752c9b39d7 | [
"BSD-3-Clause"
] | null | null | null | usercodex/plugins/powertools.py | ipindanger/Codex-z | 1cedebd4352e4adb914b40a219bbda752c9b39d7 | [
"BSD-3-Clause"
] | 1 | 2021-09-26T13:17:29.000Z | 2021-09-26T13:17:29.000Z | import sys
from asyncio.exceptions import CancelledError
from time import sleep
from usercodex import codex
from ..core.logger import logging
from ..core.managers import edit_or_reply
from ..sql_helper.global_collection import (
add_to_collectionlist,
del_keyword_collectionlist,
get_collectionlist_items,
)
from ..sql_helper.globals import addgvar, delgvar, gvarstatus
from . import BOTLOG, BOTLOG_CHATID, HEROKU_APP
# Module logger and the help-category under which these commands are listed.
LOGS = logging.getLogger(__name__)
plugin_category = "tools"
@codex.cod_cmd(
    pattern="restart$",
    command=("restart", plugin_category),
    info={
        "header": "Restarts the bot !!",
        "usage": "{tr}restart",
    },
    disable_errors=True,
)
async def _(event):
    "Restarts the bot !!"
    # Announce the restart in the log channel, if one is configured.
    if BOTLOG:
        await event.client.send_message(BOTLOG_CHATID, "#RESTART \n" "Bot Restarted")
    xedoc = await edit_or_reply(
        event,
        "Restarted. `.ping` me or `.help` to check if I am online, actually it takes 1-2 min for restarting",
    )
    # Drop any stale restart marker from the collection list
    # (best-effort: failures are only logged).
    try:
        ulist = get_collectionlist_items()
        for i in ulist:
            if i == "restart_update":
                del_keyword_collectionlist("restart_update")
    except Exception as e:
        LOGS.error(e)
    # Remember the chat/message to edit after the bot comes back up.
    try:
        add_to_collectionlist("restart_update", [xedoc.chat_id, xedoc.id])
    except Exception as e:
        LOGS.error(e)
    # Clear the cached IP and disconnect; the supervisor restarts the process.
    # CancelledError is expected when the event loop is torn down.
    try:
        delgvar("ipaddress")
        await codex.disconnect()
    except CancelledError:
        pass
    except Exception as e:
        LOGS.error(e)
@codex.cod_cmd(
    pattern="shutdown$",
    command=("shutdown", plugin_category),
    info={
        "header": "Shutdowns the bot !!",
        "description": "To turn off the dyno of heroku. you cant turn on by bot you need to got to heroku and turn on or use @hk_heroku_bot",
        "usage": "{tr}shutdown",
    },
)
async def _(event):
    "Shutdowns the bot"
    # Announce the shutdown in the log channel, if one is configured.
    if BOTLOG:
        await event.client.send_message(BOTLOG_CHATID, "#SHUTDOWN \n" "Bot shut down")
    await edit_or_reply(event, "`Turning off bot now ...Manually turn me on later`")
    # On Heroku, scale the worker dyno to zero; otherwise just exit.
    if HEROKU_APP is not None:
        HEROKU_APP.process_formation()["worker"].scale(0)
    else:
        sys.exit(0)
@codex.cod_cmd(
    pattern="sleep( [0-9]+)?$",
    command=("sleep", plugin_category),
    info={
        "header": "Userbot will stop working for the mentioned time.",
        "usage": "{tr}sleep <seconds>",
        "examples": "{tr}sleep 60",
    },
)
async def _(event):
    "To sleep the userbot"
    # The optional regex group is None when no duration was supplied
    # (the old `" " not in group(1)` check raised TypeError on None).
    # When present it always starts with a space, so truthiness suffices.
    if not event.pattern_match.group(1):
        return await edit_or_reply(event, "Syntax: `.sleep time`")
    counter = int(event.pattern_match.group(1))
    if BOTLOG:
        await event.client.send_message(
            BOTLOG_CHATID,
            "You put the bot to sleep for " + str(counter) + " seconds",
        )
    event = await edit_or_reply(event, f"`ok, let me sleep for {counter} seconds`")
    # Intentionally a *blocking* sleep: the point of the command is that the
    # bot stops responding entirely for the requested duration.
    sleep(counter)
    await event.edit("`OK, I'm awake now.`")
@codex.cod_cmd(
    pattern="notify (on|off)$",
    command=("notify", plugin_category),
    info={
        "header": "To update the your chat after restart or reload .",
        "description": "Will send the ping cmd as reply to the previous last msg of (restart/reload/update cmds).",
        "usage": [
            "{tr}notify <on/off>",
        ],
    },
)
async def set_pmlog(event):
    "To update the your chat after restart or reload ."
    # Fix: `edit_delete` was called here but never imported, causing a
    # NameError at runtime; it is now imported from ..core.managers.
    input_str = event.pattern_match.group(1)
    if input_str == "off":
        # Disable: remove the persisted flag if it exists.
        if gvarstatus("restartupdate") is None:
            return await edit_delete(event, "__Notify was already disabled__")
        delgvar("restartupdate")
        return await edit_or_reply(event, "__Notify was disabled successfully.__")
    # Enable: persist the flag unless it is already set.
    if gvarstatus("restartupdate") is None:
        addgvar("restartupdate", "turn-oned")
        return await edit_or_reply(event, "__Notify was enabled successfully.__")
    await edit_delete(event, "__Notify was already enabled.__")
| 31.808 | 141 | 0.642354 | 0 | 0 | 0 | 0 | 3,470 | 0.872736 | 2,333 | 0.586771 | 1,370 | 0.344567 |
8098c90d9973448cc9044622db68c84654d5a0ef | 7,309 | py | Python | MC_vis_histogram.py | FloweryK/Neutrino-vertex-reconstruction | 29650df90a4a82a27235d250a9213df5d2bbf4ec | [
"Apache-2.0"
] | 2 | 2020-01-28T20:07:44.000Z | 2020-04-16T16:31:45.000Z | MC_vis_histogram.py | FloweryK/vertex-reconstruction | 29650df90a4a82a27235d250a9213df5d2bbf4ec | [
"Apache-2.0"
] | 1 | 2022-03-12T00:11:12.000Z | 2022-03-12T00:11:12.000Z | MC_vis_histogram.py | FloweryK/Neutrino-vertex-reconstruction | 29650df90a4a82a27235d250a9213df5d2bbf4ec | [
"Apache-2.0"
] | 1 | 2020-04-16T16:32:00.000Z | 2020-04-16T16:32:00.000Z | from utils import load, save, path_list, DEAD_PMTS
import nets
import torch
import numpy as np
import pandas as pd
from scipy import interpolate
import matplotlib.pyplot as plt
from matplotlib.ticker import PercentFormatter
from itertools import repeat
from multiprocessing import Pool
def neural_residual(root_dir):
    """Compute reconstruction residuals of the trained neural network.

    Loads the net type from `root_dir`/configuration.json, restores the
    latest saved model, runs it over the cached test set and returns
    (outputs - labels) * 1000, transposed to shape (3, n_events).
    The factor 1000 converts to mm -- presumably labels are in meters;
    TODO confirm against the training pipeline.
    """
    # model selection
    net_type = load(root_dir + '/configuration.json')['net_type']
    if net_type == 'Net':
        net = nets.Net()
    elif net_type == 'Net2c':
        net = nets.Net2c()
    elif net_type == 'CNN1c':
        net = nets.CNN1c()
    elif net_type == 'CNN2c':
        net = nets.CNN2c()
    else:
        print('invalide net type')
        raise ValueError
    # get the latest model for neural network
    # (last epoch directory, last .pt checkpoint inside it)
    epoch_path = path_list(root_dir + '/models/')[-1]
    model_path = path_list(epoch_path, filter='pt')[-1]
    net.load_state_dict(torch.load(model_path, map_location=torch.device('cpu')))
    # get inputs, labels, outputs and residuals
    inputs = load(root_dir + '/test_inputs.tensor').float()
    labels = load(root_dir + '/test_labels.tensor').float().numpy()
    outputs = net(inputs).detach().cpu().clone().numpy()
    residuals = (outputs - labels) * 1000
    return residuals.T
def cwm_residual(root_dir):
    """Compute charge-weighted-mean (CWM) reconstruction residuals.

    Loads (or builds and caches) 2-D weighting-correction interpolators,
    then reconstructs every test event in parallel via `__job` and returns
    the residual array transposed to shape (3, n_valid_events).
    """
    # Load cached interpolators; on first run, parse the correction table
    # and build them from scratch.
    try:
        interp_r = load('src/interp_r')
        interp_z = load('src/interp_z')
    except FileNotFoundError:
        with open('src/WeightingCorrection_att.dat', 'r') as f:
            df = []
            while True:
                # Columns: R, Z, weight_R, weight_Z separated by spaces;
                # the IndexError on a short line terminates the read loop.
                line = f.readline().split(' ')
                line = list(filter(lambda a: a != '', line))
                try:
                    line[3] = line[3][:-1]
                except IndexError:
                    break
                df.append(line)
            df = pd.DataFrame(df, dtype=float)
        # calculate interpolation
        R = df[0]
        Z = df[1]
        weight_R = df[2]
        weight_Z = df[3]
        interp_r = interpolate.interp2d(R, Z, weight_R, kind='linear')
        interp_z = interpolate.interp2d(R, Z, weight_Z, kind='linear')
        save(interp_r, 'src/interp_r')
        save(interp_z, 'src/interp_z')
    pmt_positions = load('src/pmtcoordinates_ID.json')
    testpaths = load(root_dir + '/testpaths.list')
    # multiprocessing: reconstruct events in 5 batches of 20% each.
    p = Pool(processes=40)
    residuals = []
    total = len(testpaths)
    for i in range(5):
        print('getting cwm residuals... %i' % i)
        paths_batch = testpaths[int(0.2 * i * total):int(0.2 * (i + 1) * total)]
        residuals += p.starmap(__job, zip(paths_batch,
                                          repeat(interp_r),
                                          repeat(interp_z),
                                          repeat(pmt_positions)
                                          )
                               )
    # __job returns False for events with no usable hits; drop them.
    residuals = [r for r in residuals if r]
    return np.array(residuals).T
def __job(path, interp_r, interp_z, pmt_positions):
    """Reconstruct one event via charge-weighted mean plus corrections.

    Args:
        path: path to one serialized event (read with the project `load`).
        interp_r, interp_z: 2-D interpolators giving radial/axial weights.
        pmt_positions: dict mapping PMT id (as str) to {'x', 'y', 'z'}.

    Returns:
        [dx, dy, dz] residual (reconstructed - true vertex) as a list,
        or False when the event has no usable hits.
    """
    f = load(path)
    capture_time = f['capture_time']  # scalar value
    hits = int(f['photon_hits'])  # scalar value
    hit_counts = f['hit_count']  # vector value
    hit_pmts = f['hit_pmt']  # vector value
    hit_time = f['hit_time']  # vector value
    true_vertex = [f['positron_x'], f['positron_y'], f['positron_z']]
    # Accumulate per-PMT counts (354 PMTs), skipping dead PMTs and hits
    # arriving at or after the capture time.
    x = np.zeros(354)
    for i in range(hits):
        pmt = hit_pmts[i]
        count = hit_counts[i]
        t = hit_time[i]
        if pmt in DEAD_PMTS:
            continue
        if t < capture_time:
            x[pmt] += count
    # if the entry is valid, reconstruct the vertex
    if sum(x) > 0:
        # calculate cwm vertex; loop variable renamed from `hits` so it no
        # longer shadows the scalar hit count above.
        reco_vertex = np.array([.0, .0, .0])
        for pmt_id, pmt_hits in enumerate(x):
            pmt_pos = pmt_positions[str(pmt_id)]
            reco_vertex += pmt_hits * np.array([pmt_pos['x'], pmt_pos['y'], pmt_pos['z']], dtype=float)
        # normalize
        reco_vertex = reco_vertex / sum(x)
        # correction 1: tabulated radial/axial weighting correction
        weight1r = interp_r(np.linalg.norm(reco_vertex[:2]), abs(reco_vertex[2]))
        weight1z = interp_z(np.linalg.norm(reco_vertex[:2]), abs(reco_vertex[2]))
        reco_vertex[:2] *= weight1r
        reco_vertex[2] *= weight1z
        # correction 2: empirical linear correction in cylindrical radius
        weight2 = 0.8784552 - 0.0000242758 * np.linalg.norm(reco_vertex[:2])
        reco_vertex *= weight2
        return (reco_vertex - true_vertex).tolist()
    else:
        return False
def filter_nsigma(outputs, n):
    """Keep only values within n standard deviations of the histogram peak.

    The peak is the left edge of the fullest bin of a 200-bin histogram of
    `outputs`; the spread is the (population) standard deviation of all
    values. Bounds are exclusive; input order is preserved.
    """
    counts, edges = np.histogram(outputs, bins=200)
    peak = edges[np.argmax(counts)]
    sigma = np.std(outputs)
    lo, hi = peak - n * sigma, peak + n * sigma
    return [value for value in outputs if lo < value < hi]
def main():
    """Interactively compare CWM vs neural residuals as x/y/z histograms.

    Prompts for a control-group (CWM) run directory and name plus any
    number of experimental (neural-net) run directories/names, computes
    residuals for each, and saves a 3-panel histogram figure to
    MC_vis_histogram.png.
    """
    # control group
    print('control group root: ')
    control_root = str(input())
    print('control group name:')
    control_name = str(input())
    # experimental group
    print('# of experimental groups:')
    ex_number = int(input())
    print('experimental group roots (%i):' % ex_number)
    ex_root = [str(input()) for _ in range(ex_number)]
    print('experimental group names')
    ex_names = []
    for i in range(ex_number):
        print('name for ' + ex_root[i])
        ex_names.append(str(input()))
    # get residuals
    print('calculating residuals')
    control_residual = cwm_residual(root_dir=control_root)
    ex_residuals = [neural_residual(root_dir=ex_root[i]) for i in range(ex_number)]
    # draw histograms: one panel per coordinate axis (x, y, z)
    print('drawing histograms')
    fig, axes = plt.subplots(1, 3, figsize=(14, 4))
    for axis in range(3):
        # Control group drawn as a dotted black outline.
        axes[axis].hist(control_residual[axis],
                        bins=200,
                        density=True,
                        histtype='step',
                        linestyle=':',
                        color='black',
                        label=control_name)
        for i in range(ex_number):
            axes[axis].hist(ex_residuals[i][axis],
                            bins=200,
                            density=True,
                            histtype='step',
                            label=ex_names[i])
        # Text on filtered sigma (std within 2 sigma of the peak)
        control_filtered_std = np.std(filter_nsigma(control_residual[axis], n=2))
        ex_filtered_std = [np.std(filter_nsigma(ex_residuals[i][axis], n=2)) for i in range(ex_number)]
        text_std = '$\\sigma_{%s}=%.1fmm$' % (control_name, control_filtered_std)
        for i in range(ex_number):
            text_std += '\n$\\sigma_{%s}=%.1fmm$' % (ex_names[i], ex_filtered_std[i])
        axes[axis].text(200, 0.78/100,
                        text_std,
                        ha='left', va='top',
                        fontsize=8,
                        bbox=dict(boxstyle='square', fc='w'))
        # axes properties
        axis_name = ['x', 'y', 'z'][axis]
        axes[axis].set_xlabel(r'$%s_{rec} - %s_{real} $ (mm)' % (axis_name, axis_name))
        axes[axis].set_ylabel('portion')
        axes[axis].yaxis.set_major_formatter(PercentFormatter(1))
        axes[axis].set_xlim([-1000, 1000])
        axes[axis].set_ylim([0, 0.8/100])
        axes[axis].grid()
        axes[axis].legend(fontsize=8, loc='upper left')
    plt.tight_layout()
    plt.savefig('MC_vis_histogram.png')
    plt.close()
# Run the interactive comparison when executed directly.
if __name__ == '__main__':
    main()
| 32.340708 | 103 | 0.556848 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,222 | 0.167191 |
8099c71a5b46437ba6242c03e20101234e7abce7 | 1,481 | py | Python | py/annotate.py | Capstone-Projects-2019-Fall/TAG-Local | 088c4f36ff934b5d82fc470d1110f52f6d348070 | [
"MIT"
] | null | null | null | py/annotate.py | Capstone-Projects-2019-Fall/TAG-Local | 088c4f36ff934b5d82fc470d1110f52f6d348070 | [
"MIT"
] | 2 | 2021-01-28T20:25:45.000Z | 2021-05-10T19:38:59.000Z | py/annotate.py | Capstone-Projects-2019-Fall/TAG-Local | 088c4f36ff934b5d82fc470d1110f52f6d348070 | [
"MIT"
] | null | null | null | import json
import argparse
import sys
import spacy
class DocumentClass:
    """A titled text document together with its entity annotations."""

    def __init__(self, title, text, annotations):
        """Store the document title, raw text and list of annotations."""
        self.annotations = annotations
        self.text = text
        self.title = title
class AnnotationClass:
    """One labelled entity span inside a document's text."""

    def __init__(self, label, start, end, content):
        """Record the label, the covered text and its character range."""
        self.label = label
        self.content = content
        self.range = {'startPosition': start, 'endPosition': end}
def main(model, raw_data):
    """Annotate documents with spaCy entities and write them to data.json.

    Args:
        model: path/name of the spaCy model to load.
        raw_data: JSON string, a list of objects with 'title' and 'text'.

    Side effects: prints progress and writes the annotated documents
    (as plain dicts) to 'data.json' in the working directory.
    """
    data = json.loads(raw_data)
    nlp = spacy.load(model)
    print("Loaded model from: '%s'" % model)
    docs = []
    for d in data:
        doc = nlp(d['text'])
        return_data = []
        for ent in doc.ents:
            annotation = AnnotationClass(ent.label_, ent.start_char, ent.end_char, ent.text)
            print("Found entity: %s in %s" % (ent.text, d['title']))
            sys.stdout.flush()
            # __dict__ turns the annotation object into a JSON-serializable dict.
            return_data.append(annotation.__dict__)
        docs.append(DocumentClass(d['title'], d['text'], return_data).__dict__)
    # print("Found %d entities", doc.ents.count)
    with open('data.json', 'w') as outfile:
        json.dump(docs, outfile)
# CLI entry point: parse --model and --raw_data and run the annotator.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--model',
        help="path to ML model"
    )
    parser.add_argument(
        '--raw_data',
        help="Path to the data directory."
    )
    args = parser.parse_args()
    print("args parsed")
    print(args)
    sys.stdout.flush()
    main(args.model, args.raw_data)
| 25.982456 | 92 | 0.613774 | 359 | 0.242404 | 0 | 0 | 0 | 0 | 0 | 0 | 252 | 0.170155 |
809ae1f39dc9d7fac5d194516d317497caa9edc2 | 1,881 | py | Python | tests/model_test.py | jicewarwick/AShareData | 13c78602fe00a5326f421c8a8003f3889492e6dd | [
"MIT"
] | 30 | 2019-09-18T07:26:05.000Z | 2022-03-17T11:15:47.000Z | tests/model_test.py | jicewarwick/Tushare2MySQL | 13c78602fe00a5326f421c8a8003f3889492e6dd | [
"MIT"
] | 2 | 2019-12-11T02:45:58.000Z | 2020-12-21T10:41:43.000Z | tests/model_test.py | jicewarwick/Tushare2MySQL | 13c78602fe00a5326f421c8a8003f3889492e6dd | [
"MIT"
] | 9 | 2019-10-22T09:00:14.000Z | 2022-02-02T02:21:31.000Z | import datetime as dt
import unittest
from AShareData import set_global_config
from AShareData.model import *
class MyTestCase(unittest.TestCase):
    """Smoke tests for the Fama-French factor-model computations."""
    def setUp(self) -> None:
        # Point the AShareData library at the local configuration.
        set_global_config('config.json')
    def test_something(self):
        # NOTE(review): IDE placeholder that always fails; remove or
        # replace with a real assertion.
        self.assertEqual(True, False)
    @staticmethod
    def test_FF3factor_return():
        # Exercise SMB/HML factor-return computation for daily and
        # monthly rebalance/period combinations (no assertions; only
        # checks the calls run without raising).
        model = FamaFrench3FactorModel()
        smb = SMBandHMLCompositor(model)
        date = dt.datetime(2021, 3, 9)
        pre_date = dt.datetime(2021, 3, 8)
        pre_month_date = dt.datetime(2021, 2, 26)
        smb.compute_factor_return(balance_date=pre_date, pre_date=pre_date, date=date,
                                  rebalance_marker='D', period_marker='D')
        smb.compute_factor_return(balance_date=pre_month_date, pre_date=pre_date, date=date,
                                  rebalance_marker='M', period_marker='D')
        smb.compute_factor_return(balance_date=pre_month_date, pre_date=pre_month_date, date=date,
                                  rebalance_marker='M', period_marker='M')
    @staticmethod
    def test_FFC4_factor_return():
        # Same smoke test for the momentum (UMD) factor of the
        # Fama-French-Carhart 4-factor model.
        model = FamaFrenchCarhart4FactorModel()
        umd = UMDCompositor(model)
        date = dt.datetime(2021, 3, 9)
        pre_date = dt.datetime(2021, 3, 8)
        pre_month_date = dt.datetime(2021, 2, 26)
        umd.compute_factor_return(balance_date=pre_date, pre_date=pre_date, date=date,
                                  rebalance_marker='D', period_marker='D')
        umd.compute_factor_return(balance_date=pre_month_date, pre_date=pre_date, date=date,
                                  rebalance_marker='M', period_marker='D')
        umd.compute_factor_return(balance_date=pre_month_date, pre_date=pre_month_date, date=date,
                                  rebalance_marker='M', period_marker='M')
unittest.main()
| 40.891304 | 98 | 0.641148 | 1,718 | 0.913344 | 0 | 0 | 1,531 | 0.813929 | 0 | 0 | 59 | 0.031366 |
809afc40f2a771818d0d0cf20f2537175ded1cf9 | 5,962 | py | Python | quantifying-aliasing/torch_util.py | antonior92/aliasing-in-cnns | c41d7451e8af0cda0d5f8d4444eb0fbb433650bb | [
"MIT"
] | 3 | 2021-06-09T10:12:23.000Z | 2022-01-13T08:38:21.000Z | quantifying-aliasing/torch_util.py | antonior92/aliasing-in-cnns | c41d7451e8af0cda0d5f8d4444eb0fbb433650bb | [
"MIT"
] | null | null | null | quantifying-aliasing/torch_util.py | antonior92/aliasing-in-cnns | c41d7451e8af0cda0d5f8d4444eb0fbb433650bb | [
"MIT"
] | null | null | null | import torch
from collections.abc import Iterable
def _get_layers(model, all_layers=None, all_names=None, top_name=None, fn=None, sep='_'):
"""Auxiliar function. Recursive method for getting all in the model for which `fn(layer)=True`."""
if all_names is None:
all_names = []
if all_layers is None:
all_layers = []
if top_name is None:
top_name = ''
if fn is None:
fn = lambda l: True
for name, layer in model.named_children():
if list(layer.children()):
all_names, all_layers = _get_layers(layer, all_layers, all_names, top_name+name+sep, fn)
else:
if fn(layer):
all_names.append(top_name + name)
all_layers.append(layer)
return all_names, all_layers
def get_layers(model, fn=None, sep='_'):
    """Return [(name, layer), ...] for every leaf layer where `fn(layer)` is true.

    Uses a depth-first search over `model`. Names of nested blocks are
    joined with `sep` (default '_'), e.g. 'layer1_0_conv1' for the nested
    blocks `layer1` -> `0` -> `conv1`. With `fn=None` all leaf layers match.
    """
    names, layers = _get_layers(model, fn=fn, sep=sep)
    return [pair for pair in zip(names, layers)]
def replace_layer(model, layer_name, replace_fn):
    """Replace one (possibly nested) layer of `model` via `new = replace_fn(old)`.

    `layer_name` is a list of sub-module names in *reverse* order; one
    entry is popped per recursion step until the target layer is reached.
    Note the list is consumed (mutated) by the call.
    """
    if not layer_name:
        # Reached the target: swap it for the replacement.
        return replace_fn(model)
    key = layer_name.pop()
    model._modules[key] = replace_layer(model._modules[key], layer_name, replace_fn)
    return model
def replace_all_layers(model, layers, replace_fn, sep='_'):
    """Replace several layers in a (possibly nested) torch.nn.Module.

    Each entry of `layers` is a qualified layer name whose nesting components
    are joined by `sep` (as produced by get_layers). Every named layer is
    replaced by `new_layer = replace_fn(old_layer)`.
    """
    for qualified_name in layers:
        path = qualified_name.split(sep)
        path.reverse()  # replace_layer consumes the path from the end
        model = replace_layer(model, path, replace_fn)
    return model
class SaveIntermediaryValues(object):
    """Records (collapsed) intermediary values of selected layers of a model.

    For every layer accepted by `is_layer_fn`, a forward hook stores
    `collapsing_fn(layer_input, layer)` into a lazily allocated buffer holding
    up to `n_samples` entries stacked along the batch dimension. Supported
    value types are torch.Tensor, dict and (other) iterables, nested
    arbitrarily.
    """
    def __init__(self, collapsing_fn, is_layer_fn, n_samples):
        # collapsing_fn(inp, layer) -> value to store for that layer.
        self.collapsing_fn = collapsing_fn
        # Predicate selecting which (leaf) layers receive a hook.
        self.is_layer_fn = is_layer_fn
        # Samples are stacked along dimension 0.
        self.batch_dim = 0
        self.n_samples = n_samples
        # Per-layer state; populated by save_forward_hooks().
        self.counter = None
        self.is_first_execution = None
        self.storage = None
        self.layer_names = None
    def save_forward_hooks(self, model):
        """Attach a saving forward hook to every selected layer of `model`."""
        all_layers = get_layers(model, fn=self.is_layer_fn)
        self.layer_names = list(list(zip(*all_layers))[0])
        self.storage = {name: None for name in self.layer_names}
        self.counter = {name: 0 for name in self.layer_names}
        self.is_first_execution = {name: True for name in self.layer_names}
        for name in self.layer_names:
            model = replace_all_layers(model, [name], replace_fn=self.hook(name))
        return model
    def hook(self, name):
        """Return a replace_fn that registers the saving hook on a layer."""
        def register_forward_hook(layer):
            def forward_hook(_self, inp, _out):
                x = self.collapsing_fn(inp[0], _self)
                if self.is_first_execution[name]:
                    # Allocate storage lazily, once the shape of `x` is known.
                    self.is_first_execution[name] = False
                    self.storage[name] = self.init_storage(x)
                delta = self.update_storage(x, self.storage[name], self.counter[name])
                self.counter[name] += delta
            layer.register_forward_hook(forward_hook)
            return layer
        return register_forward_hook
    def init_storage(self, x):
        """Allocate a zeroed buffer shaped like `x` but with `n_samples` rows."""
        if type(x) == torch.Tensor:
            shape = list(x.shape)
            shape[self.batch_dim] = self.n_samples
            return torch.zeros(shape, dtype=x.dtype)
        elif type(x) == dict:
            return {key: self.init_storage(value) for key, value in x.items()}
        elif isinstance(x, Iterable):
            return tuple(self.init_storage(xx) for xx in x)
        else:
            raise NotImplementedError()
    def update_storage(self, x, storage, counter):
        """Copy batch `x` into `storage` at row `counter`; return the batch size."""
        if type(x) == torch.Tensor:
            delta = x.shape[self.batch_dim]
            storage[counter:counter + delta, ...] = x
            return delta
        elif type(x) == dict:
            delta = 0
            # NOTE(review): assumes every entry shares one batch size (only the
            # last computed delta is returned) -- confirm for mixed payloads.
            for key, value in x.items():
                delta = self.update_storage(value, storage[key], counter)
            return delta
        elif isinstance(x, Iterable):
            delta = 0
            iter_storage = iter(storage)
            for xx in x:
                delta = self.update_storage(xx, next(iter_storage), counter)
            return delta
        else:
            raise NotImplementedError()
    def reset_storage(self, storage=None):
        """Zero `storage` in place (defaults to the whole internal storage)."""
        if storage is None:
            storage = self.storage
        if storage is None:
            # save_forward_hooks() has not been called yet; nothing to reset.
            return
        if type(storage) == torch.Tensor:
            storage[...] = 0
        elif type(storage) == dict:
            for value in storage.values():
                if value is not None:
                    # Entries stay None until their first forward pass
                    # allocates them; skip those instead of recursing into
                    # self.storage again (the original recursed infinitely).
                    self.reset_storage(value)
        elif isinstance(storage, Iterable):
            # Bug fix: the original looped `for xx in x`, but `x` is undefined
            # in this method (NameError); iterate over `storage` itself.
            for item in storage:
                self.reset_storage(item)
        else:
            raise NotImplementedError()
    def reset(self):
        """Reset counters and zero storage; buffers are reallocated lazily."""
        self.counter = {name: 0 for name in self.layer_names}
        self.is_first_execution = {name: True for name in self.layer_names}
        self.reset_storage()
809b18b1d50940a472d99838108c97ff7972ad17 | 1,079 | py | Python | HorribleConky.py | WolfgangAxel/ConkyConfigs | a437fd91761872202f90c0dab36e1c050a017054 | [
"MIT"
] | 1 | 2016-02-12T11:52:10.000Z | 2016-02-12T11:52:10.000Z | HorribleConky.py | WolfgangAxel/ConkyConfigs | a437fd91761872202f90c0dab36e1c050a017054 | [
"MIT"
] | null | null | null | HorribleConky.py | WolfgangAxel/ConkyConfigs | a437fd91761872202f90c0dab36e1c050a017054 | [
"MIT"
] | null | null | null | #!/usr/bin/python
from lxml import html
import requests
"""
Enter HorribleSubs's title for the shows you
watch in quotes followed by a comma, then
hit enter to add another show. When
all of your watched shows are entered, put
the ending bracket.
EX:
MYSHOWS = ["Bananya",
"New Game",
"Kono Bijutsubu ni wa Mondai ga Aru!",
"Re Zero kara Hajimeru Isekai Seikatsu"]
"""
# Titles listed here are flagged with a leading '**' by makeLine().
MYSHOWS = [
]
def makeLine(string, time, size=50):
    """Format one schedule row: the (possibly truncated) show title padded
    with dots to 45 characters, followed by the airing time with its hour
    shifted forward by two (timezone adjustment), wrapping past midnight.
    Titles listed in MYSHOWS are flagged with a leading '**'.
    Note: `size` is currently unused; kept for interface compatibility.
    """
    title = "**" + string if string in MYSHOWS else string
    out = title[:42] if len(title) > 42 else title
    out += "." * (45 - len(out))
    # Hard-coded +2 hour shift; `time` is expected as 'HHMM'.
    hour = int(time[0:2]) + 2
    if hour >= 24:
        hour -= 24
    hour_text = str(hour)
    if len(hour_text) != 2:
        hour_text = "0" + hour_text
    return out + hour_text + time[2:]
# Scrape the HorribleSubs front page and print one formatted line per show.
page = requests.get("http://horriblesubs.info/")
tree = html.fromstring(page.content)
# Show titles and their scheduled airing times, extracted as parallel lists.
# NOTE(review): assumes both XPath queries return equally long lists in the
# same order -- verify against the actual page markup.
show = tree.xpath('//a[@title="See all releases for this show"]/text()')
sched = tree.xpath('//td[@class="schedule-time"]/text()')
for i in range(len(show)):
    line = makeLine(show[i],sched[i])
    print line  # Python 2 print statement
| 21.156863 | 72 | 0.658943 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 458 | 0.424467 |
809cd1e9b18d3c534a6711685a3e152ddbafdb24 | 8,517 | py | Python | appengine/components/components/endpoints_webapp2/partial.py | Swift1313/luci-py | 0a4fdfc25f89833026be6a8b29c0a27b8f3c5fc4 | [
"Apache-2.0"
] | null | null | null | appengine/components/components/endpoints_webapp2/partial.py | Swift1313/luci-py | 0a4fdfc25f89833026be6a8b29c0a27b8f3c5fc4 | [
"Apache-2.0"
] | null | null | null | appengine/components/components/endpoints_webapp2/partial.py | Swift1313/luci-py | 0a4fdfc25f89833026be6a8b29c0a27b8f3c5fc4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Partial response utilities for an Endpoints v1 over webapp2 service.
Grammar of a fields partial response string:
fields: selector [,selector]*
selector: path [(fields)]?
path: name [/name]*
name: [A-Za-z_][A-Za-z0-9_]* | \*
Examples:
fields=a
Response includes the value of the "a" field.
fields=a,b
Response includes the values of the "a" and "b" fields.
fields=a/b
Response includes the value of the "b" field of "a". If "a" is an array,
response includes the values of the "b" fields of every element in the array.
fields=a/*/c
For every element or field of "a", response includes the value of that element
or field's "c" field.
fields=a(b,c)
Equivalent to fields=a/b union fields=a/c.
"""
class ParsingError(Exception):
  """Raised when a fields partial response string cannot be parsed.

  Fields:
    index: The index the error occurred at.
    message: The error message.
  """

  def __init__(self, index, message):
    formatted = '%d: %s' % (index, message)
    super(ParsingError, self).__init__(formatted)
    self.index = index
    self.message = message
def _merge(source, destination):
"""Recursively merges the source dict into the destination dict.
Args:
source: A dict whose values are source dicts.
destination: A dict whose values are destination dicts.
"""
for key, value in source.iteritems():
if destination.get(key):
_merge(value, destination[key])
else:
destination[key] = value
class _ParsingContext(object):
  """Encapsulates parsing information for one nesting level of a fields string.

  Attributes:
    accumulator: A list of field name characters accumulated so far.
    expecting_name: Whether or not a field name is expected.
    fields: A dict of accumulated fields.
  """
  def __init__(self):
    """Initializes a new instance of ParsingContext."""
    # Pointer to the subfield dict of the last added field.
    self._last = None
    self.accumulator = []
    self.expecting_name = True
    self.fields = {}
  def accumulate(self, char):
    """Accumulates the given char.

    Args:
      char: The character to accumulate.
    """
    # Accumulate all characters even if they aren't allowed by the grammar.
    # In the worst case there will be extra keys in the fields dict which will
    # be ignored when the mask is applied because they don't match any legal
    # field name. It won't cause incorrect masks to be applied. The exception is
    # / which has special meaning. See add_field below. Note that * has special
    # meaning while applying the mask but not while parsing. See _apply below.
    self.accumulator.append(char)
  def add_field(self, i):
    """Records the field name in the accumulator then clears the accumulator.

    Args:
      i: The index the parser is at.
    """
    # Reset the index to the start of the accumulated string.
    # (i is only used here to report accurate positions in ParsingError.)
    i -= len(self.accumulator)
    path = ''.join(self.accumulator).strip()
    if not path:
      raise ParsingError(i, 'expected name')
    # Advance i to the first non-space char.
    for char in self.accumulator:
      if char != ' ':
        break
      i += 1
    # / has special meaning; a/b/c is shorthand for a(b(c)). Add subfield dicts
    # recursively. E.g. if the fields dict is empty then parsing a/b/c is like
    # setting fields["a"] = {"b": {"c": {}} and pointing last to c's value.
    pointer = self.fields
    for part in path.split('/'):
      if not part:
        raise ParsingError(i, 'empty name in path')
      pointer = pointer.setdefault(part, {})
      # Increment the index by the length of this part as well as the /.
      i += len(part) + 1
    # Remember the innermost dict so add_subfields can attach (...) groups.
    self._last = pointer
    self.accumulator = []
  def add_subfields(self, subfields):
    """Adds the given subfields to the last added field.

    Args:
      subfields: A dict of accumulated subfields.

    Returns:
      False if there was no last added field to add subfields to, else True.
    """
    if self._last is None:
      return False
    _merge(subfields, self._last)
    return True
def _parse(fields):
  """Parses the given partial response string into a partial response mask.

  Args:
    fields: A fields partial response string.

  Returns:
    A dict which can be used to mask another dict.

  Raises:
    ParsingError: If fields wasn't a valid partial response string.
  """
  # One _ParsingContext per open '('; stack[-1] is the current nesting level.
  stack = [_ParsingContext()]
  i = 0
  while i < len(fields):
    # Invariants maintained below.
    # Stack invariant: The stack always has at least one context.
    assert stack, fields
    # Accumulator invariant: Non-empty accumulator implies expecting a name.
    assert not stack[-1].accumulator or stack[-1].expecting_name, fields
    if fields[i] == ',':
      # If we just returned from a lower context, no name is expected.
      if stack[-1].expecting_name:
        stack[-1].add_field(i)
      stack[-1].expecting_name = True
    elif fields[i] == '(':
      # Maintain accumulator invariant.
      # A name must occur before any (.
      stack[-1].add_field(i)
      stack[-1].expecting_name = False
      # Enter a new context. When we return from this context we don't expect to
      # accumulate another name. There must be , or a return to a higher context
      # (or the end of the string altogether).
      stack.append(_ParsingContext())
    elif fields[i] == ')':
      # If we just returned from a lower context, no name is expected.
      if stack[-1].expecting_name:
        stack[-1].add_field(i)
      # Return to a higher context. Maintain stack invariant.
      subfields = stack.pop().fields
      if not stack:
        # Mismatched ().
        raise ParsingError(i, 'unexpected )')
      # See accumulator invariant maintenance above.
      assert not stack[-1].expecting_name, fields
      if not stack[-1].add_subfields(subfields):
        # ) before any field.
        raise ParsingError(i, 'unexpected (')
    else:
      # If we just returned from a lower context, no name is expected.
      if not stack[-1].expecting_name:
        raise ParsingError(i, 'unexpected name')
      stack[-1].accumulate(fields[i])
    i += 1
  if len(stack) != 1:
    # Mismatched ().
    raise ParsingError(i, 'expected )')
  # If we just returned from a lower context, no name is expected.
  if stack[-1].expecting_name:
    stack[-1].add_field(i)
  return stack[0].fields
def _apply(response, partial):
"""Applies the given partial response dict to the given response.
Args:
response: A dict to be updated in place.
partial: A partial response dict as returned by _parse. May be modified,
but will not have its masking behavior changed.
Returns:
The masked response.
"""
for key, value in response.items():
pointer = None
if key in partial:
if partial[key]:
# If the subfield dict is non-empty, include all of *'s subfields.
_merge(partial.get('*', {}), partial[key])
pointer = partial[key]
elif '*' in partial:
pointer = partial['*']
if pointer is None:
response.pop(key)
elif pointer:
if isinstance(value, dict) and value:
_apply(value, pointer)
if not value:
# No subfields were kept, remove this field.
response.pop(key)
elif isinstance(value, list) and value:
new_values = []
for v in value:
# In a dict constructed from a protorpc.message.Message list elements
# always have the same type. Here we allow list elements to have mixed
# types and only recursively apply the mask to dicts.
if isinstance(v, dict):
_apply(v, pointer)
if v:
# Subfields were kept, include this element.
new_values.append(v)
else:
# Non-dict, include this element.
new_values.append(v)
response[key] = new_values
if not new_values:
# No elements were kept, remove this field.
response.pop(key)
return response
def mask(response, fields):
  """Applies the given fields partial response string to the given response.

  Args:
    response: A dict encoded using protorpc.protojson.ProtoJson.encode_message
      to be updated in place.
    fields: A fields partial response string.

  Returns:
    The masked response.

  Raises:
    ParsingError: If fields wasn't a valid partial response string.
  """
  parsed = _parse(fields)
  return _apply(response, parsed)
| 29.884211 | 80 | 0.656921 | 2,857 | 0.335447 | 0 | 0 | 0 | 0 | 0 | 0 | 5,101 | 0.59892 |
809d1a480aa05cc6e8c44d78d1263f97f33db820 | 312 | py | Python | Python/02-Django/01-Try-Django/Challenges/01-Getting-Started/03/mainurls.py | pabhd3/Code-School | 5115c1bdc1e99ea667bdc942667342123ae3929a | [
"MIT"
] | null | null | null | Python/02-Django/01-Try-Django/Challenges/01-Getting-Started/03/mainurls.py | pabhd3/Code-School | 5115c1bdc1e99ea667bdc942667342123ae3929a | [
"MIT"
] | null | null | null | Python/02-Django/01-Try-Django/Challenges/01-Getting-Started/03/mainurls.py | pabhd3/Code-School | 5115c1bdc1e99ea667bdc942667342123ae3929a | [
"MIT"
] | null | null | null | # Python
# Django
# Try Django
# Getting Started (Level 1)
# Challenge 03 - Refactor the existing URL Dispatchers
from django.conf.urls import url
from . import views
urlpatterns = [
# TODO: Add a url() object whose regex parameter takes an empty path that
# terminates, and goes to views.home
] | 24 | 78 | 0.711538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 225 | 0.721154 |
809ee11b7cac20d59b1a576eee070e9b5073a1ab | 521 | py | Python | vmraid/patches/v13_0/add_switch_theme_to_navbar_settings.py | sowrisurya/vmraid | f833e00978019dad87af80b41279c0146c063ed5 | [
"MIT"
] | null | null | null | vmraid/patches/v13_0/add_switch_theme_to_navbar_settings.py | sowrisurya/vmraid | f833e00978019dad87af80b41279c0146c063ed5 | [
"MIT"
] | null | null | null | vmraid/patches/v13_0/add_switch_theme_to_navbar_settings.py | sowrisurya/vmraid | f833e00978019dad87af80b41279c0146c063ed5 | [
"MIT"
] | null | null | null | from __future__ import unicode_literals
import vmraid
def execute():
navbar_settings = vmraid.get_single("Navbar Settings")
if vmraid.db.exists('Navbar Item', {'item_label': 'Toggle Theme'}):
return
for navbar_item in navbar_settings.settings_dropdown[6:]:
navbar_item.idx = navbar_item.idx + 1
navbar_settings.append('settings_dropdown', {
'item_label': 'Toggle Theme',
'item_type': 'Action',
'action': 'new vmraid.ui.ThemeSwitcher().show()',
'is_standard': 1,
'idx': 7
})
navbar_settings.save() | 24.809524 | 68 | 0.723608 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 184 | 0.353167 |
809ef4ffa07e480d7b2e767240e3be82449c7ee9 | 82 | py | Python | Python/04. Sets/006. Set union() Operation.py | stonehengee/HackerrankPractice | ec052e7447391e40d1919cf0b641ff5023da3da3 | [
"MIT"
] | null | null | null | Python/04. Sets/006. Set union() Operation.py | stonehengee/HackerrankPractice | ec052e7447391e40d1919cf0b641ff5023da3da3 | [
"MIT"
] | null | null | null | Python/04. Sets/006. Set union() Operation.py | stonehengee/HackerrankPractice | ec052e7447391e40d1919cf0b641ff5023da3da3 | [
"MIT"
] | null | null | null | # Problem: https://www.hackerrank.com/challenges/py-set-union/problem
# Score: 10
| 27.333333 | 69 | 0.756098 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.97561 |
809f0a2dcf94026faf4117b1a4863d6f038ab4bf | 137 | py | Python | fun/vowel_counter.py | ahmedelq/PythonicAlgorithms | ce10dbb6e1fd0ea5c922a932b0f920236aa411bf | [
"MIT"
] | null | null | null | fun/vowel_counter.py | ahmedelq/PythonicAlgorithms | ce10dbb6e1fd0ea5c922a932b0f920236aa411bf | [
"MIT"
] | null | null | null | fun/vowel_counter.py | ahmedelq/PythonicAlgorithms | ce10dbb6e1fd0ea5c922a932b0f920236aa411bf | [
"MIT"
] | null | null | null | def count_vowels(txt):
vs = "a, e, i, o, u".split(', ')
return sum([1 for t in txt if t in vs])
print(count_vowels('Hello world')) | 19.571429 | 40 | 0.605839 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 32 | 0.233577 |
809f5c8d05928489dbd0063155d53bd8866d8342 | 75,667 | py | Python | Lib/site-packages/OCC/Convert.py | JWerbrouck/RWTH_M1_Projekt | 7ae63a2277361fa3273cf0677b297379482b8240 | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/OCC/Convert.py | JWerbrouck/RWTH_M1_Projekt | 7ae63a2277361fa3273cf0677b297379482b8240 | [
"bzip2-1.0.6"
] | 1 | 2022-03-17T16:46:04.000Z | 2022-03-17T16:46:04.000Z | Lib/site-packages/OCC/Convert.py | JWerbrouck/RWTH_M1_Projekt | 7ae63a2277361fa3273cf0677b297379482b8240 | [
"bzip2-1.0.6"
] | null | null | null | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.1
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# SWIG-generated bootstrap: pick the right way to build bound methods for
# this Python version (Py3 wraps via the extension helper; Py2 uses the
# legacy `new.instancemethod`).
if version_info >= (3,0,0):
    new_instancemethod = lambda func, inst, cls: _Convert.SWIG_PyInstanceMethod_New(func)
else:
    from new import instancemethod as new_instancemethod
# Locate and load the compiled _Convert extension module that implements
# all of the bindings below (imp-based search next to this file on 2.6+).
if version_info >= (2,6,0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_Convert', [dirname(__file__)])
        except ImportError:
            # Not found next to this file; fall back to a regular import.
            import _Convert
            return _Convert
        if fp is not None:
            try:
                _mod = imp.load_module('_Convert', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _Convert = swig_import_helper()
    del swig_import_helper
else:
    import _Convert
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    """Set attribute `name` on a SWIG proxy object.

    'thisown'/'this' are forwarded to the underlying SwigPyObject; other
    names go through the class's SWIG setter table. With `static` true,
    unknown attributes raise AttributeError instead of being added to the
    instance dict.
    """
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Dynamic variant: static=0, so unknown names are added to the instance dict.
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    """Look up attribute `name` via the class's SWIG getter table."""
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
    """repr() for SWIG proxies; falls back to an empty tag if `this` is unset."""
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Fallback for very old Pythons where new-style classes (`object`) are
# unavailable; _newclass records which case applies.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
def _swig_setattr_nondynamic_method(set):
    """Wrap setter `set` so that only existing attributes (or 'this') can be assigned."""
    def set_attr(self,name,value):
        if (name == "thisown"): return self.this.own(value)
        if hasattr(self,name) or (name == "this"):
            set(self,name,value)
        else:
            raise AttributeError("You cannot add attributes to %s" % self)
    return set_attr
class SwigPyIterator(object):
    """Abstract SWIG proxy over C++ iterators; not constructible from Python."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _Convert.delete_SwigPyIterator
    def __iter__(self): return self
# Bind the flat C iterator functions from the extension module as methods.
SwigPyIterator.value = new_instancemethod(_Convert.SwigPyIterator_value,None,SwigPyIterator)
SwigPyIterator.incr = new_instancemethod(_Convert.SwigPyIterator_incr,None,SwigPyIterator)
SwigPyIterator.decr = new_instancemethod(_Convert.SwigPyIterator_decr,None,SwigPyIterator)
SwigPyIterator.distance = new_instancemethod(_Convert.SwigPyIterator_distance,None,SwigPyIterator)
SwigPyIterator.equal = new_instancemethod(_Convert.SwigPyIterator_equal,None,SwigPyIterator)
SwigPyIterator.copy = new_instancemethod(_Convert.SwigPyIterator_copy,None,SwigPyIterator)
SwigPyIterator.next = new_instancemethod(_Convert.SwigPyIterator_next,None,SwigPyIterator)
SwigPyIterator.__next__ = new_instancemethod(_Convert.SwigPyIterator___next__,None,SwigPyIterator)
SwigPyIterator.previous = new_instancemethod(_Convert.SwigPyIterator_previous,None,SwigPyIterator)
SwigPyIterator.advance = new_instancemethod(_Convert.SwigPyIterator_advance,None,SwigPyIterator)
SwigPyIterator.__eq__ = new_instancemethod(_Convert.SwigPyIterator___eq__,None,SwigPyIterator)
SwigPyIterator.__ne__ = new_instancemethod(_Convert.SwigPyIterator___ne__,None,SwigPyIterator)
SwigPyIterator.__iadd__ = new_instancemethod(_Convert.SwigPyIterator___iadd__,None,SwigPyIterator)
SwigPyIterator.__isub__ = new_instancemethod(_Convert.SwigPyIterator___isub__,None,SwigPyIterator)
SwigPyIterator.__add__ = new_instancemethod(_Convert.SwigPyIterator___add__,None,SwigPyIterator)
SwigPyIterator.__sub__ = new_instancemethod(_Convert.SwigPyIterator___sub__,None,SwigPyIterator)
SwigPyIterator_swigregister = _Convert.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
import OCC.Standard
import OCC.TColgp
import OCC.gp
import OCC.TCollection
import OCC.MMgt
import OCC.TColStd
# Re-export the Convert_ParameterisationType enum values from the extension.
Convert_TgtThetaOver2 = _Convert.Convert_TgtThetaOver2
Convert_TgtThetaOver2_1 = _Convert.Convert_TgtThetaOver2_1
Convert_TgtThetaOver2_2 = _Convert.Convert_TgtThetaOver2_2
Convert_TgtThetaOver2_3 = _Convert.Convert_TgtThetaOver2_3
Convert_TgtThetaOver2_4 = _Convert.Convert_TgtThetaOver2_4
Convert_QuasiAngular = _Convert.Convert_QuasiAngular
Convert_RationalC1 = _Convert.Convert_RationalC1
Convert_Polynomial = _Convert.Convert_Polynomial
class Convert_CompBezierCurves2dToBSplineCurve2d(object):
    """SWIG proxy for OCC Convert_CompBezierCurves2dToBSplineCurve2d: converts
    a sequence of adjacent non-rational 2d Bezier curves into the data (poles,
    knots, multiplicities) of a single BSpline curve.

    Usage: AddCurve() each adjacent Bezier curve, then Perform(), then read
    the results with Degree/NbPoles/Poles/NbKnots/KnotsAndMults.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args): 
        """Construct the converter. Knots are created at each junction of two
        consecutive Bezier curves; continuity is increased at a junction when
        the tangent vectors there are parallel, as judged by AngularTolerance.

        :param AngularTolerance: parallelism tolerance in radians; default value is 1.0e-4
        :type AngularTolerance: float
        :rtype: None
        """
        _Convert.Convert_CompBezierCurves2dToBSplineCurve2d_swiginit(self,_Convert.new_Convert_CompBezierCurves2dToBSplineCurve2d(*args))
    def AddCurve(self, *args):
        """Append the non-rational Bezier curve defined by the pole table
        `Poles` to the sequence of adjacent curves to convert. The first pole
        of the added curve must coincide with the last pole of the previous
        curve; this condition is NOT checked.

        :param Poles:
        :type Poles: TColgp_Array1OfPnt2d
        :rtype: None
        """
        return _Convert.Convert_CompBezierCurves2dToBSplineCurve2d_AddCurve(self, *args)
    def Perform(self, *args):
        """Compute the BSpline data equivalent to the accumulated sequence of
        adjacent Bezier curves. Must be called before the consultation
        methods below.

        :rtype: None
        """
        return _Convert.Convert_CompBezierCurves2dToBSplineCurve2d_Perform(self, *args)
    def Degree(self, *args):
        """Degree of the computed BSpline curve (only valid after Perform()).

        :rtype: int
        """
        return _Convert.Convert_CompBezierCurves2dToBSplineCurve2d_Degree(self, *args)
    def NbPoles(self, *args):
        """Number of poles of the computed BSpline curve (only valid after
        Perform()).

        :rtype: int
        """
        return _Convert.Convert_CompBezierCurves2dToBSplineCurve2d_NbPoles(self, *args)
    def Poles(self, *args):
        """Fill `Poles` with the poles of the computed BSpline curve. The
        array length must equal NbPoles(); only valid after Perform().

        :param Poles:
        :type Poles: TColgp_Array1OfPnt2d
        :rtype: None
        """
        return _Convert.Convert_CompBezierCurves2dToBSplineCurve2d_Poles(self, *args)
    def NbKnots(self, *args):
        """Number of knots of the computed BSpline curve (only valid after
        Perform()).

        :rtype: int
        """
        return _Convert.Convert_CompBezierCurves2dToBSplineCurve2d_NbKnots(self, *args)
    def KnotsAndMults(self, *args):
        """Fill `Knots` and `Mults` with the knots and corresponding
        multiplicities of the computed BSpline curve. Both array lengths must
        equal NbKnots(); only valid after Perform().

        :param Knots:
        :type Knots: TColStd_Array1OfReal &
        :param Mults:
        :type Mults: TColStd_Array1OfInteger &
        :rtype: None
        """
        return _Convert.Convert_CompBezierCurves2dToBSplineCurve2d_KnotsAndMults(self, *args)
    def __del__(self):
        # Release the wrapped C++ object when the Python proxy is collected.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Bind the flat C functions as methods and register the proxy with SWIG.
Convert_CompBezierCurves2dToBSplineCurve2d.AddCurve = new_instancemethod(_Convert.Convert_CompBezierCurves2dToBSplineCurve2d_AddCurve,None,Convert_CompBezierCurves2dToBSplineCurve2d)
Convert_CompBezierCurves2dToBSplineCurve2d.Perform = new_instancemethod(_Convert.Convert_CompBezierCurves2dToBSplineCurve2d_Perform,None,Convert_CompBezierCurves2dToBSplineCurve2d)
Convert_CompBezierCurves2dToBSplineCurve2d.Degree = new_instancemethod(_Convert.Convert_CompBezierCurves2dToBSplineCurve2d_Degree,None,Convert_CompBezierCurves2dToBSplineCurve2d)
Convert_CompBezierCurves2dToBSplineCurve2d.NbPoles = new_instancemethod(_Convert.Convert_CompBezierCurves2dToBSplineCurve2d_NbPoles,None,Convert_CompBezierCurves2dToBSplineCurve2d)
Convert_CompBezierCurves2dToBSplineCurve2d.Poles = new_instancemethod(_Convert.Convert_CompBezierCurves2dToBSplineCurve2d_Poles,None,Convert_CompBezierCurves2dToBSplineCurve2d)
Convert_CompBezierCurves2dToBSplineCurve2d.NbKnots = new_instancemethod(_Convert.Convert_CompBezierCurves2dToBSplineCurve2d_NbKnots,None,Convert_CompBezierCurves2dToBSplineCurve2d)
Convert_CompBezierCurves2dToBSplineCurve2d.KnotsAndMults = new_instancemethod(_Convert.Convert_CompBezierCurves2dToBSplineCurve2d_KnotsAndMults,None,Convert_CompBezierCurves2dToBSplineCurve2d)
Convert_CompBezierCurves2dToBSplineCurve2d._kill_pointed = new_instancemethod(_Convert.Convert_CompBezierCurves2dToBSplineCurve2d__kill_pointed,None,Convert_CompBezierCurves2dToBSplineCurve2d)
Convert_CompBezierCurves2dToBSplineCurve2d_swigregister = _Convert.Convert_CompBezierCurves2dToBSplineCurve2d_swigregister
Convert_CompBezierCurves2dToBSplineCurve2d_swigregister(Convert_CompBezierCurves2dToBSplineCurve2d)
class Convert_CompBezierCurvesToBSplineCurve(object):
    """SWIG proxy that converts a sequence of adjacent non-rational
    Bezier curves into the data (poles, knots, multiplicities) of an
    equivalent BSpline curve.  Feed curves with AddCurve, call Perform,
    then read the results through the accessor methods."""
    # Membership flag: True when this Python proxy owns the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args):
        """Build the conversion framework.

        :param AngularTolerance: tolerance in radians (default 1.0e-4)
            used to test whether the tangents of two consecutive Bezier
            curves are parallel at their junction point, in which case
            the continuity of the BSpline is increased there.
        """
        _Convert.Convert_CompBezierCurvesToBSplineCurve_swiginit(self, _Convert.new_Convert_CompBezierCurvesToBSplineCurve(*args))

    def AddCurve(self, *args):
        """Append the polynomial (non-rational) Bezier curve defined by
        the pole table ``Poles`` (TColgp_Array1OfPnt) to the sequence to
        be converted.  The first point of each added curve must coincide
        with the last point of the preceding one; this adjacency is NOT
        checked here and an error may occur at computation time."""
        return _Convert.Convert_CompBezierCurvesToBSplineCurve_AddCurve(self, *args)

    def Perform(self, *args):
        """Compute the BSpline data for the accumulated Bezier curve
        sequence, inserting a knot at each junction point and raising
        the continuity where consecutive tangents are parallel (within
        the angular tolerance given at construction)."""
        return _Convert.Convert_CompBezierCurvesToBSplineCurve_Perform(self, *args)

    def Degree(self, *args):
        """Return the degree (int) of the computed BSpline curve.  Only
        meaningful after Perform has been called (not checked)."""
        return _Convert.Convert_CompBezierCurvesToBSplineCurve_Degree(self, *args)

    def NbPoles(self, *args):
        """Return the number of poles (int) of the computed BSpline
        curve.  Only meaningful after Perform (not checked)."""
        return _Convert.Convert_CompBezierCurvesToBSplineCurve_NbPoles(self, *args)

    def Poles(self, *args):
        """Fill ``Poles`` (TColgp_Array1OfPnt, which must be sized to
        NbPoles) with the poles of the computed BSpline curve.  Only
        meaningful after Perform (not checked)."""
        return _Convert.Convert_CompBezierCurvesToBSplineCurve_Poles(self, *args)

    def NbKnots(self, *args):
        """Return the number of knots (int) of the computed BSpline
        curve.  Only meaningful after Perform (not checked)."""
        return _Convert.Convert_CompBezierCurvesToBSplineCurve_NbKnots(self, *args)

    def KnotsAndMults(self, *args):
        """Fill ``Knots`` (TColStd_Array1OfReal) and ``Mults``
        (TColStd_Array1OfInteger), both sized to NbKnots, with the knots
        and corresponding multiplicities of the computed BSpline curve.
        Only meaningful after Perform (not checked)."""
        return _Convert.Convert_CompBezierCurvesToBSplineCurve_KnotsAndMults(self, *args)

    def __del__(self):
        # Hand the wrapped C++ object to the delayed garbage collector;
        # swallow every error because this may run during interpreter
        # shutdown when module globals are already gone.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach the C-extension implementations as bound instance methods and
# register the proxy class with the SWIG runtime.
for _name in ('AddCurve', 'Perform', 'Degree', 'NbPoles', 'Poles',
              'NbKnots', 'KnotsAndMults', '_kill_pointed'):
    setattr(Convert_CompBezierCurvesToBSplineCurve, _name,
            new_instancemethod(getattr(_Convert, 'Convert_CompBezierCurvesToBSplineCurve_' + _name),
                               None, Convert_CompBezierCurvesToBSplineCurve))
del _name
Convert_CompBezierCurvesToBSplineCurve_swigregister = _Convert.Convert_CompBezierCurvesToBSplineCurve_swigregister
Convert_CompBezierCurvesToBSplineCurve_swigregister(Convert_CompBezierCurvesToBSplineCurve)
class Convert_CompPolynomialToPoles(object):
    """SWIG proxy that converts one or several spans of polynomial
    functions (given by their coefficients over polynomial/true
    parameter intervals) into the poles, knots and multiplicities of an
    n-dimensional BSpline."""
    # Membership flag: True when this Python proxy owns the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args):
        """Build the converter.  Three C++ overloads exist:

        * (NumCurves, Continuity, Dimension, MaxDegree, NumCoeffPerCurve,
          Coefficients, PolynomialIntervals, TrueIntervals) -- several
          spans with one continuity order.  Continuity can be at most
          the maximum polynomial degree; coefficients are laid out in
          implicit C form [NumCurves][MaxDegree+1][Dimension], and a
          polynomial of degree d has d+1 coefficients.  TrueIntervals
          is the true parameterisation of the composite curve.
        * (NumCurves, Dimension, MaxDegree, Continuity, NumCoeffPerCurve,
          Coefficients, PolynomialIntervals, TrueIntervals) -- several
          spans with per-junction continuity; the Continuity array must
          have length NumCurves-1.
        * (Dimension, MaxDegree, Degree, Coefficients,
          PolynomialIntervals, TrueIntervals) -- a single span.
        """
        _Convert.Convert_CompPolynomialToPoles_swiginit(self, _Convert.new_Convert_CompPolynomialToPoles(*args))

    def NbPoles(self, *args):
        """Return the number of poles (int) of the n-dimensional BSpline."""
        return _Convert.Convert_CompPolynomialToPoles_NbPoles(self, *args)

    def Poles(self, *args):
        """Fill ``Poles`` (Handle_TColStd_HArray2OfReal) with the poles
        of the n-dimensional BSpline, in the format
        [1..NumPoles][1..Dimension]."""
        return _Convert.Convert_CompPolynomialToPoles_Poles(self, *args)

    def Degree(self, *args):
        """Return the degree (int) of the BSpline."""
        return _Convert.Convert_CompPolynomialToPoles_Degree(self, *args)

    def NbKnots(self, *args):
        """Return the number of knots (int) of the n-dimensional BSpline."""
        return _Convert.Convert_CompPolynomialToPoles_NbKnots(self, *args)

    def Knots(self, *args):
        """Fill ``K`` (Handle_TColStd_HArray1OfReal) with the knots of
        the n-dimensional BSpline."""
        return _Convert.Convert_CompPolynomialToPoles_Knots(self, *args)

    def Multiplicities(self, *args):
        """Fill ``M`` (Handle_TColStd_HArray1OfInteger) with the
        multiplicities of the knots of the BSpline."""
        return _Convert.Convert_CompPolynomialToPoles_Multiplicities(self, *args)

    def IsDone(self, *args):
        """Return True when the conversion succeeded."""
        return _Convert.Convert_CompPolynomialToPoles_IsDone(self, *args)

    def __del__(self):
        # Hand the wrapped C++ object to the delayed garbage collector;
        # swallow every error because this may run during interpreter
        # shutdown when module globals are already gone.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach the C-extension implementations as bound instance methods and
# register the proxy class with the SWIG runtime.
for _name in ('NbPoles', 'Poles', 'Degree', 'NbKnots', 'Knots',
              'Multiplicities', 'IsDone', '_kill_pointed'):
    setattr(Convert_CompPolynomialToPoles, _name,
            new_instancemethod(getattr(_Convert, 'Convert_CompPolynomialToPoles_' + _name),
                               None, Convert_CompPolynomialToPoles))
del _name
Convert_CompPolynomialToPoles_swigregister = _Convert.Convert_CompPolynomialToPoles_swigregister
Convert_CompPolynomialToPoles_swigregister(Convert_CompPolynomialToPoles)
class Convert_ConicToBSplineCurve(object):
    """Abstract SWIG proxy: root of the conic-to-BSpline-curve
    conversion algorithms.  It cannot be instantiated directly; it only
    exposes read access to the computed 2D BSpline curve data (degree,
    poles, weights, knots, multiplicities)."""
    # Membership flag: True when this Python proxy owns the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def Degree(self, *args):
        """Return the degree (int) of the computed BSpline curve."""
        return _Convert.Convert_ConicToBSplineCurve_Degree(self, *args)

    def NbPoles(self, *args):
        """Return the number of poles (int) of the computed BSpline curve."""
        return _Convert.Convert_ConicToBSplineCurve_NbPoles(self, *args)

    def NbKnots(self, *args):
        """Return the number of knots (int) of the computed BSpline curve."""
        return _Convert.Convert_ConicToBSplineCurve_NbKnots(self, *args)

    def IsPeriodic(self, *args):
        """Return True if the computed BSpline curve is periodic."""
        return _Convert.Convert_ConicToBSplineCurve_IsPeriodic(self, *args)

    def Pole(self, *args):
        """Return the pole (gp_Pnt2d) of range ``Index``.  Raises
        Standard_OutOfRange if Index is outside the poles table."""
        return _Convert.Convert_ConicToBSplineCurve_Pole(self, *args)

    def Weight(self, *args):
        """Return the weight (float) of the pole of range ``Index``.
        Raises Standard_OutOfRange if Index is outside the poles table."""
        return _Convert.Convert_ConicToBSplineCurve_Weight(self, *args)

    def Knot(self, *args):
        """Return the knot (float) of range ``Index``.  Raises
        Standard_OutOfRange if Index is outside the knots table."""
        return _Convert.Convert_ConicToBSplineCurve_Knot(self, *args)

    def Multiplicity(self, *args):
        """Return the multiplicity (int) of the knot of range ``Index``.
        Raises Standard_OutOfRange if Index is outside the knots table."""
        return _Convert.Convert_ConicToBSplineCurve_Multiplicity(self, *args)

    def BuildCosAndSin(self, *args):
        """Build rational data representing cosine and sine for the given
        Convert_ParameterisationType: fills the ``CosNumerator``,
        ``SinNumerator`` and ``Denominator`` real arrays together with
        ``Degree``, ``Knots`` and ``Mults``.  A second C++ overload
        additionally takes the parameter range (UFirst, ULast)."""
        return _Convert.Convert_ConicToBSplineCurve_BuildCosAndSin(self, *args)

    def __del__(self):
        # Hand the wrapped C++ object to the delayed garbage collector;
        # swallow every error because this may run during interpreter
        # shutdown when module globals are already gone.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach the C-extension implementations as bound instance methods and
# register the proxy class with the SWIG runtime.
for _name in ('Degree', 'NbPoles', 'NbKnots', 'IsPeriodic', 'Pole',
              'Weight', 'Knot', 'Multiplicity', 'BuildCosAndSin',
              '_kill_pointed'):
    setattr(Convert_ConicToBSplineCurve, _name,
            new_instancemethod(getattr(_Convert, 'Convert_ConicToBSplineCurve_' + _name),
                               None, Convert_ConicToBSplineCurve))
del _name
Convert_ConicToBSplineCurve_swigregister = _Convert.Convert_ConicToBSplineCurve_swigregister
Convert_ConicToBSplineCurve_swigregister(Convert_ConicToBSplineCurve)
class Convert_ElementarySurfaceToBSplineSurface(object):
    """Abstract SWIG proxy: root of the elementary-surface-to-BSpline
    conversion algorithms.  It cannot be instantiated directly; it only
    exposes read access to the computed BSpline surface data in the u
    and v parametric directions."""
    # Membership flag: True when this Python proxy owns the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def UDegree(self, *args):
        """Return the degree (int) in the u parametric direction."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_UDegree(self, *args)

    def VDegree(self, *args):
        """Return the degree (int) in the v parametric direction."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_VDegree(self, *args)

    def NbUPoles(self, *args):
        """Return the number of poles (int) in the u direction."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_NbUPoles(self, *args)

    def NbVPoles(self, *args):
        """Return the number of poles (int) in the v direction."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_NbVPoles(self, *args)

    def NbUKnots(self, *args):
        """Return the number of knots (int) in the u direction."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_NbUKnots(self, *args)

    def NbVKnots(self, *args):
        """Return the number of knots (int) in the v direction."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_NbVKnots(self, *args)

    def IsUPeriodic(self, *args):
        """Return True if the surface is periodic in the u direction."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_IsUPeriodic(self, *args)

    def IsVPeriodic(self, *args):
        """Return True if the surface is periodic in the v direction."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_IsVPeriodic(self, *args)

    def Pole(self, *args):
        """Return the pole (gp_Pnt) at (UIndex, VIndex).  Raises
        Standard_OutOfRange when either index is outside the poles table
        in its direction."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_Pole(self, *args)

    def Weight(self, *args):
        """Return the weight (float) of the pole at (UIndex, VIndex).
        Raises Standard_OutOfRange when either index is outside the
        poles table in its direction."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_Weight(self, *args)

    def UKnot(self, *args):
        """Return the u-knot (float) of range UIndex.  Raises if UIndex
        is not in [1, NbUKnots]."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_UKnot(self, *args)

    def VKnot(self, *args):
        """Return the v-knot (float) of range VIndex.  Raises if VIndex
        is not in [1, NbVKnots]."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_VKnot(self, *args)

    def UMultiplicity(self, *args):
        """Return the multiplicity (int) of the u-knot of range UIndex.
        Raises if UIndex is not in [1, NbUKnots]."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_UMultiplicity(self, *args)

    def VMultiplicity(self, *args):
        """Return the multiplicity (int) of the v-knot of range VIndex.
        Raises if VIndex is not in [1, NbVKnots]."""
        return _Convert.Convert_ElementarySurfaceToBSplineSurface_VMultiplicity(self, *args)

    def __del__(self):
        # Hand the wrapped C++ object to the delayed garbage collector;
        # swallow every error because this may run during interpreter
        # shutdown when module globals are already gone.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach the C-extension implementations as bound instance methods and
# register the proxy class with the SWIG runtime.
for _name in ('UDegree', 'VDegree', 'NbUPoles', 'NbVPoles', 'NbUKnots',
              'NbVKnots', 'IsUPeriodic', 'IsVPeriodic', 'Pole', 'Weight',
              'UKnot', 'VKnot', 'UMultiplicity', 'VMultiplicity',
              '_kill_pointed'):
    setattr(Convert_ElementarySurfaceToBSplineSurface, _name,
            new_instancemethod(getattr(_Convert, 'Convert_ElementarySurfaceToBSplineSurface_' + _name),
                               None, Convert_ElementarySurfaceToBSplineSurface))
del _name
Convert_ElementarySurfaceToBSplineSurface_swigregister = _Convert.Convert_ElementarySurfaceToBSplineSurface_swigregister
Convert_ElementarySurfaceToBSplineSurface_swigregister(Convert_ElementarySurfaceToBSplineSurface)
class Convert_GridPolynomialToPoles(object):
    """SWIG proxy that converts a single polynomial surface, or a grid
    of polynomial surfaces, into the poles, knots and multiplicities of
    a BSpline surface."""
    # Membership flag: True when this Python proxy owns the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args):
        """Build the converter.  Two C++ overloads exist:

        * (MaxUDegree, MaxVDegree, NumCoeff, Coefficients,
          PolynomialUIntervals, PolynomialVIntervals) -- one polynomial
          surface.  The two interval arrays (length 2) give the
          parametric domain of the polynomial equation; coefficients
          are laid out as a C array [MaxUDegree+1][MaxVDegree+1][3].
        * (NbUSurfaces, NBVSurfaces, UContinuity, VContinuity,
          MaxUDegree, MaxVDegree, NumCoeffPerSurface, Coefficients,
          PolynomialUIntervals, PolynomialVIntervals, TrueUIntervals,
          TrueVIntervals) -- a grid of surfaces.  Continuity per
          direction can be at most the maximum polynomial degree;
          TrueUIntervals/TrueVIntervals give the true parameterisation
          of the composite surface; coefficients are laid out as
          [NbVSurfaces][NbUSurfaces][MaxUDegree+1][MaxVDegree+1][3].
          Raises DomainError when NumCoeffPerSurface is not a
          [1, NbVSurfaces*NbUSurfaces, 1, 2] array.
        """
        _Convert.Convert_GridPolynomialToPoles_swiginit(self, _Convert.new_Convert_GridPolynomialToPoles(*args))

    def Perform(self, *args):
        """Run the conversion for (UContinuity, VContinuity, MaxUDegree,
        MaxVDegree, NumCoeffPerSurface, Coefficients,
        PolynomialUIntervals, PolynomialVIntervals, TrueUIntervals,
        TrueVIntervals)."""
        return _Convert.Convert_GridPolynomialToPoles_Perform(self, *args)

    def NbUPoles(self, *args):
        """Return the number of poles (int) in the u direction."""
        return _Convert.Convert_GridPolynomialToPoles_NbUPoles(self, *args)

    def NbVPoles(self, *args):
        """Return the number of poles (int) in the v direction."""
        return _Convert.Convert_GridPolynomialToPoles_NbVPoles(self, *args)

    def Poles(self, *args):
        """Return the poles of the BSpline surface
        (Handle_TColgp_HArray2OfPnt)."""
        return _Convert.Convert_GridPolynomialToPoles_Poles(self, *args)

    def UDegree(self, *args):
        """Return the degree (int) in the u direction."""
        return _Convert.Convert_GridPolynomialToPoles_UDegree(self, *args)

    def VDegree(self, *args):
        """Return the degree (int) in the v direction."""
        return _Convert.Convert_GridPolynomialToPoles_VDegree(self, *args)

    def NbUKnots(self, *args):
        """Return the number of knots (int) in the u direction."""
        return _Convert.Convert_GridPolynomialToPoles_NbUKnots(self, *args)

    def NbVKnots(self, *args):
        """Return the number of knots (int) in the v direction."""
        return _Convert.Convert_GridPolynomialToPoles_NbVKnots(self, *args)

    def UKnots(self, *args):
        """Return the knots in the u direction
        (Handle_TColStd_HArray1OfReal)."""
        return _Convert.Convert_GridPolynomialToPoles_UKnots(self, *args)

    def VKnots(self, *args):
        """Return the knots in the v direction
        (Handle_TColStd_HArray1OfReal)."""
        return _Convert.Convert_GridPolynomialToPoles_VKnots(self, *args)

    def UMultiplicities(self, *args):
        """Return the multiplicities of the knots in the u direction
        (Handle_TColStd_HArray1OfInteger)."""
        return _Convert.Convert_GridPolynomialToPoles_UMultiplicities(self, *args)

    def VMultiplicities(self, *args):
        """Return the multiplicities of the knots in the v direction
        (Handle_TColStd_HArray1OfInteger)."""
        return _Convert.Convert_GridPolynomialToPoles_VMultiplicities(self, *args)

    def IsDone(self, *args):
        """Return True when the conversion succeeded."""
        return _Convert.Convert_GridPolynomialToPoles_IsDone(self, *args)

    def __del__(self):
        # Hand the wrapped C++ object to the delayed garbage collector;
        # swallow every error because this may run during interpreter
        # shutdown when module globals are already gone.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach the C-extension implementations as bound instance methods and
# register the proxy class with the SWIG runtime.
for _name in ('Perform', 'NbUPoles', 'NbVPoles', 'Poles', 'UDegree',
              'VDegree', 'NbUKnots', 'NbVKnots', 'UKnots', 'VKnots',
              'UMultiplicities', 'VMultiplicities', 'IsDone',
              '_kill_pointed'):
    setattr(Convert_GridPolynomialToPoles, _name,
            new_instancemethod(getattr(_Convert, 'Convert_GridPolynomialToPoles_' + _name),
                               None, Convert_GridPolynomialToPoles))
del _name
Convert_GridPolynomialToPoles_swigregister = _Convert.Convert_GridPolynomialToPoles_swigregister
Convert_GridPolynomialToPoles_swigregister(Convert_GridPolynomialToPoles)
# SWIG proxy for the C++ node type of a TCollection sequence of pole arrays
# (auto-generated; do not edit by hand -- regeneration overwrites this file).
class Convert_SequenceNodeOfSequenceOfArray1OfPoles(OCC.TCollection.TCollection_SeqNode):
    # SWIG ownership flag: True while Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param I:
        :type I: Handle_TColgp_HArray1OfPnt
        :param n:
        :type n: TCollection_SeqNodePtr &
        :param p:
        :type p: TCollection_SeqNodePtr &
        :rtype: None
        """
        _Convert.Convert_SequenceNodeOfSequenceOfArray1OfPoles_swiginit(self,_Convert.new_Convert_SequenceNodeOfSequenceOfArray1OfPoles(*args))
    def Value(self, *args):
        """
        :rtype: Handle_TColgp_HArray1OfPnt
        """
        return _Convert.Convert_SequenceNodeOfSequenceOfArray1OfPoles_Value(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(Convert_SequenceNodeOfSequenceOfArray1OfPoles self)"""
        return _Convert.Convert_SequenceNodeOfSequenceOfArray1OfPoles__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(Convert_SequenceNodeOfSequenceOfArray1OfPoles self) -> Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles"""
        return _Convert.Convert_SequenceNodeOfSequenceOfArray1OfPoles_GetHandle(self)
    def __del__(self):
        # Generated destructor: hand the wrapped object to the library's GC
        # helper; the bare except mirrors SWIG's generated pattern (must never
        # raise during interpreter shutdown).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach C-extension wrappers as bound methods and register the proxy class
# with SWIG's runtime type system.
Convert_SequenceNodeOfSequenceOfArray1OfPoles.Value = new_instancemethod(_Convert.Convert_SequenceNodeOfSequenceOfArray1OfPoles_Value,None,Convert_SequenceNodeOfSequenceOfArray1OfPoles)
Convert_SequenceNodeOfSequenceOfArray1OfPoles._kill_pointed = new_instancemethod(_Convert.Convert_SequenceNodeOfSequenceOfArray1OfPoles__kill_pointed,None,Convert_SequenceNodeOfSequenceOfArray1OfPoles)
Convert_SequenceNodeOfSequenceOfArray1OfPoles.GetHandle = new_instancemethod(_Convert.Convert_SequenceNodeOfSequenceOfArray1OfPoles_GetHandle,None,Convert_SequenceNodeOfSequenceOfArray1OfPoles)
Convert_SequenceNodeOfSequenceOfArray1OfPoles_swigregister = _Convert.Convert_SequenceNodeOfSequenceOfArray1OfPoles_swigregister
Convert_SequenceNodeOfSequenceOfArray1OfPoles_swigregister(Convert_SequenceNodeOfSequenceOfArray1OfPoles)
# SWIG proxy for the OCC smart-pointer (Handle) to the sequence-node class
# above (auto-generated; do not edit by hand).
class Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles(OCC.TCollection.Handle_TCollection_SeqNode):
    # SWIG ownership flag: True while Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _Convert.Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_swiginit(self,_Convert.new_Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles(*args))
    DownCast = staticmethod(_Convert.Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_DownCast)
    def __del__(self):
        # Generated destructor; bare except is SWIG's pattern (never raise here).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Runtime wiring: attach handle operations as bound methods and register with
# SWIG's type system.
Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles.Nullify = new_instancemethod(_Convert.Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_Nullify,None,Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles)
Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles.IsNull = new_instancemethod(_Convert.Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_IsNull,None,Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles)
Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles.GetObject = new_instancemethod(_Convert.Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_GetObject,None,Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles)
Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles._kill_pointed = new_instancemethod(_Convert.Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles__kill_pointed,None,Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles)
Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_swigregister = _Convert.Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_swigregister
Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_swigregister(Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles)
# Generated free-function form of DownCast; the def is immediately shadowed by
# the direct C binding on the next line (SWIG's standard output pattern).
def Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_DownCast(*args):
  return _Convert.Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_DownCast(*args)
Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_DownCast = _Convert.Handle_Convert_SequenceNodeOfSequenceOfArray1OfPoles_DownCast
# SWIG proxy for a TCollection sequence of pole arrays (Handle_TColgp_HArray1OfPnt
# elements).  Auto-generated; do not edit by hand -- regeneration overwrites it.
class Convert_SequenceOfArray1OfPoles(OCC.TCollection.TCollection_BaseSequence):
    # SWIG ownership flag: True while Python owns the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _Convert.Convert_SequenceOfArray1OfPoles_swiginit(self,_Convert.new_Convert_SequenceOfArray1OfPoles(*args))
    def Clear(self, *args):
        """
        :rtype: None
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_Clear(self, *args)
    def Assign(self, *args):
        """
        :param Other:
        :type Other: Convert_SequenceOfArray1OfPoles &
        :rtype: Convert_SequenceOfArray1OfPoles
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_Assign(self, *args)
    def Set(self, *args):
        """
        :param Other:
        :type Other: Convert_SequenceOfArray1OfPoles &
        :rtype: Convert_SequenceOfArray1OfPoles
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_Set(self, *args)
    def Append(self, *args):
        """
        :param T:
        :type T: Handle_TColgp_HArray1OfPnt
        :rtype: None
        :param S:
        :type S: Convert_SequenceOfArray1OfPoles &
        :rtype: None
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_Append(self, *args)
    def Prepend(self, *args):
        """
        :param T:
        :type T: Handle_TColgp_HArray1OfPnt
        :rtype: None
        :param S:
        :type S: Convert_SequenceOfArray1OfPoles &
        :rtype: None
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_Prepend(self, *args)
    def InsertBefore(self, *args):
        """
        :param Index:
        :type Index: int
        :param T:
        :type T: Handle_TColgp_HArray1OfPnt
        :rtype: None
        :param Index:
        :type Index: int
        :param S:
        :type S: Convert_SequenceOfArray1OfPoles &
        :rtype: None
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_InsertBefore(self, *args)
    def InsertAfter(self, *args):
        """
        :param Index:
        :type Index: int
        :param T:
        :type T: Handle_TColgp_HArray1OfPnt
        :rtype: None
        :param Index:
        :type Index: int
        :param S:
        :type S: Convert_SequenceOfArray1OfPoles &
        :rtype: None
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_InsertAfter(self, *args)
    def First(self, *args):
        """
        :rtype: Handle_TColgp_HArray1OfPnt
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_First(self, *args)
    def Last(self, *args):
        """
        :rtype: Handle_TColgp_HArray1OfPnt
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_Last(self, *args)
    def Split(self, *args):
        """
        :param Index:
        :type Index: int
        :param Sub:
        :type Sub: Convert_SequenceOfArray1OfPoles &
        :rtype: None
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_Split(self, *args)
    def Value(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_TColgp_HArray1OfPnt
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_Value(self, *args)
    def SetValue(self, *args):
        """
        :param Index:
        :type Index: int
        :param I:
        :type I: Handle_TColgp_HArray1OfPnt
        :rtype: None
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_SetValue(self, *args)
    def ChangeValue(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: Handle_TColgp_HArray1OfPnt
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_ChangeValue(self, *args)
    def Remove(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: None
        :param FromIndex:
        :type FromIndex: int
        :param ToIndex:
        :type ToIndex: int
        :rtype: None
        """
        return _Convert.Convert_SequenceOfArray1OfPoles_Remove(self, *args)
    def __del__(self):
        # Generated destructor; bare except is SWIG's pattern (never raise here).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# Attach C-extension wrappers as bound methods and register the proxy class
# with SWIG's runtime type system.
Convert_SequenceOfArray1OfPoles.Clear = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_Clear,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.Assign = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_Assign,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.Set = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_Set,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.Append = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_Append,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.Prepend = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_Prepend,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.InsertBefore = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_InsertBefore,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.InsertAfter = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_InsertAfter,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.First = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_First,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.Last = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_Last,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.Split = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_Split,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.Value = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_Value,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.SetValue = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_SetValue,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.ChangeValue = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_ChangeValue,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles.Remove = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles_Remove,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles._kill_pointed = new_instancemethod(_Convert.Convert_SequenceOfArray1OfPoles__kill_pointed,None,Convert_SequenceOfArray1OfPoles)
Convert_SequenceOfArray1OfPoles_swigregister = _Convert.Convert_SequenceOfArray1OfPoles_swigregister
Convert_SequenceOfArray1OfPoles_swigregister(Convert_SequenceOfArray1OfPoles)
# SWIG proxy: converts a 2D circle (gp_Circ2d) into an equivalent B-spline
# curve.  Auto-generated; do not edit by hand.
class Convert_CircleToBSplineCurve(Convert_ConicToBSplineCurve):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * The equivalent B-spline curve has the same orientation as the circle C.
        :param C:
        :type C: gp_Circ2d
        :param Parameterisation: default value is Convert_TgtThetaOver2
        :type Parameterisation: Convert_ParameterisationType
        :rtype: None
        * The circle C is limited between the parametric values U1, U2 in radians. U1 and U2 [0.0, 2*Pi] . The equivalent B-spline curve is oriented from U1 to U2 and has the same orientation as the circle C. Raised if U1 = U2 or U1 = U2 + 2.0 * Pi
        :param C:
        :type C: gp_Circ2d
        :param U1:
        :type U1: float
        :param U2:
        :type U2: float
        :param Parameterisation: default value is Convert_TgtThetaOver2
        :type Parameterisation: Convert_ParameterisationType
        :rtype: None
        """
        _Convert.Convert_CircleToBSplineCurve_swiginit(self,_Convert.new_Convert_CircleToBSplineCurve(*args))
    def __del__(self):
        # Generated destructor; bare except is SWIG's pattern (never raise here).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the proxy class above.
Convert_CircleToBSplineCurve._kill_pointed = new_instancemethod(_Convert.Convert_CircleToBSplineCurve__kill_pointed,None,Convert_CircleToBSplineCurve)
Convert_CircleToBSplineCurve_swigregister = _Convert.Convert_CircleToBSplineCurve_swigregister
Convert_CircleToBSplineCurve_swigregister(Convert_CircleToBSplineCurve)
# SWIG proxy: converts a cone (gp_Cone) into an equivalent B-spline surface.
# Auto-generated; do not edit by hand.
class Convert_ConeToBSplineSurface(Convert_ElementarySurfaceToBSplineSurface):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * The equivalent B-spline surface as the same orientation as the Cone in the U and V parametric directions. Raised if U1 = U2 or U1 = U2 + 2.0 * Pi Raised if V1 = V2.
        :param C:
        :type C: gp_Cone
        :param U1:
        :type U1: float
        :param U2:
        :type U2: float
        :param V1:
        :type V1: float
        :param V2:
        :type V2: float
        :rtype: None
        * The equivalent B-spline surface as the same orientation as the Cone in the U and V parametric directions. Raised if V1 = V2.
        :param C:
        :type C: gp_Cone
        :param V1:
        :type V1: float
        :param V2:
        :type V2: float
        :rtype: None
        """
        _Convert.Convert_ConeToBSplineSurface_swiginit(self,_Convert.new_Convert_ConeToBSplineSurface(*args))
    def __del__(self):
        # Generated destructor; bare except is SWIG's pattern (never raise here).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the proxy class above.
Convert_ConeToBSplineSurface._kill_pointed = new_instancemethod(_Convert.Convert_ConeToBSplineSurface__kill_pointed,None,Convert_ConeToBSplineSurface)
Convert_ConeToBSplineSurface_swigregister = _Convert.Convert_ConeToBSplineSurface_swigregister
Convert_ConeToBSplineSurface_swigregister(Convert_ConeToBSplineSurface)
# SWIG proxy: converts a cylinder (gp_Cylinder) into an equivalent B-spline
# surface.  Auto-generated; do not edit by hand.
class Convert_CylinderToBSplineSurface(Convert_ElementarySurfaceToBSplineSurface):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * The equivalent B-splineSurface as the same orientation as the cylinder in the U and V parametric directions. Raised if U1 = U2 or U1 = U2 + 2.0 * Pi Raised if V1 = V2.
        :param Cyl:
        :type Cyl: gp_Cylinder
        :param U1:
        :type U1: float
        :param U2:
        :type U2: float
        :param V1:
        :type V1: float
        :param V2:
        :type V2: float
        :rtype: None
        * The equivalent B-splineSurface as the same orientation as the cylinder in the U and V parametric directions. Raised if V1 = V2.
        :param Cyl:
        :type Cyl: gp_Cylinder
        :param V1:
        :type V1: float
        :param V2:
        :type V2: float
        :rtype: None
        """
        _Convert.Convert_CylinderToBSplineSurface_swiginit(self,_Convert.new_Convert_CylinderToBSplineSurface(*args))
    def __del__(self):
        # Generated destructor; bare except is SWIG's pattern (never raise here).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the proxy class above.
Convert_CylinderToBSplineSurface._kill_pointed = new_instancemethod(_Convert.Convert_CylinderToBSplineSurface__kill_pointed,None,Convert_CylinderToBSplineSurface)
Convert_CylinderToBSplineSurface_swigregister = _Convert.Convert_CylinderToBSplineSurface_swigregister
Convert_CylinderToBSplineSurface_swigregister(Convert_CylinderToBSplineSurface)
# SWIG proxy: converts a 2D ellipse (gp_Elips2d) into an equivalent B-spline
# curve.  Auto-generated; do not edit by hand.
class Convert_EllipseToBSplineCurve(Convert_ConicToBSplineCurve):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * The equivalent B-spline curve has the same orientation as the ellipse E.
        :param E:
        :type E: gp_Elips2d
        :param Parameterisation: default value is Convert_TgtThetaOver2
        :type Parameterisation: Convert_ParameterisationType
        :rtype: None
        * The ellipse E is limited between the parametric values U1, U2. The equivalent B-spline curve is oriented from U1 to U2 and has the same orientation as E. Raised if U1 = U2 or U1 = U2 + 2.0 * Pi
        :param E:
        :type E: gp_Elips2d
        :param U1:
        :type U1: float
        :param U2:
        :type U2: float
        :param Parameterisation: default value is Convert_TgtThetaOver2
        :type Parameterisation: Convert_ParameterisationType
        :rtype: None
        """
        _Convert.Convert_EllipseToBSplineCurve_swiginit(self,_Convert.new_Convert_EllipseToBSplineCurve(*args))
    def __del__(self):
        # Generated destructor; bare except is SWIG's pattern (never raise here).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the proxy class above.
Convert_EllipseToBSplineCurve._kill_pointed = new_instancemethod(_Convert.Convert_EllipseToBSplineCurve__kill_pointed,None,Convert_EllipseToBSplineCurve)
Convert_EllipseToBSplineCurve_swigregister = _Convert.Convert_EllipseToBSplineCurve_swigregister
Convert_EllipseToBSplineCurve_swigregister(Convert_EllipseToBSplineCurve)
# SWIG proxy: converts a trimmed 2D hyperbola (gp_Hypr2d) into an equivalent
# B-spline curve.  Auto-generated; do not edit by hand.
class Convert_HyperbolaToBSplineCurve(Convert_ConicToBSplineCurve):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * The hyperbola H is limited between the parametric values U1, U2 and the equivalent B-spline curve has the same orientation as the hyperbola.
        :param H:
        :type H: gp_Hypr2d
        :param U1:
        :type U1: float
        :param U2:
        :type U2: float
        :rtype: None
        """
        _Convert.Convert_HyperbolaToBSplineCurve_swiginit(self,_Convert.new_Convert_HyperbolaToBSplineCurve(*args))
    def __del__(self):
        # Generated destructor; bare except is SWIG's pattern (never raise here).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the proxy class above.
Convert_HyperbolaToBSplineCurve._kill_pointed = new_instancemethod(_Convert.Convert_HyperbolaToBSplineCurve__kill_pointed,None,Convert_HyperbolaToBSplineCurve)
Convert_HyperbolaToBSplineCurve_swigregister = _Convert.Convert_HyperbolaToBSplineCurve_swigregister
Convert_HyperbolaToBSplineCurve_swigregister(Convert_HyperbolaToBSplineCurve)
# SWIG proxy: converts a trimmed 2D parabola (gp_Parab2d) into an equivalent
# B-spline curve.  Auto-generated; do not edit by hand.
class Convert_ParabolaToBSplineCurve(Convert_ConicToBSplineCurve):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * The parabola Prb is limited between the parametric values U1, U2 and the equivalent B-spline curve as the same orientation as the parabola Prb.
        :param Prb:
        :type Prb: gp_Parab2d
        :param U1:
        :type U1: float
        :param U2:
        :type U2: float
        :rtype: None
        """
        _Convert.Convert_ParabolaToBSplineCurve_swiginit(self,_Convert.new_Convert_ParabolaToBSplineCurve(*args))
    def __del__(self):
        # Generated destructor; bare except is SWIG's pattern (never raise here).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the proxy class above.
Convert_ParabolaToBSplineCurve._kill_pointed = new_instancemethod(_Convert.Convert_ParabolaToBSplineCurve__kill_pointed,None,Convert_ParabolaToBSplineCurve)
Convert_ParabolaToBSplineCurve_swigregister = _Convert.Convert_ParabolaToBSplineCurve_swigregister
Convert_ParabolaToBSplineCurve_swigregister(Convert_ParabolaToBSplineCurve)
# SWIG proxy: converts a sphere (gp_Sphere), optionally trimmed, into an
# equivalent B-spline surface.  Auto-generated; do not edit by hand.
class Convert_SphereToBSplineSurface(Convert_ElementarySurfaceToBSplineSurface):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * The equivalent B-spline surface as the same orientation as the sphere in the U and V parametric directions. Raised if U1 = U2 or U1 = U2 + 2.0 * Pi Raised if V1 = V2.
        :param Sph:
        :type Sph: gp_Sphere
        :param U1:
        :type U1: float
        :param U2:
        :type U2: float
        :param V1:
        :type V1: float
        :param V2:
        :type V2: float
        :rtype: None
        * The equivalent B-spline surface as the same orientation as the sphere in the U and V parametric directions. Raised if UTrim = True and Param1 = Param2 or Param1 = Param2 + 2.0 * Pi Raised if UTrim = False and Param1 = Param2
        :param Sph:
        :type Sph: gp_Sphere
        :param Param1:
        :type Param1: float
        :param Param2:
        :type Param2: float
        :param UTrim: default value is Standard_True
        :type UTrim: bool
        :rtype: None
        * The equivalent B-spline surface as the same orientation as the sphere in the U and V parametric directions.
        :param Sph:
        :type Sph: gp_Sphere
        :rtype: None
        """
        _Convert.Convert_SphereToBSplineSurface_swiginit(self,_Convert.new_Convert_SphereToBSplineSurface(*args))
    def __del__(self):
        # Generated destructor; bare except is SWIG's pattern (never raise here).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the proxy class above.
Convert_SphereToBSplineSurface._kill_pointed = new_instancemethod(_Convert.Convert_SphereToBSplineSurface__kill_pointed,None,Convert_SphereToBSplineSurface)
Convert_SphereToBSplineSurface_swigregister = _Convert.Convert_SphereToBSplineSurface_swigregister
Convert_SphereToBSplineSurface_swigregister(Convert_SphereToBSplineSurface)
# SWIG proxy: converts a torus (gp_Torus), optionally trimmed, into an
# equivalent B-spline surface.  Auto-generated; do not edit by hand.
class Convert_TorusToBSplineSurface(Convert_ElementarySurfaceToBSplineSurface):
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * The equivalent B-spline surface as the same orientation as the torus in the U and V parametric directions. Raised if U1 = U2 or U1 = U2 + 2.0 * Pi Raised if V1 = V2 or V1 = V2 + 2.0 * Pi
        :param T:
        :type T: gp_Torus
        :param U1:
        :type U1: float
        :param U2:
        :type U2: float
        :param V1:
        :type V1: float
        :param V2:
        :type V2: float
        :rtype: None
        * The equivalent B-spline surface as the same orientation as the torus in the U and V parametric directions. Raised if Param1 = Param2 or Param1 = Param2 + 2.0 * Pi
        :param T:
        :type T: gp_Torus
        :param Param1:
        :type Param1: float
        :param Param2:
        :type Param2: float
        :param UTrim: default value is Standard_True
        :type UTrim: bool
        :rtype: None
        * The equivalent B-spline surface as the same orientation as the torus in the U and V parametric directions.
        :param T:
        :type T: gp_Torus
        :rtype: None
        """
        _Convert.Convert_TorusToBSplineSurface_swiginit(self,_Convert.new_Convert_TorusToBSplineSurface(*args))
    def __del__(self):
        # Generated destructor; bare except is SWIG's pattern (never raise here).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG runtime wiring for the proxy class above.
Convert_TorusToBSplineSurface._kill_pointed = new_instancemethod(_Convert.Convert_TorusToBSplineSurface__kill_pointed,None,Convert_TorusToBSplineSurface)
Convert_TorusToBSplineSurface_swigregister = _Convert.Convert_TorusToBSplineSurface_swigregister
Convert_TorusToBSplineSurface_swigregister(Convert_TorusToBSplineSurface)
| 48.226259 | 2,029 | 0.748173 | 52,277 | 0.690882 | 0 | 0 | 0 | 0 | 0 | 0 | 35,347 | 0.467139 |
80a0908852b7d407afbc7c7d5928b94ad14fee04 | 9,608 | py | Python | GestureAgents/AppRecognizer.py | chaosct/GestureAgents | 9ec0adb1e59bf995d5808431edd4cb8bf8907728 | [
"MIT"
] | 1 | 2015-01-22T10:42:09.000Z | 2015-01-22T10:42:09.000Z | GestureAgents/AppRecognizer.py | chaosct/GestureAgents | 9ec0adb1e59bf995d5808431edd4cb8bf8907728 | [
"MIT"
] | null | null | null | GestureAgents/AppRecognizer.py | chaosct/GestureAgents | 9ec0adb1e59bf995d5808431edd4cb8bf8907728 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from GestureAgents.Recognizer import Recognizer, newHypothesis
import copy
from GestureAgents.Agent import Agent
from GestureAgents.Events import Event
class FakeAgent(Agent):
    """Agent proxy handed to recognizers hosted inside an AppRecognizer.

    Wraps another Agent and forwards any attribute not defined here to the
    wrapped agent, so hosted recognizers see the original agent's state.
    """
    def __init__(self, original, creator):
        # Keep a reference to the wrapped agent; __getattr__ falls back to it.
        self.original_agent = original
        Agent.__init__(self, creator, list(original.events))
    def __getattr__(self, attrname):
        # Only reached when normal attribute lookup fails: delegate to the
        # wrapped (original) agent.
        return getattr(self.original_agent, attrname)
class SensorProxyAgent(Agent):
    """Agent proxy created by a SensorProxy for a sensor-level agent.

    Tracks which recognizers acquired/completed it so the owning SensorProxy
    can forward acquire/complete/discard decisions to the original sensor
    agent at the right time.
    """
    def __init__(self, original, creator):
        # The real sensor agent being proxied.
        self.original_agent = original
        # Until some recognizer completes, the proxy is assumed discardable.
        self.to_discard = True
        # Maps each acquiring recognizer -> set of AppRecognizers reachable
        # from it (see get_AR).
        self.acquired_dict = {}
        # The SensorProxy that created (and owns) this proxy agent.
        self.sensorproxy = creator
        Agent.__init__(self, creator, list(original.events))
        self.recycled = self.original_agent.recycled
    def __getattr__(self, attrname):
        # Only reached when normal lookup fails: delegate to the real agent.
        return getattr(self.original_agent, attrname)
    def acquire(self, r):
        # First acquisition (and nothing pending completion) propagates the
        # acquire to the real sensor agent through the SensorProxy.
        if not self.acquired_dict and not self.sensorproxy.to_complete:
            self.sensorproxy.acquire(self.original_agent)
        self.acquired_dict[r] = self.get_AR(r)
        return Agent.acquire(self, r)
    def complete(self, r):
        # Find all AppRecognizers possibly requiring this completion and add
        # them to the SensorProxy list, so the proxy can finally complete when
        # the hosted gesture completes.
        del self.acquired_dict[r]
        ARlist = self.get_AR(r)
        self.sensorproxy.to_complete |= ARlist
        Agent.complete(self, r)
    def discard(self, r):
        if r == self._recognizer_complete:
            if not self.sensorproxy.failed:
                # finishing: either defer the discard (proxy not yet executed)
                # or finish the proxy now.
                if not self.sensorproxy.executed:
                    self.to_discard = True
                else:
                    self.sensorproxy.finish()
            else:
                # Agent fail: nothing to forward, proxy already failed.
                pass
        else:
            # A non-winning recognizer gave up: drop its acquisition and, once
            # nobody holds the agent and it is not completed, discard the real
            # sensor agent through the proxy.
            if r in self.acquired_dict:
                del self.acquired_dict[r]
            if not self.acquired_dict and not self._recognizer_complete: # and self.owners:
                self.sensorproxy.discard(self.original_agent)
        Agent.discard(self, r)
    def get_AR(self, r):
        # Walk the recognizer graph reachable from r (through agent event
        # subscriptions) and collect every AppRecognizer found.
        rlist = set([r])
        ARlist = set()
        while rlist:
            R = rlist.pop()
            if isinstance(R, AppRecognizer):
                ARlist.add(R)
            else:
                if R.agent:
                    for e in R.agent.events.itervalues():
                        for i in e.lookupf:
                            rlist.add(i)
        return ARlist
class SensorProxy(Recognizer):
    """Recognizer that proxies a sensor-level agent source for a hosting
    AppRecognizer, re-emitting sensor agents as SensorProxyAgent wrappers and
    replaying their events to hosted recognizers.
    """
    # Running counter used only to build unique instance names.
    ninstances = 0
    def __init__(self, system, recognizer, host):
        Recognizer.__init__(self, system, Event())
        # The sensor recognizer/source being proxied.
        self.recognizer = recognizer
        # self.newAgent = Event()
        self.register_event(self.system.newAgent(
            recognizer), SensorProxy._eventNewAgent)
        self.name = "SensorProxy(%s) %d" % (str(
            recognizer.__name__), SensorProxy.ninstances)
        SensorProxy.ninstances += 1
        # AppRecognizers whose completion this proxy must still relay.
        self.to_complete = set()
        self.host = host
        host.proxies.append(self)
        # Guards complete() against being run twice.
        self.alreadycompleted = False
    @newHypothesis
    def _eventNewAgent(self, agent):
        # A sensor agent appeared: wrap it, announce the wrapper, and mirror
        # every event of the original agent onto the wrapper.
        self.unregister_event(self.system.newAgent(self.recognizer))
        self.agent = SensorProxyAgent(agent, self)
        self.newAgent(self.agent)
        self.otheragent = agent
        if not self.agent.is_someone_subscribed():
            self.fail("Noone interested")
        else:
            for ename, event in agent.events.iteritems():
                # Default argument binds the event name at definition time
                # (avoids the late-binding closure pitfall).
                ffff = lambda self, a, eventname=ename: self.enqueueEvent(
                    a, eventname)
                self.register_event(event, ffff)
    def enqueueEvent(self, a, e):
        # Replay one event from the original sensor agent onto the proxy agent.
        if self.failed:
            return
        self.agent.original_agent = a
        if e == "finishAgent":
            self.agent.finish()
        else:
            self.agent.events[e](self.agent)
    def execute(self):
        self.executed = True
        # NOTE(review): this branch is a no-op (dead code) -- kept as-is to
        # preserve the original behavior.
        if self.agent.to_discard:
            pass
        # Notify every waiting AppRecognizer exactly once; reset first so
        # callbacks can re-add themselves.
        to_complete = self.to_complete
        self.to_complete = set()
        for r in to_complete:
            r.proxyexecuted(self)
    def duplicate(self):
        # NOTE(review): mirrors AppRecognizer.duplicate -- the copy is created
        # for its registration side effects and intentionally not returned.
        d = self.get_copy(self.system, self.recognizer, self.host)
        d.newAgent = self.newAgent
    def __repr__(self):
        return self.name
    def complete(self):
        # Idempotent: completing twice would confuse the base Recognizer.
        if self.alreadycompleted:
            return
        self.alreadycompleted = True
        Recognizer.complete(self)
class fksystem(object):
    """Impersonates the gesture system for recognizers hosted inside an
    AppRecognizer: ``newAgent`` subscriptions are rerouted through the host's
    ``AR2_newAgent`` hook, while every other attribute lookup falls through to
    the wrapped system/host instance.
    """
    def __init__(self, instance):
        # The real object (host AppRecognizer or system) lookups fall back to.
        self.instance = instance
    def newAgent(self, recognizer):
        # Route agent-creation subscriptions through the hosting instance.
        host = self.instance
        return host.AR2_newAgent(recognizer)
    def __getattr__(self, attr):
        # Only reached when normal lookup fails; delegate to the wrapped object.
        return getattr(self.instance, attr)
class AppRecognizer(Recognizer):
    """Hosts a user/application recognizer inside its own sandboxed 'system'
    (a fksystem), proxying sensor sources through SensorProxy instances and
    replaying agent events to the hosted recognizer once completion of the
    underlying sensor gestures is confirmed.
    """
    # Running counter used only to build unique instance names.
    ninstances = 0
    def __init__(self, system, original_recognizer, fksys=None, sensors=None):
        # newAgent event per hosted recognizer class (or sensor proxy).
        self.new_agents = {}
        # Hosted recognizer instances created inside the fake system.
        self.recognizers = []
        # SensorProxy instances created for sensor sources.
        self.proxies = []
        self.systemsystem = system
        self.original_recognizer = original_recognizer
        if not fksys:
            self.fksystem = fksystem(self)
        else:
            self.fksystem = fksys
        self.to_finish = False
        # list of sensors that require a Proxy
        if sensors is None:
            #BAD IDEA. Just use system sources?
            # Default is TUIO cursor events only
            #from GestureAgentsTUIO.Tuio import TuioCursorEvents
            #sensors = [TuioCursorEvents]
            sensors = system.sources
        self.sensorlist = sensors
        Recognizer.__init__(self, self.fksystem, Event())
        self.name = "AppRecognizer(%s) %d" % (str(
            self.original_recognizer.__name__), AppRecognizer.ninstances)
        AppRecognizer.ninstances += 1
        # Events buffered until execution is granted (see willenqueue).
        self.eventqueue = []
        self.register_event(self.fksystem.newAgent(
            original_recognizer), AppRecognizer._eventNewAgent)
        self.willenqueue = True
        # is original_recognizer a sensor and we have to assume that we
        # will be dealing directly with proxies
        self.directProxy = original_recognizer in self.sensorlist
    @newHypothesis
    def _eventNewAgent(self, agent):
        # The hosted recognizer produced an agent: wrap it in a FakeAgent,
        # announce it, and mirror every event of the original agent onto it.
        self.unregister_event(self.fksystem.newAgent(self.original_recognizer))
        self.agent = FakeAgent(agent, self)
        self.newAgent(self.agent)
        self.otheragent = agent
        if not self.agent.is_someone_subscribed():
            self.fail("Noone interested")
        else:
            for ename, event in agent.events.iteritems():
                # Default argument binds the event name at definition time
                # (avoids the late-binding closure pitfall).
                ffff = lambda self, a, eventname=ename: self.enqueueEvent(
                    a, eventname)
                self.register_event(event, ffff)
    def AR2_newAgent(self, recognizer):
        # Entry point used by the fksystem: returns (lazily creating) the
        # newAgent event for a recognizer class -- a SensorProxy event for
        # sensor sources, a plain Event plus a hosted instance otherwise.
        if recognizer not in self.new_agents:
            if recognizer in self.sensorlist:
                proxy = SensorProxy(self.systemsystem, recognizer, self)
                self.new_agents[recognizer] = proxy.newAgent
            else:
                self.new_agents[recognizer] = Event()
                self.recognizers.append(recognizer(self.fksystem))
        return self.new_agents[recognizer]
    def execute(self):
        if self.directProxy:
            # we wait!
            return
        # Flush buffered events to the FakeAgent, then finish if requested.
        self.willenqueue = False
        for event_name, agent in self.eventqueue:
            self.agent.original_agent = agent
            if event_name == "finishAgent":
                self.finish()
            else:
                self.agent.events[event_name](self.agent)
        if self.to_finish:
            self.finish()
    def enqueueEvent(self, a, e):
        # First event: acquire the agent and drive any proxy completions.
        if not self.eventqueue:
            self.acquire(a)
            proxies = [pr for pr in self.proxies if self in pr.to_complete]
            if not proxies:
                # self.directProxy = True
                self.complete()
                proxies = [pr for pr in self.proxies if self in pr.to_complete]
            for p in proxies:
                p.complete()
        if self.willenqueue:
            # Buffer a snapshot of the agent until execution is granted.
            original_agent = copy.copy(a)
            self.eventqueue.append((e, original_agent))
        else:
            # Already executing: forward the event immediately.
            self.agent.original_agent = a
            if e == "finishAgent":
                if self.executed:
                    self.finish()
                else:
                    self.to_finish = True
            else:
                self.agent.events[e](self.agent)
    def proxyexecuted(self, proxy):
        # Callback from a SensorProxy once it has executed.
        if not self.eventqueue:
            # ignore unsolicited completions
            return
        waiting = [p for p in self.proxies if self in p.to_complete]
        if not waiting:
            # All proxies are done: either start executing (direct-proxy mode)
            # or signal our own completion.
            if self.directProxy:
                self.directProxy = False
                self.execute()
            else:
                self.complete()
    def fail(self, cause="Unknown"):
        # Withdraw from every proxy still waiting on us; a proxy with no
        # remaining dependants is failed as well.
        for p in list(self.proxies):
            if self in p.to_complete:
                p.to_complete.remove(self)
                if not p.to_complete:
                    p.safe_fail()
                self.proxies.remove(p)
            # else:
            #     if p.agent:
            #         print p.agent.acquired_dict
        Recognizer.fail(self, cause)
    def duplicate(self):
        # NOTE(review): the copy is created for its registration side effects
        # (shared state + newAgent event) and intentionally not returned.
        d = self.get_copy(
            self.system, self.original_recognizer, fksys=self.fksystem, sensors=self.sensorlist)
        d.new_agents = self.new_agents
        d.recognizers = self.recognizers
        d.proxies = self.proxies
        d.newAgent = self.newAgent
    def __repr__(self):
        return self.name
| 33.245675 | 96 | 0.584513 | 9,394 | 0.977727 | 0 | 0 | 1,124 | 0.116986 | 0 | 0 | 821 | 0.08545 |
80a09b1bd192800d93bd52bdbf72daf90e53bc85 | 1,376 | py | Python | Tools/_create_model.py | 825477418/XX | bf46e34749394002eec0fdc65e34c339ce022cab | [
"MIT"
] | null | null | null | Tools/_create_model.py | 825477418/XX | bf46e34749394002eec0fdc65e34c339ce022cab | [
"MIT"
] | 1 | 2020-06-03T13:54:29.000Z | 2020-06-03T13:54:29.000Z | Tools/_create_model.py | 825477418/XX | bf46e34749394002eec0fdc65e34c339ce022cab | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/11/1 11:33
# @Author : Peter
# @Des : 生成model类
# @File : _create_model.py
# @Software: PyCharm
import os
import re
# fp = "C:\\Users\\billsteve\\Desktop\\tmp\\"
fp = "../Model/SqlAlchemy/T.py"
link = "sqlacodegen mysql+pymysql://root:HBroot21@cd-cdb-f41yw26m.sql.tencentcdb.com:63626/company> "
link = "sqlacodegen mysql+pymysql://root:HBroot21@cd-cdb-f41yw26m.sql.tencentcdb.com:63626/data_sheet> "
link = "sqlacodegen mysql+pymysql://root:HBroot21@rm-8vbzabox9s9kvomq61o.mysql.zhangbei.rds.aliyuncs.com:3306/data_sheet> "
# link = "sqlacodegen mysql+pymysql://root:HBroot21@cd-cdb-f41yw26m.sql.tencentcdb.com:63626/weibo> "
link = "sqlacodegen mysql+pymysql://root:rebind1234@zh:3306/zhihu > "
link = "sqlacodegen mysql+pymysql://root:rebind1234@zh:3306/money > "
# link = "sqlacodegen mysql+pymysql://root:DRsXT5ZJ6Oi55LPQ@localhost:3306/music > "
# link = "sqlacodegen mysql+pymysql://root:HBroot21@cd-cdb-f41yw26m.sql.tencentcdb.com:63626/money> "
os.system(link + str(fp))
content = open(fp, encoding="utf-8").read()
# content = content.replace("INTEGER", "Integer").replace("BIGINT", "BigInteger")
# content = re.sub("Integer(\(\d+\))", "Integer", content)
# content = re.sub("BigInteger(\(\d+\))", "BigInteger", content)
# open(fp, "w", encoding="utf-8").write(content)
print(content)
| 45.866667 | 123 | 0.704215 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,220 | 0.882779 |
80a31b978430a03712aa6e4c30f664c12e4e5cee | 44 | py | Python | stdplugins/__init__.py | ppppspsljdhdd/Pepe | 1e57825ddb0ab3ba15a19cad0ecfbf2622f6b851 | [
"Apache-2.0"
] | 20 | 2020-01-25T05:08:26.000Z | 2022-01-18T07:37:53.000Z | stdplugins/__init__.py | ishaizz/PepeBot | 7440cadc8228106d221fc8e436a0809a86be5159 | [
"Apache-2.0"
] | 15 | 2019-11-07T07:53:56.000Z | 2022-01-23T09:21:17.000Z | stdplugins/__init__.py | ishaizz/PepeBot | 7440cadc8228106d221fc8e436a0809a86be5159 | [
"Apache-2.0"
] | 62 | 2019-10-20T06:35:19.000Z | 2021-01-23T17:26:05.000Z | from uniborg import *
from userbot import *
| 14.666667 | 21 | 0.772727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
80a43ce3d179c95872aebbda4deee5d9f217f5de | 8,035 | py | Python | CAAPR/CAAPR_AstroMagic/PTS/pts/evolve/FunctionSlot.py | wdobbels/CAAPR | 50d0b32642a61af614c22f1c6dc3c4a00a1e71a3 | [
"MIT"
] | 7 | 2016-05-20T21:56:39.000Z | 2022-02-07T21:09:48.000Z | CAAPR/CAAPR_AstroMagic/PTS/pts/evolve/FunctionSlot.py | wdobbels/CAAPR | 50d0b32642a61af614c22f1c6dc3c4a00a1e71a3 | [
"MIT"
] | 1 | 2019-03-21T16:10:04.000Z | 2019-03-22T17:21:56.000Z | CAAPR/CAAPR_AstroMagic/PTS/pts/evolve/FunctionSlot.py | wdobbels/CAAPR | 50d0b32642a61af614c22f1c6dc3c4a00a1e71a3 | [
"MIT"
] | 1 | 2020-05-19T16:17:17.000Z | 2020-05-19T16:17:17.000Z | #!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.evolve.functionslot The *function slot* concept is large used by Pyevolve, the idea
# is simple, each genetic operator or any operator, can be assigned
# to a slot, by this way, we can add more than simple one operator,
# we can have for example, two or more mutator operators at same time,
# two or more evaluation functions, etc. In this :mod:`FunctionSlot` module,
# you'll find the class :class:`FunctionSlot.FunctionSlot`, which is the slot class.
# -----------------------------------------------------------------
# Import standard modules
from types import BooleanType
# Import other evolve modules
import utils
# Import the relevant PTS classes and modules
from ..core.tools.random import prng
# -----------------------------------------------------------------
class FunctionSlot(object):
    """
    FunctionSlot Class - The function slot

    A slot stores one or more callables together with one selection weight
    per callable; ``funcList`` and ``funcWeights`` are kept parallel.

    Example:
       >>> genome.evaluator.set(eval_func)
       >>> genome.evaluator[0]
       <function eval_func at 0x018C8930>
       >>> genome.evaluator
       Slot [Evaluation Function] (Count: 1)
       Name: eval_func
       >>> genome.evaluator.clear()
       >>> genome.evaluator
       Slot [Evaluation Function] (Count: 0)
       No function

    You can add weight to functions when using the `rand_apply` parameter:

    >>> genome.evaluator.set(eval_main, 0.9)
    >>> genome.evaluator.add(eval_sec, 0.3)
    >>> genome.evaluator.setRandomApply()

    In the above example, the function *eval_main* will be called with 90% of
    probability and the *eval_sec* will be called with 30% of probability.

    There are another way to add functions too:

    >>> genome.evaluator += eval_func

    :param name: the slot name
    :param rand_apply: if True, just one of the functions in the slot
                       will be applied, this function is randomly picked based
                       on the weight of the function added.
    """

    def __init__(self, name="Anonymous Function", rand_apply=False):
        """ The creator of the FunctionSlot Class """
        # funcWeights[i] is the selection weight of funcList[i]; the two
        # lists must always have the same length.
        self.funcList = []
        self.funcWeights = []
        self.slotName = name
        self.rand_apply = rand_apply

    # -----------------------------------------------------------------

    def __typeCheck(self, func):
        """
        Used internally to check if a function passed to the
        function slot is callable. Otherwise raises a TypeError exception.

        :param func: the function object
        """
        if not callable(func):
            utils.raiseException("The function must be a method or function", TypeError)

    # -----------------------------------------------------------------

    def __iadd__(self, func):
        """ To add more functions using the += operator

        The function is registered with the same default weight as
        :meth:`add` (0.5). Previously ``slot += func`` grew ``funcList``
        only, so ``zip(funcList, funcWeights)`` in :meth:`applyFunctions`
        and :meth:`__repr__` silently dropped the appended function.

        .. versionadded:: 0.6
           The __iadd__ method.
        """
        self.__typeCheck(func)
        self.funcList.append(func)
        # Keep funcWeights parallel to funcList (bug fix).
        self.funcWeights.append(0.5)
        return self

    # -----------------------------------------------------------------

    def __getitem__(self, index):
        """ Used to retrieve some slot function index """
        return self.funcList[index]

    # -----------------------------------------------------------------

    def __setitem__(self, index, value):
        """ Used to set the index slot function """
        self.__typeCheck(value)
        self.funcList[index] = value

    # -----------------------------------------------------------------

    def __iter__(self):
        """ Return the function list iterator """
        return iter(self.funcList)

    # -----------------------------------------------------------------

    def __len__(self):
        """ Return the number of functions on the slot

        .. versionadded:: 0.6
           The *__len__* method
        """
        return len(self.funcList)

    # -----------------------------------------------------------------

    def setRandomApply(self, flag=True):
        """
        Sets the random function application, in this mode, the
        function will randomly choose one slot to apply

        :param flag: True or False
        """
        # isinstance(flag, bool) works on both Python 2 and 3;
        # types.BooleanType no longer exists in Python 3.
        if not isinstance(flag, bool):
            utils.raiseException("Random option must be True or False", TypeError)

        self.rand_apply = flag

    # -----------------------------------------------------------------

    def clear(self):
        """ Used to clear the functions in the slot """
        if len(self.funcList) > 0:
            del self.funcList[:]
            del self.funcWeights[:]

    # -----------------------------------------------------------------

    def add(self, func, weight=0.5):
        """ Used to add a function to the slot

        :param func: the function to be added in the slot
        :param weight: used when you enable the *random apply*, it's the weight
                       of the function for the random selection

        .. versionadded:: 0.6
           The `weight` parameter.
        """
        self.__typeCheck(func)
        self.funcList.append(func)
        self.funcWeights.append(weight)

    # -----------------------------------------------------------------

    def isEmpty(self):
        """ Return true if the function slot is empy """
        return (len(self.funcList) == 0)

    # -----------------------------------------------------------------

    def set(self, func, weight=0.5):
        """ Used to clear all functions in the slot and add one

        :param func: the function to be added in the slot
        :param weight: used when you enable the *random apply*, it's the weight
                       of the function for the random selection

        .. versionadded:: 0.6
           The `weight` parameter.

        .. note:: the method *set* of the function slot remove all previous
                  functions added to the slot.
        """
        self.clear()
        self.__typeCheck(func)
        self.add(func, weight)

    # -----------------------------------------------------------------

    def apply(self, index, obj, **args):
        """ Apply the index function

        :param index: the index of the function
        :param obj: this object is passes as parameter to the function
        :param args: this args dictionary is passed to the function
        """
        if len(self.funcList) <= 0:
            raise Exception("No function defined: " + self.slotName)
        return self.funcList[index](obj, **args)

    # -----------------------------------------------------------------

    def applyFunctions(self, obj=None, **args):
        """ Generator to apply all function slots in obj

        :param obj: this object is passes as parameter to the function
        :param args: this args dictionary is passed to the function
        """
        if len(self.funcList) <= 0:
            utils.raiseException("No function defined: " + self.slotName)

        if not self.rand_apply:
            # Apply every registered function in insertion order.
            for f in self.funcList:
                yield f(obj, **args)
        else:
            # Weighted roulette pick of a single function: walk the weights,
            # subtracting each from a uniform draw until it is exhausted.
            v = prng.uniform(0, 1)
            fobj = None
            for func, weight in zip(self.funcList, self.funcWeights):
                fobj = func
                if v < weight:
                    break
                v = v - weight

            yield fobj(obj, **args)

    # -----------------------------------------------------------------

    def __repr__(self):
        """ String representation of FunctionSlot """
        strRet = "Slot [%s] (Count: %d)\n" % (self.slotName, len(self.funcList))

        if len(self.funcList) <= 0:
            strRet += "\t\tNo function\n"
            return strRet

        for f, w in zip(self.funcList, self.funcWeights):
            # __name__/__doc__ instead of the Python 2-only func_name/func_doc.
            strRet += "\t\tName: %s - Weight: %.2f\n" % (f.__name__, w)
            if f.__doc__:
                strRet += "\t\tDoc: " + f.__doc__ + "\n"
        return strRet
# -----------------------------------------------------------------
| 30.903846 | 99 | 0.511886 | 6,849 | 0.85229 | 712 | 0.088601 | 0 | 0 | 0 | 0 | 5,428 | 0.67546 |
80a5408b335eed74b00a7bd72beabf1350c963ce | 4,010 | py | Python | test_spelling_corrector.py | mustafaKus/spellcheck | 11f6f923b1427176781bd39cba9aa5d14130332d | [
"MIT"
] | 6 | 2020-12-20T07:22:08.000Z | 2022-02-02T07:14:36.000Z | test_spelling_corrector.py | mustafaKus/spellcheck | 11f6f923b1427176781bd39cba9aa5d14130332d | [
"MIT"
] | null | null | null | test_spelling_corrector.py | mustafaKus/spellcheck | 11f6f923b1427176781bd39cba9aa5d14130332d | [
"MIT"
] | null | null | null | """Implements the test class for the spelling corrector"""
import json
import logging
import os
import sys
import unittest
from unittest import TestCase
from spelling_corrector import NorvigCorrector, SymmetricDeleteCorrector
class SpellingCorrectorTest(TestCase):
    """Implements the test class for the spelling correctors.

    Both correctors share the same fixture layout: every directory under
    ``tests/`` holds a ``dictionary.txt`` ("word frequency" per line) and an
    ``input_2_expected_output.json`` mapping misspelled inputs to the
    expected corrections, so the two test methods share one driver.
    """

    @staticmethod
    def _load_dictionary(dictionary_path):
        """Parse a ``word frequency`` file into a ``{word: frequency}`` dict."""
        word_2_frequency = {}
        with open(dictionary_path, "r") as dictionary_file:
            for line in dictionary_file:
                word, frequency_value = line.strip().split()
                word_2_frequency[word.lower()] = int(frequency_value)
        return word_2_frequency

    def _run_corrector_tests(self, corrector_factory):
        """Run every fixture directory under ``tests/`` through a corrector.

        :param corrector_factory: callable mapping a word->frequency dict to
            a corrector instance exposing ``correct(word)``.
        """
        current_working_directory = os.path.abspath(os.getcwd())
        tests_directory = os.path.join(current_working_directory, "tests")
        logging.info("Tests directory is %s" % tests_directory)
        for test_directory_name in os.listdir(tests_directory):
            logging.info("Testing in %s directory" % test_directory_name)
            test_directory_path = os.path.join(tests_directory, test_directory_name)
            dictionary_path = os.path.join(test_directory_path, "dictionary.txt")
            test_input_2_expected_output_path = os.path.join(test_directory_path, "input_2_expected_output.json")

            logging.info("Reading the dictionary %s" % test_directory_name)
            word_2_frequency = self._load_dictionary(dictionary_path)
            spelling_corrector = corrector_factory(word_2_frequency)

            with open(test_input_2_expected_output_path) as input_2_expected_output_file:
                logging.info("Reading the test data")
                input_2_expected_output = json.load(input_2_expected_output_file)
                for input_, expected_output in input_2_expected_output.items():
                    logging.info("Expected output for the input '%s' is '%s'" % (input_, expected_output))
                    self.assertEqual(expected_output, spelling_corrector.correct(input_))

    def test_norvig_corrector(self):
        """Tests the norvig corrector"""
        logging.info("Tests the norvig corrector")
        self._run_corrector_tests(NorvigCorrector)

    def test_symmetric_delete_corrector(self):
        """Tests the symmetric delete corrector"""
        logging.info("Tests the symmetric delete corrector")
        self._run_corrector_tests(SymmetricDeleteCorrector)
if __name__ == '__main__':
    # Stream DEBUG-level log records to stdout while the suite runs.
    logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
    unittest.main()
| 55.694444 | 113 | 0.684289 | 3,666 | 0.914214 | 0 | 0 | 0 | 0 | 0 | 0 | 662 | 0.165087 |
80a5bf8e4fc4367f119468df4b768b89e6482019 | 382 | py | Python | src/pyglue/DocStrings/GroupTransform.py | omenos/OpenColorIO | 7316c3be20752278924dd3f213bff297ffb63a14 | [
"BSD-3-Clause"
] | 7 | 2015-07-01T03:19:43.000Z | 2021-03-27T11:02:16.000Z | src/pyglue/DocStrings/GroupTransform.py | dictoon/OpenColorIO | 64adcad300adfd166280d2e7b1fb5c3ce7dca482 | [
"BSD-3-Clause"
] | null | null | null | src/pyglue/DocStrings/GroupTransform.py | dictoon/OpenColorIO | 64adcad300adfd166280d2e7b1fb5c3ce7dca482 | [
"BSD-3-Clause"
] | 2 | 2019-03-05T20:43:59.000Z | 2019-11-11T20:35:55.000Z |
class GroupTransform:
    """
    GroupTransform

    Documentation stub: every method body below is ``pass``, so this class
    carries no behavior of its own — it only exists to hold docstrings.
    Judging by the method names (``push_back``, ``size``, ``clear``,
    ``empty``) it mirrors a vector-like container of transforms exposed by
    a native binding; NOTE(review): confirm the real signatures and
    semantics against the native implementation before relying on them.
    """

    def __init__(self):
        pass

    def getTransform(self):
        # Stub — presumably returns a contained transform; confirm whether
        # the real binding takes an index argument.
        pass

    def getTransforms(self):
        # Stub — presumably returns the contained transforms.
        pass

    def setTransforms(self, transforms):
        # Stub — presumably replaces the contained transforms.
        pass

    def size(self):
        # Stub — presumably the number of contained transforms.
        pass

    def push_back(self, transform):
        # Stub — presumably appends a transform.
        pass

    def clear(self):
        # Stub — presumably removes all transforms.
        pass

    def empty(self):
        # Stub — presumably True when no transforms are held.
        pass
| 17.363636 | 40 | 0.54712 | 380 | 0.994764 | 0 | 0 | 0 | 0 | 0 | 0 | 30 | 0.078534 |
80a7c3aa2f40f63c3545157bf90494bd8510acf2 | 5,127 | py | Python | censusreporter/apps/census/urls.py | Durellg/censusreporter | c006c2f1c67fd29086fe532974f1eb57e70a0e2c | [
"MIT"
] | null | null | null | censusreporter/apps/census/urls.py | Durellg/censusreporter | c006c2f1c67fd29086fe532974f1eb57e70a0e2c | [
"MIT"
] | null | null | null | censusreporter/apps/census/urls.py | Durellg/censusreporter | c006c2f1c67fd29086fe532974f1eb57e70a0e2c | [
"MIT"
] | null | null | null | from django.conf import settings
from django.conf.urls import url, patterns, include
from django.contrib import admin
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponse
from django.views.decorators.cache import cache_page
from django.views.generic.base import TemplateView, RedirectView
from .utils import GEOGRAPHIES_MAP
from .views import (HomepageView, GeographyDetailView,
TableDetailView, PlaceSearchJson, GeoSearch,
HealthcheckView, DataView, TopicView, ExampleView,
MakeJSONView, SitemapTopicsView, SearchResultsView)
# Register admin modules from all installed apps.
admin.autodiscover()

STANDARD_CACHE_TIME = 60*60*24 # 24-hour cache
COMPARISON_FORMATS = 'map|table|distribution'
BLOCK_ROBOTS = getattr(settings, 'BLOCK_ROBOTS', False)

urlpatterns = patterns('',
    url(
        regex   = '^$',
        view    = cache_page(STANDARD_CACHE_TIME)(HomepageView.as_view()),
        kwargs  = {},
        name    = 'homepage',
    ),

    # --- Geography profiles ---
    # e.g. /profiles/16000US5367000/ (Spokane, WA)
    # this should redirect to slugged version of the URL above
    url(
        regex   = '^profiles/(?P<fragment>[a-zA-Z0-9\-]+)/$',
        view    = cache_page(STANDARD_CACHE_TIME)(GeographyDetailView.as_view()),
        kwargs  = {},
        name    = 'geography_detail',
    ),

    url(
        regex   = '^profiles/$',
        view    = RedirectView.as_view(url=reverse_lazy('search')),
        kwargs  = {},
        name    = 'geography_search_redirect',
    ),

    url(
        regex   = '^make-json/charts/$',
        view    = MakeJSONView.as_view(),
        kwargs  = {},
        name    = 'make_json_charts',
    ),

    # --- Tables ---
    # Legacy table codes redirect to their replacement tables.
    url(
        regex   = '^tables/B23002/$',
        view    = RedirectView.as_view(url=reverse_lazy('table_detail',kwargs={'table':'B23002A'})),
        kwargs  = {},
        name    = 'redirect_B23002',
    ),

    url(
        regex   = '^tables/C23002/$',
        view    = RedirectView.as_view(url=reverse_lazy('table_detail',kwargs={'table':'C23002A'})),
        kwargs  = {},
        name    = 'redirect_C23002',
    ),

    # e.g. /table/B01001/
    url(
        regex   = '^tables/(?P<table>[a-zA-Z0-9]+)/$',
        view    = cache_page(STANDARD_CACHE_TIME)(TableDetailView.as_view()),
        kwargs  = {},
        name    = 'table_detail',
    ),

    url(
        regex   = '^tables/$',
        view    = RedirectView.as_view(url=reverse_lazy('search')),
        kwargs  = {},
        name    = 'table_search',
    ),

    url(
        regex   = '^search/$',
        view    = SearchResultsView.as_view(),
        kwargs  = {},
        name    = 'search'
    ),

    url(
        regex   = '^data/$',
        view    = RedirectView.as_view(url=reverse_lazy('table_search')),
        kwargs  = {},
        name    = 'table_search_redirect',
    ),

    # e.g. /data/map/
    url(
        regex   = '^data/(?P<format>%s)/$' % COMPARISON_FORMATS,
        view    = cache_page(STANDARD_CACHE_TIME)(DataView.as_view()),
        kwargs  = {},
        name    = 'data_detail',
    ),

    # --- Topics, examples and static pages ---
    url(
        regex   = '^topics/$',
        view    = cache_page(STANDARD_CACHE_TIME)(TopicView.as_view()),
        kwargs  = {},
        name    = 'topic_list',
    ),

    url(
        regex   = '^topics/race-latino/?$',
        view    = RedirectView.as_view(url=reverse_lazy('topic_detail', kwargs={'topic_slug': 'race-hispanic'})),
        name    = 'topic_latino_redirect',
    ),

    url(
        regex   = '^topics/(?P<topic_slug>[-\w]+)/$',
        view    = cache_page(STANDARD_CACHE_TIME)(TopicView.as_view()),
        kwargs  = {},
        name    = 'topic_detail',
    ),

    url(
        regex   = '^examples/(?P<example_slug>[-\w]+)/$',
        view    = cache_page(STANDARD_CACHE_TIME)(ExampleView.as_view()),
        kwargs  = {},
        name    = 'example_detail',
    ),

    url(
        regex   = '^glossary/$',
        view    = cache_page(STANDARD_CACHE_TIME)(TemplateView.as_view(template_name="glossary.html")),
        kwargs  = {},
        name    = 'glossary',
    ),

    url(
        regex   = '^locate/$',
        view    = cache_page(STANDARD_CACHE_TIME)(TemplateView.as_view(template_name="locate/locate.html")),
        kwargs  = {},
        name    = 'locate',
    ),

    url(
        regex   = '^healthcheck$',
        view    = HealthcheckView.as_view(),
        kwargs  = {},
        name    = 'healthcheck',
    ),

    # Serve robots.txt inline; disallow crawling when settings.BLOCK_ROBOTS
    # is set. NOTE: `content_type` replaces the long-deprecated `mimetype`
    # keyword (removed in Django 1.7); both spellings behave identically
    # on the Django versions that still provide `patterns()`.
    url(
        regex   = '^robots.txt$',
        view    = lambda r: HttpResponse(
            "User-agent: *\n%s: /" % ('Disallow' if BLOCK_ROBOTS else 'Allow'),
            content_type="text/plain"
        )
    ),

    url(
        regex   = '^topics/sitemap.xml$',
        view    = SitemapTopicsView.as_view(),
        kwargs  = {},
        name    = 'sitemap_topics'
    ),

    ## LOCAL DEV VERSION OF API ##
    url(
        regex   = '^place-search/json/$',
        view    = PlaceSearchJson.as_view(),
        kwargs  = {},
        name    = 'place_search_json',
    ),

    url(
        regex   = '^geo-search/$',
        view    = GeoSearch.as_view(),
        kwargs  = {},
        name    = 'geo_search',
    ),
    ## END LOCAL DEV VERSION OF API ##
)
| 28.17033 | 113 | 0.549639 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,229 | 0.239711 |
80a7c4a196f3e27707a6f4deb6d5d49fa769fe9a | 722 | py | Python | Pythonista/Editor/cambiar.py | walogo/Pythonista-scripts | 760451a0cdbe5dd76008a4e616d74191385bbd8b | [
"MIT"
] | 2 | 2019-04-24T19:25:55.000Z | 2019-05-04T11:27:54.000Z | Pythonista/Editor/cambiar.py | shoriwe/Tools | 760451a0cdbe5dd76008a4e616d74191385bbd8b | [
"MIT"
] | 4 | 2019-11-02T16:12:41.000Z | 2020-01-19T10:10:23.000Z | Pythonista/Editor/cambiar.py | walogo/Tools | 760451a0cdbe5dd76008a4e616d74191385bbd8b | [
"MIT"
] | 1 | 2020-10-07T17:49:09.000Z | 2020-10-07T17:49:09.000Z | from re import search
from editor import get_path
def count_spaces(line):
    """Return the number of leading spaces of *line*.

    The pattern is anchored to the start of the line so interior runs of
    spaces are ignored, and the previous arbitrary cap of six spaces is
    removed so deeper indents are measured correctly. Returns 0 (instead
    of raising AttributeError) when the line has no leading spaces.
    """
    match = search(r'^ +', line)
    return match.span()[1] if match else 0
def replace():
    """Convert the space indentation of the editor's current file to tabs.

    Reads the file open in the Pythonista editor, measures the indent
    width from the first indented line, and rewrites every run of that
    many spaces as a tab character. Progress and errors are reported on
    stdout; any exception is caught and mapped to the step it occurred in.
    """
    steps={1:'Extracting content',2:'Starting edition',3:'Getting cuantity'}
    actual=1
    try:
        path = get_path()
        print(steps[1])
        # Read via a context manager so the handle is always closed
        # (the old `open(file).read()` leaked the file object).
        with open(path) as source:
            content = source.read()
        # First line that starts with a space defines the indent width.
        line = [x for x in content.split('\n') if ' ' in x[:1]][0]
        actual=2
        print(steps[2])
        actual=3
        print(steps[3])
        cuantity = count_spaces(line)
        # `target` no longer shadows the builtin `file`/the path variable;
        # the explicit close() was redundant inside `with`.
        with open(path, 'w') as target:
            target.write(content.replace(' ' * cuantity, '\t'))
        print('Done')
    except Exception as e:
        print('++{}++'.format(e))
        print('Error in step {}'.format(actual))
        print('({})'.format(steps[actual]))


replace()
| 21.878788 | 73 | 0.638504 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 120 | 0.166205 |
80abafe50ec268ef5c6ba2106a574b5b6aeb135a | 2,036 | py | Python | example.py | Tobi-De/qosic-sdk | a9c7a17c3a328883dfd033080175c64fb8c8fd32 | [
"MIT"
] | 1 | 2022-03-12T13:12:17.000Z | 2022-03-12T13:12:17.000Z | example.py | Tobi-De/qosic-sdk | a9c7a17c3a328883dfd033080175c64fb8c8fd32 | [
"MIT"
] | 247 | 2021-05-12T08:52:46.000Z | 2022-03-30T15:22:06.000Z | example.py | Tobi-De/qosic-sdk | a9c7a17c3a328883dfd033080175c64fb8c8fd32 | [
"MIT"
] | null | null | null | import phonenumbers
from dotenv import dotenv_values
from qosic import Client, MtnConfig, MTN, MOOV, OPERATION_CONFIRMED
from qosic.exceptions import (
InvalidCredentialsError,
InvalidClientIdError,
ServerError,
RequestError,
)
# Credentials and test inputs are read from the local .env file.
config = dotenv_values(".env")

moov_client_id = config.get("MOOV_CLIENT_ID")
mtn_client_id = config.get("MTN_CLIENT_ID")
server_login = config.get("SERVER_LOGIN")
server_pass = config.get("SERVER_PASS")
# This is just for test purpose, you should directly pass the phone number
raw_phone = config.get("PHONE_NUMBER")

# NOTE(review): this module-level provider list and Client are rebuilt from
# scratch inside main() and not used by it, so they only run (with their
# side effects) at import time. Confirm nothing does `from example import
# client` before removing this duplication.
providers = [
    MTN(client_id=mtn_client_id, config=MtnConfig(step=30, timeout=60 * 2)),
    MOOV(client_id=moov_client_id),
]

client = Client(
    providers=providers,
    login=server_login,
    password=server_pass,
    active_logging=True,
)
def main():
    """Request a sandbox payment through the QOS IC API and print the outcome."""
    payment_providers = [
        MTN(client_id=mtn_client_id, config=MtnConfig(step=30, timeout=60 * 2)),
        MOOV(client_id=moov_client_id),
    ]
    known_errors = (
        InvalidCredentialsError,
        InvalidClientIdError,
        ServerError,
        RequestError,
    )
    try:
        api_client = Client(
            providers=payment_providers,
            login=server_login,
            password=server_pass,
            active_logging=True,
        )
        phone = phonenumbers.parse(raw_phone)
        result = api_client.request_payment(
            phone=phone, amount=1000, first_name="User", last_name="TEST"
        )
    except known_errors as e:
        print(e)
    else:
        if result.state == OPERATION_CONFIRMED:
            print(
                f"TransRef: {result.trans_ref} -> Your requested payment to {result.phone} for an amount "
                f"of {result.amount} has been successfully validated "
            )
        else:
            print(f"Payment rejected: {result}")

    # To issue a refund instead (refunds are currently only available for
    # MTN phone numbers):
    # result = client.request_refund(trans_ref=result.trans_ref, phone=phone)


if __name__ == "__main__":
    main()
| 29.085714 | 113 | 0.655697 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 526 | 0.25835 |
80abf7caa3074490050648cd507ce1a0805c6ca8 | 2,277 | py | Python | rlcard/games/karma/card.py | pettaa123/rlcard | f5b98eb3a836406ee51197728a258c834959ddb3 | [
"MIT"
] | null | null | null | rlcard/games/karma/card.py | pettaa123/rlcard | f5b98eb3a836406ee51197728a258c834959ddb3 | [
"MIT"
] | null | null | null | rlcard/games/karma/card.py | pettaa123/rlcard | f5b98eb3a836406ee51197728a258c834959ddb3 | [
"MIT"
class KarmaCard(object):
    """A single playing card for the game of Karma.

    ``info`` holds the shared card metadata: the card types, the trait
    ranking (low to high), and the full card orderings.
    """

    info = {'type': ['number', 'wild'],
            'trait': ['4', '5', '6', '7', '8', '9', 'J', 'Q', 'K', 'A', '2', '3', '10', 'draw'],
            'order': ['4:4', '4:3', '4:2', '4:1', '5:4', '5:3', '5:2', '5:1', '6:4', '6:3', '6:2', '6:1',
                      '7:4', '7:3', '7:2', '7:1', '8:4', '8:3', '8:2', '8:1', '9:4', '9:3', '9:2', '9:1',
                      'J:4', 'J:3', 'J:2', 'J:1', 'Q:4', 'Q:3', 'Q:2', 'Q:1', 'K:1', 'K:2', 'K:3', 'K:4',
                      'A:1', 'A:2', 'A:3', 'A:4', '2:1', '2:2', '2:3', '2:4', '3:1', '3:2', '3:3', '3:4',
                      '10:1', '10:2', '10:3', '10:4', 'draw:1'],
            'order_start': ['4:4', '4:3', '4:2', '4:1', '5:4', '5:3', '5:2', '5:1', '6:4', '6:3', '6:2', '6:1',
                            '7:4', '7:3', '7:2', '7:1', '8:4', '8:3', '8:2', '8:1', '9:4', '9:3', '9:2', '9:1',
                            'J:1', 'J:2', 'J:3', 'J:4', 'Q:1', 'Q:2', 'Q:3', 'Q:4', 'K:1', 'K:2', 'K:3', 'K:4',
                            'A:1', 'A:2', 'A:3', 'A:4', '2:1', '2:2', '2:3', '2:4', '3:1', '3:2', '3:3', '3:4',
                            '10:1', '10:2', '10:3', '10:4', 'draw:1']
            }

    def __init__(self, card_type, trait):
        """Create a KarmaCard.

        Args:
            card_type (str): The type of card
            trait (str): The trait of card
        """
        self.type = card_type
        self.trait = trait
        self.str = self.get_str()

    def get_str(self):
        """Return the string representation of the card (its trait)."""
        return self.trait

    def get_index(self):
        """Return the position of this card's trait in the trait ranking."""
        return self.info['trait'].index(self.trait)

    @staticmethod
    def print_cards(cards):
        """Print cards as a comma-separated list with no trailing newline.

        Args:
            cards (str or list): The string form or a list of Karma cards
        """
        if isinstance(cards, str):
            cards = [cards]
        print(', '.join(str(card) for card in cards), end='')
| 36.725806 | 111 | 0.368906 | 2,276 | 0.999561 | 0 | 0 | 420 | 0.184453 | 0 | 0 | 1,161 | 0.509881 |
80ae30ede933b26169568f957d647578f83c393b | 4,131 | py | Python | datasets/Part 1 - Data Preprocessing/Section 2 -------------------- Part 1 - Data Preprocessing --------------------/my_version_kevinml/Preprocessing_CategoricalData.py | kevinLCG/machinelearning-az | 54e3090275a3fc419aad17caadc6a47a71dcd3d4 | [
"MIT"
] | null | null | null | datasets/Part 1 - Data Preprocessing/Section 2 -------------------- Part 1 - Data Preprocessing --------------------/my_version_kevinml/Preprocessing_CategoricalData.py | kevinLCG/machinelearning-az | 54e3090275a3fc419aad17caadc6a47a71dcd3d4 | [
"MIT"
] | null | null | null | datasets/Part 1 - Data Preprocessing/Section 2 -------------------- Part 1 - Data Preprocessing --------------------/my_version_kevinml/Preprocessing_CategoricalData.py | kevinLCG/machinelearning-az | 54e3090275a3fc419aad17caadc6a47a71dcd3d4 | [
"MIT"
] | null | null | null | #!/home/kevinml/anaconda3/bin/python3.7
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 10 11:47:39 2019

@author: kevinml

Version Python: 3.7
"""

# Preprocessing - Categorical data

###########################################################
#                      Input Dataset                      #
###########################################################

# Import the libraries.
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

## Load the data set.
dataset = pd.read_csv('./Data.csv')

# Split the frame into independent features and the dependent target.
# INDEPENDENT variables (matrix): every column except the last.
X = dataset.iloc[:, :-1].values
# DEPENDENT variable (vector): column 3 (the "Purchase" labels).
y = dataset.iloc[:, 3].values

###########################################################
#                  Handling Missing Data                  #
###########################################################

# Import the libraries.
# https://scikit-learn.org/stable/modules/impute.html
from sklearn.impute import SimpleImputer

# Build an imputer that replaces missing values (NaN / np.nan) with the
# MEAN (alternatives: median, most_frequent, ...) of each COLUMN.
imputer = SimpleImputer(missing_values = np.nan, strategy = "mean")
# Fit the imputer only on the columns that contain missing data.
imputer = imputer.fit(X[:, 1:3])
# Overwrite the matrix with the imputed values.
X[:, 1:3] = imputer.transform(X[:,1:3])

###########################################################
#             Encoding of Categorical Data                #
###########################################################

# Import the libraries.
# https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.OneHotEncoder.html
# https://scikit-learn.org/stable/modules/generated/sklearn.compose.make_column_transformer.html
from sklearn.preprocessing import LabelEncoder, OneHotEncoder

from sklearn.compose import make_column_transformer

# make_column_transformer takes (transformer, columns) pairs that say which
# transformer applies to which column subset; unselected columns pass
# through unchanged (remainder="passthrough").
# One-hot encode the country names in column 0.
onehotencoder = make_column_transformer((OneHotEncoder(), [0]), remainder = "passthrough")
X = onehotencoder.fit_transform(X).toarray()

# Encode the Purchase values "Yes"/"No" as 1/0.
labelencoder_y = LabelEncoder()
y = labelencoder_y.fit_transform(y)

###########################################################
#              Training & Testing Splitting               #
###########################################################

# Split the data set into a training set and a testing set.

# Import the libraries.
# https://scikit-learn.org/stable/modules/generated/sklearn.model_selection.train_test_split.html
from sklearn.model_selection import train_test_split

# Produce 4 arrays: features and labels for training and testing
# respectively, with a fixed seed of 42 for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 42)

###########################################################
#                   Feature Scaling                       #
###########################################################

# Features have different dynamic ranges; when operating on them (e.g.
# computing Euclidean distances) large-range variables would drown out
# small-range ones. Standardization maps values to roughly -1..1.

# Import the libraries.
# https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html
from sklearn.preprocessing import StandardScaler

# Create the scaler object.
sc_X = StandardScaler()
# Fit the scaler on the training data (learning mean/std there only)...
X_train = sc_X.fit_transform(X_train)
# ...and reuse the fitted scaler to transform the testing data.
X_test = sc_X.transform(X_test)
80ae4466a6cac93d4e909355cee631a4f38e2bce | 3,120 | py | Python | docker_registry_client_async/specs.py | GitHK/docker-registry-client-async | 384b1b7f7abcda55258028d930b45054ab03f6c4 | [
"Apache-2.0"
] | 2 | 2021-10-13T00:25:23.000Z | 2022-02-23T22:22:33.000Z | docker_registry_client_async/specs.py | GitHK/docker-registry-client-async | 384b1b7f7abcda55258028d930b45054ab03f6c4 | [
"Apache-2.0"
] | 17 | 2020-07-18T21:58:51.000Z | 2022-03-31T06:53:30.000Z | docker_registry_client_async/specs.py | GitHK/docker-registry-client-async | 384b1b7f7abcda55258028d930b45054ab03f6c4 | [
"Apache-2.0"
] | 4 | 2020-09-25T22:12:05.000Z | 2022-02-15T06:26:50.000Z | #!/usr/bin/env python
# pylint: disable=too-few-public-methods
"""Reusable string literals."""
class DockerAuthentication:
    """
    https://github.com/docker/distribution/blob/master/docs/spec/auth/token.md
    https://github.com/docker/distribution/blob/master/docs/spec/auth/scope.md
    """

    # Token-endpoint URL template: {0}=auth realm, {1}=service, {2}=scope.
    DOCKERHUB_URL_PATTERN = (
        "{0}?service={1}&scope={2}&client_id=docker-registry-client-async"
    )
    # Access scopes; the *_PATTERN values are formatted with the image name.
    SCOPE_REGISTRY_CATALOG = "registry:catalog:*"
    SCOPE_REPOSITORY_PULL_PATTERN = "repository:{0}:pull"
    SCOPE_REPOSITORY_PUSH_PATTERN = "repository:{0}:push"
    SCOPE_REPOSITORY_ALL_PATTERN = "repository:{0}:pull,push"
class DockerMediaTypes:
    """https://github.com/docker/distribution/blob/master/docs/spec/manifest-v2-2.md#manifest-list"""

    # Image configuration blob.
    CONTAINER_IMAGE_V1 = "application/vnd.docker.container.image.v1+json"
    # Multi-architecture manifest list (schema 2).
    DISTRIBUTION_MANIFEST_LIST_V2 = (
        "application/vnd.docker.distribution.manifest.list.v2+json"
    )
    # Legacy schema 1 manifest, unsigned and signed (prettyjws) variants.
    DISTRIBUTION_MANIFEST_V1 = "application/vnd.docker.distribution.manifest.v1+json"
    DISTRIBUTION_MANIFEST_V1_SIGNED = (
        "application/vnd.docker.distribution.manifest.v1+prettyjws"
    )
    # Schema 2 image manifest.
    DISTRIBUTION_MANIFEST_V2 = "application/vnd.docker.distribution.manifest.v2+json"
    # Gzipped layer tarballs; the "foreign" variant marks layers fetched
    # from an external URL instead of the registry.
    IMAGE_ROOTFS_DIFF = "application/vnd.docker.image.rootfs.diff.tar.gzip"
    IMAGE_ROOTFS_FOREIGN_DIFF = (
        "application/vnd.docker.image.rootfs.foreign.diff.tar.gzip"
    )
    # Docker plugin configuration.
    PLUGIN_V1 = "application/vnd.docker.plugin.v1+json"
class Indices:
    """Common registry indices."""

    # Docker Hub's registry index.
    DOCKERHUB = "index.docker.io"
    # Red Hat Quay.
    QUAY = "quay.io"
class QuayAuthentication:
    """
    https://docs.quay.io/api/
    """

    # Token-endpoint URL template: {0}=auth realm, {1}=service, {2}=scope.
    QUAY_URL_PATTERN = (
        "{0}?service={1}&scope={2}&client_id=docker-registry-client-async"
    )
    # Access scopes; the *_PATTERN values are formatted with the image name.
    SCOPE_REPOSITORY_PULL_PATTERN = "repo:{0}:read"
    SCOPE_REPOSITORY_PUSH_PATTERN = "repo:{0}:write"
    SCOPE_REPOSITORY_ALL_PATTERN = "repo:{0}:read,write"
class MediaTypes:
    """Generic mime types."""

    APPLICATION_JSON = "application/json"
    APPLICATION_OCTET_STREAM = "application/octet-stream"
    APPLICATION_YAML = "application/yaml"
class OCIMediaTypes:
    """https://github.com/opencontainers/image-spec/blob/master/media-types.md"""

    # Content descriptor.
    DESCRIPTOR_V1 = "application/vnd.oci.descriptor.v1+json"
    # Image configuration.
    IMAGE_CONFIG_V1 = "application/vnd.oci.image.config.v1+json"
    # Image index (the OCI analogue of a manifest list).
    IMAGE_INDEX_V1 = "application/vnd.oci.image.index.v1+json"
    # Layer tarballs: plain, gzip- and zstd-compressed.
    IMAGE_LAYER_V1 = "application/vnd.oci.image.layer.v1.tar"
    IMAGE_LAYER_GZIP_V1 = "application/vnd.oci.image.layer.v1.tar+gzip"
    IMAGE_LAYER_ZSTD_V1 = "application/vnd.oci.image.layer.v1.tar+zstd"
    # Non-distributable layer variants (must not be pushed to registries).
    IMAGE_LAYER_NONDISTRIBUTABLE_V1 = (
        "application/vnd.oci.image.layer.nondistributable.v1.tar"
    )
    IMAGE_LAYER_NONDISTRIBUTABLE_GZIP_V1 = (
        "application/vnd.oci.image.layer.nondistributable.v1.tar+gzip"
    )
    IMAGE_LAYER_NONDISTRIBUTABLE_ZSTD_V1 = (
        "application/vnd.oci.image.layer.nondistributable.v1.tar+zstd"
    )
    # Image manifest.
    IMAGE_MANIFEST_V1 = "application/vnd.oci.image.manifest.v1+json"
    # OCI layout header file.
    LAYOUT_HEADER_V1 = "application/vnd.oci.layout.header.v1+json"
| 34.285714 | 101 | 0.721154 | 3,005 | 0.963141 | 0 | 0 | 0 | 0 | 0 | 0 | 1,835 | 0.588141 |
80b16e3010e719ac0aedcd4a3f1aa074b37a447d | 1,283 | py | Python | test/toxiproxy_api_test.py | ondrejsika/toxiproxy-python | 0ad2533bae0712fbc835b9aa37869f8cd1c3343c | [
"MIT"
] | 36 | 2016-08-03T08:58:20.000Z | 2022-01-25T07:42:41.000Z | test/toxiproxy_api_test.py | ondrejsika/toxiproxy-python | 0ad2533bae0712fbc835b9aa37869f8cd1c3343c | [
"MIT"
] | 3 | 2017-09-13T17:36:59.000Z | 2021-02-08T18:37:22.000Z | test/toxiproxy_api_test.py | ondrejsika/toxiproxy-python | 0ad2533bae0712fbc835b9aa37869f8cd1c3343c | [
"MIT"
] | 17 | 2017-04-27T14:19:53.000Z | 2022-02-25T06:55:16.000Z | from unittest import TestCase
import requests
from toxiproxy.api import validate_response
from toxiproxy.exceptions import NotFound, ProxyExists
class IntoxicatedTest(TestCase):
    """Integration tests against a toxiproxy API server on localhost."""

    def setUp(self):
        # Default listen address of the toxiproxy HTTP API.
        self.base_url = "http://127.0.0.1:8474"

    def test_not_found(self):
        """ Test an invalid url """
        url_to_test = "%s/%s" % (self.base_url, "not_found")

        with self.assertRaises(NotFound) as context:
            validate_response(requests.get(url_to_test))

        # assertIn against the exception *text*: plain ``"..." in exc`` is
        # not supported by ordinary exception objects, and assertIn yields
        # a useful message on failure.
        self.assertIn("404 page not found\n", str(context.exception))

    def test_proxy_exists(self):
        """ Test that a proxy already exists """
        url_to_test = "%s/%s" % (self.base_url, "proxies")

        json = {
            "upstream": "localhost:3306",
            "name": "test_mysql_service"
        }

        # Lets create the first proxy
        validate_response(requests.post(url_to_test, json=json))
        # Delete the created proxy even when the assertions below fail, so
        # reruns don't collide with a stale "test_mysql_service" proxy.
        self.addCleanup(requests.delete, "%s/%s" % (url_to_test, "test_mysql_service"))

        with self.assertRaises(ProxyExists) as context:
            # Lets create another one to see it breaks
            validate_response(requests.post(url_to_test, json=json))

        self.assertIn("proxy already exists", str(context.exception))
| 30.547619 | 72 | 0.643804 | 1,133 | 0.883087 | 0 | 0 | 0 | 0 | 0 | 0 | 344 | 0.268122 |
80b6a675ace70af0f9c0466a40acaf8e75019f1e | 179 | py | Python | tests/cases/help/see_for_top_level/test_see_for_top_level.py | WilliamMayor/vantage | 05cda557cfc27cbf8aaf80a472a023a896e98546 | [
"MIT"
] | 1 | 2018-02-21T09:50:53.000Z | 2018-02-21T09:50:53.000Z | tests/cases/help/see_for_top_level/test_see_for_top_level.py | WilliamMayor/vantage | 05cda557cfc27cbf8aaf80a472a023a896e98546 | [
"MIT"
] | 15 | 2015-04-30T15:19:29.000Z | 2021-07-28T14:34:46.000Z | tests/cases/help/see_for_top_level/test_see_for_top_level.py | WilliamMayor/vantage | 05cda557cfc27cbf8aaf80a472a023a896e98546 | [
"MIT"
] | null | null | null | def test_see_for_top_level(result):
assert (
"usage: vantage [-a PATH] [-e NAME ...] [-v KEY=[VALUE] ...] [--verbose] [-h] COMMAND..."
in result.stdout_
)
| 29.833333 | 97 | 0.547486 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 89 | 0.497207 |
80b70820208ed5aa86523b3f57937420df90eabf | 1,777 | py | Python | integritybackend/geocoder.py | starlinglab/integrity-backend | 8d2a0640d7a9f66c97d180ad76aedf968dfa43e6 | [
"MIT"
] | 1 | 2022-03-18T16:11:31.000Z | 2022-03-18T16:11:31.000Z | integritybackend/geocoder.py | starlinglab/starling-integrity-api | c39161576e852afbffe3053af468acf58207b5ae | [
"MIT"
] | 37 | 2022-01-17T22:07:17.000Z | 2022-03-31T22:11:16.000Z | integritybackend/geocoder.py | starlinglab/starling-integrity-api | c39161576e852afbffe3053af468acf58207b5ae | [
"MIT"
] | null | null | null | import geocoder
from .log_helper import LogHelper
_logger = LogHelper.getLogger()
class Geocoder:
    """Thin wrapper around the ``geocoder`` package for reverse geocoding."""

    def reverse_geocode(self, lat, lon):
        """Retrieves reverse geocoding information for the given latitude and longitude.

        Args:
            lat, lon: latitude and longitude to reverse geocode, as floats

        Returns:
            geolocation JSON (normalized via _json_to_address), or None on failure
        """
        # TODO: Add some kind of throttling and/or caching to prevent us from sending more than 1 req/sec.
        response = geocoder.osm([lat, lon], method="reverse")
        succeeded = response.status_code == 200 and response.status == "OK"
        if not succeeded:
            _logger.error(
                "Reverse geocode lookup for (%s, %s) failed with: %s",
                lat,
                lon,
                response.status,
            )
            return None
        return self._json_to_address(response.json)

    def _json_to_address(self, geo_json):
        """Convert geocoding JSON to a uniform format for our own use."""
        osm_address = geo_json.get("raw", {}).get("address")
        if osm_address is None:
            _logger.warning("Reverse geocoding result did not include raw.address")
            return None
        return {
            "country_code": osm_address.get("country_code"),
            "city": self._get_preferred_key(
                osm_address, ["city", "town", "municipality", "village"]
            ),
            "country": osm_address.get("country"),
            "state": self._get_preferred_key(
                osm_address, ["state", "region", "state_district"]
            ),
        }

    def _get_preferred_key(self, some_dict, keys):
        # Return the value of the first key (in preference order) present
        # in some_dict, or None when none of them appear.
        for candidate in keys:
            if candidate in some_dict:
                return some_dict[candidate]
        return None
| 34.173077 | 106 | 0.593697 | 1,691 | 0.951604 | 0 | 0 | 0 | 0 | 0 | 0 | 654 | 0.368036 |
80b7514de1b311404a81251566b500133863dc16 | 1,195 | py | Python | dsa/patterns/binary_search/number_range.py | bksahu/dsa | 4b36abbb3e00ce449c435c44260316f46d6d35ec | [
"MIT"
] | null | null | null | dsa/patterns/binary_search/number_range.py | bksahu/dsa | 4b36abbb3e00ce449c435c44260316f46d6d35ec | [
"MIT"
] | 4 | 2019-10-02T14:24:54.000Z | 2020-03-26T07:06:15.000Z | dsa/patterns/binary_search/number_range.py | bksahu/dsa | 4b36abbb3e00ce449c435c44260316f46d6d35ec | [
"MIT"
] | 2 | 2019-10-02T15:57:51.000Z | 2020-04-10T07:22:06.000Z | """
Given an array of numbers sorted in ascending order, find the range of a given number ‘key’.
The range of the ‘key’ will be the first and last position of the ‘key’ in the array.
Write a function to return the range of the ‘key’. If the ‘key’ is not present return [-1, -1].
Example 1:
Input: [4, 6, 6, 6, 9], key = 6
Output: [1, 3]
Example 2:
Input: [1, 3, 8, 10, 15], key = 10
Output: [3, 3]
Example 3:
Input: [1, 3, 8, 10, 15], key = 12
Output: [-1, -1]
"""
def number_range(nums, key):
    """Return ``[first, last]`` positions of ``key`` in sorted ``nums``.

    Args:
        nums: list of numbers sorted in ascending order.
        key: value to locate.

    Returns:
        ``[first_index, last_index]`` of ``key``, or ``[-1, -1]`` if the
        key is absent (or the list is empty).
    """
    # BUGFIX: the original indexed nums[start] unconditionally, so an
    # empty input crashed with IndexError instead of returning [-1, -1].
    if not nums:
        return [-1, -1]
    # Lower bound: first index whose value is >= key.
    left, right = 0, len(nums) - 1
    while left < right:
        mid = (left + right) >> 1
        if nums[mid] < key:
            left = mid + 1
        else:
            right = mid
    if nums[left] != key:
        return [-1, -1]
    start = left
    # Upper bound: last index whose value is <= key.  Searching only
    # [start, len-1] (the key cannot appear before its first occurrence).
    # The +1 bias makes mid round up so the loop terminates.
    left, right = start, len(nums) - 1
    while left < right:
        mid = (left + right + 1) >> 1
        if nums[mid] > key:
            right = mid - 1
        else:
            left = mid
    return [start, right]
if __name__ == "__main__":
    # Exercise the three documented examples.
    for nums, wanted in (
        ([4, 6, 6, 6, 9], 6),
        ([1, 3, 8, 10, 15], 10),
        ([1, 3, 8, 10, 15], 12),
    ):
        print(number_range(nums, key=wanted))
80b8628f26ab382aefa545b0fb3740e9e53b5e22 | 70 | py | Python | not_tf_opt/__init__.py | gergely-flamich/not-tf-opt | 18e2c0024f1179a51190751d22ba4eb8b25bf3db | [
"MIT"
] | null | null | null | not_tf_opt/__init__.py | gergely-flamich/not-tf-opt | 18e2c0024f1179a51190751d22ba4eb8b25bf3db | [
"MIT"
] | null | null | null | not_tf_opt/__init__.py | gergely-flamich/not-tf-opt | 18e2c0024f1179a51190751d22ba4eb8b25bf3db | [
"MIT"
] | null | null | null | from .variables import *
from .optimize import *
from .utils import *
| 17.5 | 24 | 0.742857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
80b90025f8cfff3aebe55376e817e68ada7a4f80 | 3,256 | py | Python | models/__init__.py | davmre/sigvisa | 91a1f163b8f3a258dfb78d88a07f2a11da41bd04 | [
"BSD-3-Clause"
] | null | null | null | models/__init__.py | davmre/sigvisa | 91a1f163b8f3a258dfb78d88a07f2a11da41bd04 | [
"BSD-3-Clause"
] | null | null | null | models/__init__.py | davmre/sigvisa | 91a1f163b8f3a258dfb78d88a07f2a11da41bd04 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import cPickle
class Distribution(object):
    """Abstract base class for probability distributions.

    Subclasses implement ``dim``, ``predict``, ``sample`` and ``log_p``;
    ``deriv_log_p`` provides a generic finite-difference fallback, and
    ``dump_to_file`` pickles the model.
    """

    def dim(self):
        raise NotImplementedError('abstract base class')

    def predict(self, cond=None):
        raise NotImplementedError('abstract base class')

    def sample(self, cond=None, key_prefix=""):
        raise NotImplementedError('abstract base class')

    def log_p(self, x, cond=None, key_prefix=""):
        raise NotImplementedError('abstract base class')

    def deriv_log_p(self, x, idx=None, cond=None, cond_key=None, cond_idx=None, lp0=None, eps=1e-4, **kwargs):
        """
        Derivative of log P(X = x | cond = cond) with
        respect to x_idx (if idx is not None) or with
        respect to cond[cond_key]_{cond_idx} (if those
        quantities are not None).

        The default implementation computes a one-sided numerical
        approximation to the derivative:

            df/dx ~= (f(x + eps) - f(x)) / eps

        Args:
            lp0: optionally precomputed log_p at the current point;
                passing it saves one log_p evaluation.
        """
        # BUGFIX: the original used ``lp0 if lp0 else ...``, which wrongly
        # recomputed log_p whenever the caller passed lp0 == 0.0 -- a
        # perfectly valid log-probability.  Test for None explicitly.
        if lp0 is None:
            lp0 = self.log_p(x=x, cond=cond, **kwargs)
        if cond_key is None:
            # we're computing df/dx
            if idx is None:
                # assume x is scalar
                deriv = (self.log_p(x=x + eps, cond=cond, **kwargs) - lp0) / eps
            else:
                # Perturb one coordinate of x in place, then restore it.
                x[idx] += eps
                deriv = (self.log_p(x=x, cond=cond, **kwargs) - lp0) / eps
                x[idx] -= eps
        else:
            # we're computing df/dcond[cond_key]
            if cond_idx is None:
                cond[cond_key] += eps
                deriv = (self.log_p(x=x, cond=cond, **kwargs) - lp0) / eps
                cond[cond_key] -= eps
            else:
                cond[cond_key][cond_idx] += eps
                deriv = (self.log_p(x=x, cond=cond, **kwargs) - lp0) / eps
                cond[cond_key][cond_idx] -= eps
        return deriv

    def dump_to_file(self, fname):
        # Serialize with the highest pickle protocol for speed/compactness.
        with open(fname, 'wb') as f:
            cPickle.dump(self, f, cPickle.HIGHEST_PROTOCOL)

    @staticmethod
    def load_from_file(fname):
        raise NotImplementedError('abstract base class')

    def save_to_db(self, dbconn):
        raise NotImplementedError('abstract base class')

    @staticmethod
    def load_from_db(dbconn, return_extra=False):
        raise NotImplementedError('abstract base class')
class TimeSeriesDist(Distribution):
    """Abstract base for distributions over length-n series.

    Overrides the predict/sample contract of Distribution to take a
    series length ``n`` instead of a conditioning value.
    """

    def predict(self, n):
        """Return a predicted series of length ``n`` (abstract)."""
        raise NotImplementedError('abstract base class')

    def sample(self, n):
        """Draw a sampled series of length ``n`` (abstract)."""
        raise NotImplementedError('abstract base class')
class DummyModel(Distribution):
    """Trivial distribution: log_p is always 0, sample/predict return a fixed value."""

    def __init__(self, default_value=None, **kwargs):
        super(DummyModel, self).__init__(**kwargs)
        # Fall back to 0.0 when no default value is supplied.
        if default_value is None:
            default_value = 0.0
        self.default_value = default_value

    def log_p(self, x, **kwargs):
        # Every outcome gets log-probability 0.
        return 0.0

    def sample(self, **kwargs):
        return self.default_value

    def predict(self, **kwargs):
        return self.default_value
class Constraint(DummyModel):
    """Hard interval constraint: log_p is 0 on [a, b] and -inf outside.

    Passing None for ``a`` or ``b`` disables that bound.
    """

    def __init__(self, a=0, b=0, **kwargs):
        super(Constraint, self).__init__(**kwargs)
        self.a = a
        self.b = b

    def log_p(self, x, **kwargs):
        below_lower = self.a is not None and x < self.a
        above_upper = self.b is not None and x > self.b
        if below_lower or above_upper:
            return -np.inf
        return 0.0
| 28.313043 | 110 | 0.585381 | 3,207 | 0.984951 | 0 | 0 | 221 | 0.067875 | 0 | 0 | 616 | 0.189189 |
80b986d0b98e79475549e867f1ac605bda0845f7 | 917 | py | Python | foodapp/urls.py | gauravmahale47/Foodstore | 2e43cee54e8dc418e3aa1d572085d100727ff904 | [
"BSD-3-Clause"
] | null | null | null | foodapp/urls.py | gauravmahale47/Foodstore | 2e43cee54e8dc418e3aa1d572085d100727ff904 | [
"BSD-3-Clause"
] | null | null | null | foodapp/urls.py | gauravmahale47/Foodstore | 2e43cee54e8dc418e3aa1d572085d100727ff904 | [
"BSD-3-Clause"
] | null | null | null | from django.urls import path
from foodapp.views import *
from django.views.generic.base import TemplateView
from .views import FoodCreateView,FoodListView
from django.conf import settings
from django.conf.urls.static import static
'''
TemplateView is built-in django Class Based view Class
which is used to render the request to template,
'''
urlpatterns = [
# FBV
# path('',index),
# path('addfood',addfood),
path('',TemplateView.as_view(template_name='foodapp/index.html'),name="Home"),
path('addfood',FoodCreateView.as_view(),name='addfood'),
path('foodlist',FoodListView.as_view(),name='foodmenu'),
path('foodupdate/<pk>',FoodUpdateView.as_view(),name='foodupdate'),
path('fooddelete/<pk>',FoodDeleteView.as_view(),name='fooddelete'),
path('fooddetail/<pk>',FoodDetailView.as_view(),name='fooddetail')
]#+static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | 43.666667 | 82 | 0.735005 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 385 | 0.419847 |
80ba554d90a825fd530cdbabac219c88bedec313 | 5,697 | py | Python | 22/a.py | Unlink/AdventOfCode2018 | 147fb0e9f7b6894688813f237d0e227390a44dbd | [
"MIT"
] | null | null | null | 22/a.py | Unlink/AdventOfCode2018 | 147fb0e9f7b6894688813f237d0e227390a44dbd | [
"MIT"
] | null | null | null | 22/a.py | Unlink/AdventOfCode2018 | 147fb0e9f7b6894688813f237d0e227390a44dbd | [
"MIT"
] | null | null | null | from PIL import Image, ImageDraw
# depth = 6084
# target = (14,709)
# depth = 510
# target = (10,10)
# depth = 4848
# target = (15, 700)
# Active puzzle input (the commented pairs above are earlier examples/attempts).
depth = 9171
target = (7,721)  # (x, y) coordinates of the target cell
# depth = 11820
# target = (7,782)
# Extra grid margin past the target -- presumably so the part-2 search can
# route beyond the target cell; TODO confirm 50 is sufficient for this input.
buffer = 50
def getErrosion(index):
    """Erosion level for a geologic ``index``: (index + depth) mod 20183."""
    shifted = index + depth  # depends on the module-level puzzle depth
    return shifted % 20183
def getType(errosion):
    """Map an erosion level to its terrain glyph: 0 -> '.', 1 -> '=', 2 -> '|'."""
    remainder = errosion % 3
    if remainder == 0:
        return "."
    if remainder == 1:
        return "="
    return "|"
def getRisk(errosion):
    """Risk value of a cell: the erosion level taken mod 3."""
    _, remainder = divmod(errosion, 3)
    return remainder
# Tools permitted on each terrain glyph.  The letters appear to stand for
# "T" = torch, "C" = climbing gear, "" = neither -- TODO confirm against
# the puzzle rules; the code only relies on them as opaque labels.
allowedTools = {
    '.': ["T", "C"],
    '=': ["", "C"],
    '|': ["", "T"]
}
class Location:
    """One cave cell in the part-2 search graph.

    ``tools`` maps a tool label ("T", "C", or "") to the best known travel
    cost for reaching this cell while holding that tool.
    """

    def __init__(self, x, y, type):
        self.x = x
        self.y = y
        self.type = type       # terrain glyph: '.', '=', or '|'
        self.tools = {}        # tool -> best known cost to be here with it
        self.isFinal = False

    def getAdjecment(self, map):
        # Return the in-bounds orthogonal neighbours of this cell.
        # NOTE: the parameter shadows the builtin ``map``.
        adjecment = list()
        if self.x > 0:
            adjecment.append(map[self.y][self.x-1])
        if self.y > 0:
            adjecment.append(map[self.y-1][self.x])
        if self.x+1 < len(map[0]):
            adjecment.append(map[self.y][self.x+1])
        if self.y+1 < len(map):
            adjecment.append(map[self.y+1][self.x])
        return adjecment

    def tryToMoveToNext(self, other):
        """Try to relax ``other``'s cost via this cell.

        For every tool allowed on this terrain, if stepping to ``other``
        with that tool (cost +1) beats its best known cost, record the
        improvement.  Returns True if anything improved, so the caller
        re-queues ``other`` for further propagation.
        """
        moved = False
        for tool in allowedTools[self.type]:
            currentDistance = self.getDistanceWithTool(tool)
            #print("Try to move from "+str((self.x, self.y))+"("+self.type+") to "+str((other.x, other.y))+"("+other.type+") with tool "+tool)
            #print("Distance "+str(currentDistance)+", allowed? "+str(other.isAccessAllowed(tool)))
            if other.isAccessAllowed(tool) and other.getDistanceWithTool(tool) > currentDistance + 1:
                #print("Other distance with tool "+str(other.getDistanceWithTool(tool)))
                other.setDistanceWithTool(tool, currentDistance + 1)
                moved = True
            #elif not(other.isAccessAllowed(tool)) and other.getDistanceWithTool(tool) > currentDistance + 8:
            #    other.setDistanceWithTool(tool, currentDistance + 8)
            #    moved = True
        return moved

    def getDistanceWithTool(self, tool):
        """Best known cost to be at this cell holding ``tool``.

        When ``tool`` has no recorded cost, fall back to the cheapest
        recorded cost with any other tool plus 7 -- presumably the
        tool-switch cost (it matches the 7 in calculateMoveCost).
        An empty ``tools`` dict yields the 999999999 sentinel (+7).
        """
        if tool in self.tools:
            return self.tools[tool]
        min = 999999999   # NOTE: shadows the builtin ``min``
        for key, value in self.tools.items():
            if min > value:
                min = value
        return min + 7

    def setDistanceWithTool(self, tool, distance):
        # Guard against recording a tool that is forbidden on this terrain;
        # the three checks mirror isAccessAllowed below.
        if self.type == "." and tool == "":
            raise Exception("Cannot be reached")
        if self.type == "=" and tool == "T":
            raise Exception("Cannot be reached")
        if self.type == "|" and tool == "C":
            raise Exception("Cannot be reached")
        self.tools[tool] = distance

    def calculateMoveCost(self, tool, other):
        # Step cost: 1 if the tool works at the destination, else 7.
        # NOTE(review): defined but not called anywhere visible in this file.
        return 1 if other.isAccessAllowed(tool) else 7

    def isAccessAllowed(self, tool):
        # Each terrain forbids exactly one tool; returns None for an
        # unrecognized terrain glyph (no final else).
        if self.type == ".":
            return tool != ""
        elif self.type == "=":
            return tool != "T"
        elif self.type == "|":
            return tool != "C"
# --- Part 1: build the cave grid -----------------------------------------
# cave[j][i] holds the geologic index of cell (i, j); erosion levels are
# derived on demand via getErrosion().  The grid extends ``buffer`` cells
# past the target in each direction.
cave = [[0 for i in range(target[0]+buffer)] for j in range(target[1]+buffer)]
for j in range(target[1]+buffer):
    for i in range(target[0]+buffer):
        if i == 0 and j == 0:
            cave[j][i] = 0                      # mouth of the cave
        elif (i, j) == target:
            cave[j][i] = 0                      # target cell is special-cased to 0
        elif i != 0 and j != 0:
            # Interior cell: product of the two neighbours' erosion levels.
            cave[j][i] = getErrosion(cave[j-1][i]) * getErrosion(cave[j][i-1])
        elif i == 0:
            cave[j][i] = j*48271                # left edge
        elif j == 0:
            cave[j][i] = i*16807                # top edge

# Render the map for inspection ('M' = mouth, 'T' = target).
for j in range(target[1]+buffer):
    for i in range(target[0]+buffer):
        if i == 0 and j == 0:
            print("M", end="")
        elif (i, j) == target:
            print("T", end="")
        else:
            print(getType(getErrosion(cave[j][i])), end="")
    print()

# Part 1 answer: total risk over the rectangle from mouth to target, inclusive.
riskLevel = 0
for j in range(target[1]+1):
    for i in range(target[0]+1):
        riskLevel += getRisk(getErrosion(cave[j][i]))
print(riskLevel)
# --- Part 2: worklist relaxation over (cell, tool) states ----------------
# Wrap every cell in a Location; each tracks its best known cost per tool.
cave2 = [[Location(x, y, getType(getErrosion(cave[y][x]))) for x in range(target[0]+buffer)] for y in range(target[1]+buffer)]
cave2[0][0].tools['T'] = 0   # start at the mouth holding tool 'T' at cost 0

# Bellman-Ford-style worklist: whenever a neighbour's cost improves, requeue
# it so the improvement propagates.  NOTE: ``next`` shadows the builtin.
toProcess = list([cave2[0][0]])
while len(toProcess):
    current = toProcess.pop(0)
    #print("Processing "+str((current.x, current.y)))
    for next in current.getAdjecment(cave2):
        if current.tryToMoveToNext(next):
            toProcess.append(next)

print("Calculated nearest path")
# Best known per-tool costs at the target cell.
print(cave2[target[1]][target[0]].tools)
# for j in range(target[1]+buffer):
# for i in range(target[0]+buffer):
# print(getType(getErrosion(cave[j][i]))+str(cave2[j][i].tools), end=";")
# print()
#print path?
# currentNode = cave2[target[1]][target[0]]
# while currentNode.x != 0 or currentNode.y != 0:
# print(str((currentNode.x, currentNode.y))+str(currentNode.tools))
# min = 9999999
# for next in currentNode.getAdjecment(cave2):
# for tool, distance in next.tools.items():
# if distance < min:
# currentNode = next
# min = distance
# print(str((currentNode.x, currentNode.y))+str(currentNode.tools))
# img = Image.new('RGB', (len(cave2[0])*5, len(cave2)*5))
# y = 0
# for row in cave2:
# x = 0
# for c in row:
# color = (0, 0, 0)
# if c.type == ".":
# color = (100, 100, 100)
# if c.type == "|":
# color = (255, 255, 0)
# if c.type == "=":
# color = (0, 0, 200)
# for aa in range(4):
# for bb in range(4):
# img.putpixel((x*5+aa, y*5+bb), color)
# x += 1
# y += 1
# draw = ImageDraw.Draw(img)
# draw.line((100,200, 150,300), fill=128)
# img.save('result.png') | 30.303191 | 143 | 0.530981 | 2,503 | 0.439354 | 0 | 0 | 0 | 0 | 0 | 0 | 1,798 | 0.315605 |