hexsha stringlengths 40 40 | size int64 7 1.04M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 247 | max_stars_repo_name stringlengths 4 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 247 | max_issues_repo_name stringlengths 4 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 247 | max_forks_repo_name stringlengths 4 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.04M | avg_line_length float64 1.77 618k | max_line_length int64 1 1.02M | alphanum_fraction float64 0 1 | original_content stringlengths 7 1.04M | filtered:remove_function_no_docstring int64 -102 942k | filtered:remove_class_no_docstring int64 -354 977k | filtered:remove_delete_markers int64 0 60.1k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dfe5f278d54be2b0524b93cd4cd81b3bcb66e416 | 468 | py | Python | main.py | sagnik1511/Viz-It | b13c32d31bd22e8ebb98ded51d933f1431be1b80 | [
"MIT"
] | 18 | 2021-11-28T11:02:31.000Z | 2021-12-08T05:45:04.000Z | main.py | polarizz17/Viz-It | 86ee4c0ababc00c165371b060de93a74fc35dcfc | [
"MIT"
] | null | null | null | main.py | polarizz17/Viz-It | 86ee4c0ababc00c165371b060de93a74fc35dcfc | [
"MIT"
] | 1 | 2021-12-08T17:25:49.000Z | 2021-12-08T17:25:49.000Z | from apps import overview, home, eda, correlation
import streamlit as st
from utils import dev_details
pages = {
'Home' : home.app,
'Overview': overview.app,
'EDA': eda.app,
"Correlation" : correlation.app,
}
if __name__ == "__main__":
_, logo_col, _ = st.sidebar.columns([0.3, 1, 1])
logo_col.image("assets/logo.png", width = 200)
app = st.sidebar.selectbox('PAGES', pages.keys())
dev_details()
if app != "":
pages[app]() | 26 | 53 | 0.630342 | from apps import overview, home, eda, correlation
import streamlit as st
from utils import dev_details
pages = {
'Home' : home.app,
'Overview': overview.app,
'EDA': eda.app,
"Correlation" : correlation.app,
}
if __name__ == "__main__":
_, logo_col, _ = st.sidebar.columns([0.3, 1, 1])
logo_col.image("assets/logo.png", width = 200)
app = st.sidebar.selectbox('PAGES', pages.keys())
dev_details()
if app != "":
pages[app]() | 0 | 0 | 0 |
a1be89117cd41a7c7d30e1dffb622e338da4249b | 2,631 | py | Python | src/data/user_input_rules.py | AlanGanem/fastai-flow | f5b873fd3bdf917be0bd958b144214d0568df15c | [
"MIT"
] | null | null | null | src/data/user_input_rules.py | AlanGanem/fastai-flow | f5b873fd3bdf917be0bd958b144214d0568df15c | [
"MIT"
] | null | null | null | src/data/user_input_rules.py | AlanGanem/fastai-flow | f5b873fd3bdf917be0bd958b144214d0568df15c | [
"MIT"
] | null | null | null | # Module
import itertools
import pandas as pd
import tqdm
import json
def create_rule_masks(df, RULES_JSON_PATH):
'''
Apply rules to IVAMIRO in df (only & operator suported)
:param df: dataframe to apply rules
:param rules_json: json or dict containing rules (each rule is also a dictionary containinf values of features)
:return: df with new values of IVAMIRO
'''
rules_json = import_rules_dict(RULES_JSON_PATH)
# create msk_dict
msk_dict = {}
for id_ in tqdm.tqdm(rules_json):
i = 0
for feature in rules_json[id_]['condition']:
if i == 0:
msk = df[feature].isin(rules_json[id_]['condition'][feature])
else:
msk = msk & df[feature].isin(rules_json[id_]['condition'][feature])
i += 1
msk_dict[id_] = {'mask': msk, 'value': rules_json[id_][
'implication']} # rules_json[id_]['implication'] is a dict of {<feature>:[<value>]}
return msk_dict
def apply_rules(df, RULES_JSON_PATH):
'''
Applt rules to dataframe
:param df:
:param mask_dict:
:return:
'''
mask_dict = create_rule_masks(df, RULES_JSON_PATH)
df = df.copy()
for id_ in tqdm.tqdm(mask_dict):
for feature in mask_dict[id_]['value']:
df.loc[mask_dict[id_]['mask'], feature] = mask_dict[id_]['value'][feature]
return df
| 28.912088 | 115 | 0.659445 | # Module
import itertools
import pandas as pd
import tqdm
import json
def import_rules_dict(RULES_JSON_PATH):
# open rules json
with open(RULES_JSON_PATH) as json_file:
rules_dict = dict(json.load(json_file))
return rules_dict
def update_rules_json(new_rules, RULES_PATH):
# glob
# json loads
return
def check_rule(RULES_JSON_PATH):
rules_dict = import_rules_dict(RULES_JSON_PATH)
#
all_rules = {}
for id_ in rules_dict:
features = [rules_dict[id_]['condition'][feature] for feature in rules_dict[id_]['condition']]
all_rules[id_] = set(itertools.product(*features))
rules_list = [all_rules[id_] for id_ in all_rules]
intersections = set.intersection(*rules_list)
intersections_by_id = {id_: (all_rules[id_] & intersections) for id_ in all_rules}
id_by_intersection = {comb: [] for comb in intersections}
for id_ in intersections_by_id:
for comb in intersections_by_id[id_]:
id_by_intersection[comb].append(id_)
return rules_dict
def create_rule_masks(df, RULES_JSON_PATH):
'''
Apply rules to IVAMIRO in df (only & operator suported)
:param df: dataframe to apply rules
:param rules_json: json or dict containing rules (each rule is also a dictionary containinf values of features)
:return: df with new values of IVAMIRO
'''
rules_json = import_rules_dict(RULES_JSON_PATH)
# create msk_dict
msk_dict = {}
for id_ in tqdm.tqdm(rules_json):
i = 0
for feature in rules_json[id_]['condition']:
if i == 0:
msk = df[feature].isin(rules_json[id_]['condition'][feature])
else:
msk = msk & df[feature].isin(rules_json[id_]['condition'][feature])
i += 1
msk_dict[id_] = {'mask': msk, 'value': rules_json[id_][
'implication']} # rules_json[id_]['implication'] is a dict of {<feature>:[<value>]}
return msk_dict
def apply_rules(df, RULES_JSON_PATH):
'''
Applt rules to dataframe
:param df:
:param mask_dict:
:return:
'''
mask_dict = create_rule_masks(df, RULES_JSON_PATH)
df = df.copy()
for id_ in tqdm.tqdm(mask_dict):
for feature in mask_dict[id_]['value']:
df.loc[mask_dict[id_]['mask'], feature] = mask_dict[id_]['value'][feature]
return df
def show_rules_df(RULES_JSON_PATH):
rules_dict = import_rules_dict(RULES_JSON_PATH)
df = pd.concat({k: pd.DataFrame(v) for k, v in (check_rule(RULES_JSON_PATH=RULES_PATH)).items()})
df.index.set_names(['id', 'column'], inplace=True)
return df | 1,141 | 0 | 92 |
671e724d740e2e5d0e7ba1f9f739c84cda990ccb | 4,086 | py | Python | work_in_progress/test/NeuralNetwork/LSTM.py | ItamarRocha/StockMarketPy | 9236cc296fc51214f0caf6ada1e905fbc9e12ac4 | [
"MIT"
] | 3 | 2020-05-01T15:17:28.000Z | 2021-07-16T17:55:23.000Z | work_in_progress/test/NeuralNetwork/LSTM.py | ItamarRocha/StockMarketPy | 9236cc296fc51214f0caf6ada1e905fbc9e12ac4 | [
"MIT"
] | 1 | 2020-05-31T22:37:37.000Z | 2020-06-01T02:14:15.000Z | work_in_progress/test/NeuralNetwork/LSTM.py | ItamarRocha/StockMarketPy | 9236cc296fc51214f0caf6ada1e905fbc9e12ac4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 9 18:05:09 2020
@author: itamar
"""
import math
import pandas_datareader as web
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import Dense, LSTM
import matplotlib.pyplot as plt
plt.style.use('fivethirtyeight')
#Get the stock quote
df = web.DataReader('HBOR3.SA', data_source='yahoo', start='2011-01-01', end='2020-01-08')
#Show teh data
#Visualize the closing price history
plt.figure(figsize=(16,8))
plt.title('Close Price History')
plt.plot(df['Close'])
plt.xlabel('Date', fontsize=18)
plt.ylabel('Close Price USD ($)', fontsize=18)
plt.show()
#Create a new dataframe with only the 'Close column
data = df.filter(['Close'])
#Convert the dataframe to a numpy array
dataset = data.values
#Get the number of rows to train the model on
training_data_len = math.ceil( len(dataset) * .8 )
training_data_len
#Scale the data
scaler = MinMaxScaler(feature_range=(0,1))
scaled_data = scaler.fit_transform(dataset)
scaled_data
#Create the training data set
#Create the scaled training data set
train_data = scaled_data[0:training_data_len , :]
#Split the data into x_train and y_train data sets
x_train = []
y_train = []
for i in range(60, len(train_data)):
x_train.append(train_data[i-60:i, 0])
y_train.append(train_data[i, 0])
if i<= 61:
print(x_train)
print(y_train)
print()
#Convert the x_train and y_train to numpy arrays
x_train, y_train = np.array(x_train), np.array(y_train)
#Reshape the data
x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))
x_train.shape
#Build the LSTM model
model = Sequential()
model.add(LSTM(50, return_sequences=True, input_shape= (x_train.shape[1], 1)))
model.add(LSTM(50, return_sequences= False))
model.add(Dense(25))
model.add(Dense(1))
#Compile the model
model.compile(optimizer='adam', loss='mean_squared_error')
#Train the model
model.fit(x_train, y_train, batch_size=1, epochs=1)
#Create the testing data set
#Create a new array containing scaled values from index 1543 to 2002
test_data = scaled_data[training_data_len - 60: , :]
#Create the data sets x_test and y_test
x_test = []
y_test = dataset[training_data_len:, :]
for i in range(60, len(test_data)):
x_test.append(test_data[i-60:i, 0])
#Convert the data to a numpy array
x_test = np.array(x_test)
#Reshape the data
x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1 ))
#Get the models predicted price values
predictions = model.predict(x_test)
predictions = scaler.inverse_transform(predictions)
#Get the root mean squared error (RMSE)
rmse=np.sqrt(np.mean(((predictions- y_test)**2)))
rmse
#Plot the data
train = data[:training_data_len]
valid = data[training_data_len:]
valid['Predictions'] = predictions
#Visualize the data
plt.figure(figsize=(16,8))
plt.title('Model')
plt.xlabel('Date', fontsize=18)
plt.ylabel('Close Price USD ($)', fontsize=18)
plt.plot(train['Close'])
plt.plot(valid[['Close', 'Predictions']])
plt.legend(['Train', 'Val', 'Predictions'], loc='lower right')
plt.show()
#Get the quote
apple_quote = web.DataReader('HBOR3.SA', data_source='yahoo', start='2011-01-01', end='2020-01-08')
#Create a new dataframe
new_df = apple_quote.filter(['Close'])
#Get teh last 60 day closing price values and convert the dataframe to an array
last_60_days = new_df[-60:].values
#Scale the data to be values between 0 and 1
last_60_days_scaled = scaler.transform(last_60_days)
#Create an empty list
X_test = []
#Append teh past 60 days
X_test.append(last_60_days_scaled)
#Convert the X_test data set to a numpy array
X_test = np.array(X_test)
#Reshape the data
X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))
#Get the predicted scaled price
pred_price = model.predict(X_test)
#undo the scaling
pred_price = scaler.inverse_transform(pred_price)
print(pred_price)
#Get the quote
apple_quote2 = web.DataReader('HBOR3.SA', data_source='yahoo', start='2020-01-09', end='2020-01-09')
print(apple_quote2['Close']) | 26.36129 | 100 | 0.740822 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 9 18:05:09 2020
@author: itamar
"""
import math
import pandas_datareader as web
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import Dense, LSTM
import matplotlib.pyplot as plt
plt.style.use('fivethirtyeight')
#Get the stock quote
df = web.DataReader('HBOR3.SA', data_source='yahoo', start='2011-01-01', end='2020-01-08')
#Show teh data
#Visualize the closing price history
plt.figure(figsize=(16,8))
plt.title('Close Price History')
plt.plot(df['Close'])
plt.xlabel('Date', fontsize=18)
plt.ylabel('Close Price USD ($)', fontsize=18)
plt.show()
#Create a new dataframe with only the 'Close column
data = df.filter(['Close'])
#Convert the dataframe to a numpy array
dataset = data.values
#Get the number of rows to train the model on
training_data_len = math.ceil( len(dataset) * .8 )
training_data_len
#Scale the data
scaler = MinMaxScaler(feature_range=(0,1))
scaled_data = scaler.fit_transform(dataset)
scaled_data
#Create the training data set
#Create the scaled training data set
train_data = scaled_data[0:training_data_len , :]
#Split the data into x_train and y_train data sets
x_train = []
y_train = []
for i in range(60, len(train_data)):
x_train.append(train_data[i-60:i, 0])
y_train.append(train_data[i, 0])
if i<= 61:
print(x_train)
print(y_train)
print()
#Convert the x_train and y_train to numpy arrays
x_train, y_train = np.array(x_train), np.array(y_train)
#Reshape the data
x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))
x_train.shape
#Build the LSTM model
model = Sequential()
model.add(LSTM(50, return_sequences=True, input_shape= (x_train.shape[1], 1)))
model.add(LSTM(50, return_sequences= False))
model.add(Dense(25))
model.add(Dense(1))
#Compile the model
model.compile(optimizer='adam', loss='mean_squared_error')
#Train the model
model.fit(x_train, y_train, batch_size=1, epochs=1)
#Create the testing data set
#Create a new array containing scaled values from index 1543 to 2002
test_data = scaled_data[training_data_len - 60: , :]
#Create the data sets x_test and y_test
x_test = []
y_test = dataset[training_data_len:, :]
for i in range(60, len(test_data)):
x_test.append(test_data[i-60:i, 0])
#Convert the data to a numpy array
x_test = np.array(x_test)
#Reshape the data
x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1 ))
#Get the models predicted price values
predictions = model.predict(x_test)
predictions = scaler.inverse_transform(predictions)
#Get the root mean squared error (RMSE)
rmse=np.sqrt(np.mean(((predictions- y_test)**2)))
rmse
#Plot the data
train = data[:training_data_len]
valid = data[training_data_len:]
valid['Predictions'] = predictions
#Visualize the data
plt.figure(figsize=(16,8))
plt.title('Model')
plt.xlabel('Date', fontsize=18)
plt.ylabel('Close Price USD ($)', fontsize=18)
plt.plot(train['Close'])
plt.plot(valid[['Close', 'Predictions']])
plt.legend(['Train', 'Val', 'Predictions'], loc='lower right')
plt.show()
#Get the quote
apple_quote = web.DataReader('HBOR3.SA', data_source='yahoo', start='2011-01-01', end='2020-01-08')
#Create a new dataframe
new_df = apple_quote.filter(['Close'])
#Get teh last 60 day closing price values and convert the dataframe to an array
last_60_days = new_df[-60:].values
#Scale the data to be values between 0 and 1
last_60_days_scaled = scaler.transform(last_60_days)
#Create an empty list
X_test = []
#Append teh past 60 days
X_test.append(last_60_days_scaled)
#Convert the X_test data set to a numpy array
X_test = np.array(X_test)
#Reshape the data
X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))
#Get the predicted scaled price
pred_price = model.predict(X_test)
#undo the scaling
pred_price = scaler.inverse_transform(pred_price)
print(pred_price)
#Get the quote
apple_quote2 = web.DataReader('HBOR3.SA', data_source='yahoo', start='2020-01-09', end='2020-01-09')
print(apple_quote2['Close']) | 0 | 0 | 0 |
ed1edf37743e73af672fba4b8a992d0413010776 | 3,118 | py | Python | SynGuar/helper_eval/eval_consts.py | HALOCORE/SynGuar | 8f7f9ba52e83091ad3def501169fd60d20b28321 | [
"MIT"
] | 1 | 2021-06-23T05:10:36.000Z | 2021-06-23T05:10:36.000Z | SynGuar/helper_eval/eval_consts.py | HALOCORE/SynGuar | 8f7f9ba52e83091ad3def501169fd60d20b28321 | [
"MIT"
] | null | null | null | SynGuar/helper_eval/eval_consts.py | HALOCORE/SynGuar | 8f7f9ba52e83091ad3def501169fd60d20b28321 | [
"MIT"
] | null | null | null | import os
import json
abs_join = lambda p1, p2 : os.path.abspath(os.path.join(p1, p2))
SYNTHESIZER_STRPROSE = "StrPROSE"
SYNTHESIZER_STRSTUN = "StrSTUN"
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
EXAMPLE_STRPROSE_RELDIR = "../../benchmark/strprose/example_files"
EXAMPLE_STRPROSE_FULLDIR = abs_join(SCRIPT_DIR, EXAMPLE_STRPROSE_RELDIR)
EXAMPLE_STRSTUN_RELDIR = "../../benchmark/strstun/example_files"
EXAMPLE_STRSTUN_FULLDIR = abs_join(SCRIPT_DIR, EXAMPLE_STRSTUN_RELDIR)
COMP_STRSTUN_RELDIR = "../../benchmark/strstun/targets"
COMP_STRSTUN_FULLDIR = abs_join(SCRIPT_DIR, COMP_STRSTUN_RELDIR)
SEED_STRSTUN_FULLPATH = abs_join(SCRIPT_DIR, "../../benchmark/strstun/example_files/_seeds.json")
TARGET_STRPROSE_RELDIR = "../../benchmark/strprose/targets"
TARGET_STRPROSE_FULLDIR = abs_join(SCRIPT_DIR, TARGET_STRPROSE_RELDIR)
TARGET_STRSTUN_RELDIR = "../../benchmark/strstun/targets"
TARGET_STRSTUN_FULLDIR = abs_join(SCRIPT_DIR, TARGET_STRSTUN_RELDIR)
SUMMARY_RELDIR = "../../outputs/summaries"
SUMMARY_FULLDIR = abs_join(SCRIPT_DIR, SUMMARY_RELDIR)
ANALYSIS_SAMPLESIZE_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_samplesize_step{x}.json")
ANALYSIS_SAMPLESIZE_STRPROSE_FULLPATH = abs_join(SUMMARY_FULLDIR, "StrPROSE-samplesize.csv")
ANALYSIS_SAMPLESIZE_STRSTUN_FULLPATH = abs_join(SUMMARY_FULLDIR, "StrSTUN-samplesize.csv")
ANALYSIS_RESULTPROG_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_resultprog_step{x}.json")
ANALYSIS_RESOURCE_USAGE_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_resourceusage_step{x}.json")
ANALYSIS_RESOURCE_USAGE_STRSTUN_FULLPATH = abs_join(SUMMARY_FULLDIR, f"StrSTUN-resourceusage.csv")
ANALYSIS_BASELINE4_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_baseline4_step{x}.json")
ANALYSIS_CHOOSEH_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_chooseh_step{x}.json")
ANALYSIS_CHOOSEH_STRSTUN_FULLPATH = abs_join(SUMMARY_FULLDIR, "StrSTUN-chooseh.csv")
ANALYSIS_SPACEDROP_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_spacedrop_step{x}.json")
ANALYSIS_SPACEDROP_STRPROSE_FULLPATH = abs_join(SUMMARY_FULLDIR, "StrPROSE-spacedrop.csv")
SYNGUAR_API_ENDPOINT = "http://localhost:5262/synguar"
SYNTH_API_ENDPOINT = "http://localhost:5261/synth"
STRPROSE_SPACEDROP_SAMPLE_SIZE = 30
| 47.969231 | 135 | 0.787684 | import os
import json
abs_join = lambda p1, p2 : os.path.abspath(os.path.join(p1, p2))
SYNTHESIZER_STRPROSE = "StrPROSE"
SYNTHESIZER_STRSTUN = "StrSTUN"
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
EXAMPLE_STRPROSE_RELDIR = "../../benchmark/strprose/example_files"
EXAMPLE_STRPROSE_FULLDIR = abs_join(SCRIPT_DIR, EXAMPLE_STRPROSE_RELDIR)
EXAMPLE_STRSTUN_RELDIR = "../../benchmark/strstun/example_files"
EXAMPLE_STRSTUN_FULLDIR = abs_join(SCRIPT_DIR, EXAMPLE_STRSTUN_RELDIR)
COMP_STRSTUN_RELDIR = "../../benchmark/strstun/targets"
COMP_STRSTUN_FULLDIR = abs_join(SCRIPT_DIR, COMP_STRSTUN_RELDIR)
SEED_STRSTUN_FULLPATH = abs_join(SCRIPT_DIR, "../../benchmark/strstun/example_files/_seeds.json")
TARGET_STRPROSE_RELDIR = "../../benchmark/strprose/targets"
TARGET_STRPROSE_FULLDIR = abs_join(SCRIPT_DIR, TARGET_STRPROSE_RELDIR)
TARGET_STRSTUN_RELDIR = "../../benchmark/strstun/targets"
TARGET_STRSTUN_FULLDIR = abs_join(SCRIPT_DIR, TARGET_STRSTUN_RELDIR)
SUMMARY_RELDIR = "../../outputs/summaries"
SUMMARY_FULLDIR = abs_join(SCRIPT_DIR, SUMMARY_RELDIR)
ANALYSIS_SAMPLESIZE_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_samplesize_step{x}.json")
ANALYSIS_SAMPLESIZE_STRPROSE_FULLPATH = abs_join(SUMMARY_FULLDIR, "StrPROSE-samplesize.csv")
ANALYSIS_SAMPLESIZE_STRSTUN_FULLPATH = abs_join(SUMMARY_FULLDIR, "StrSTUN-samplesize.csv")
ANALYSIS_RESULTPROG_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_resultprog_step{x}.json")
ANALYSIS_RESOURCE_USAGE_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_resourceusage_step{x}.json")
ANALYSIS_RESOURCE_USAGE_STRSTUN_FULLPATH = abs_join(SUMMARY_FULLDIR, f"StrSTUN-resourceusage.csv")
ANALYSIS_BASELINE4_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_baseline4_step{x}.json")
ANALYSIS_CHOOSEH_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_chooseh_step{x}.json")
ANALYSIS_CHOOSEH_STRSTUN_FULLPATH = abs_join(SUMMARY_FULLDIR, "StrSTUN-chooseh.csv")
ANALYSIS_SPACEDROP_STEPS_FULLPATH = lambda synthesizer, x : abs_join(SUMMARY_FULLDIR, f"{synthesizer}_spacedrop_step{x}.json")
ANALYSIS_SPACEDROP_STRPROSE_FULLPATH = abs_join(SUMMARY_FULLDIR, "StrPROSE-spacedrop.csv")
SYNGUAR_API_ENDPOINT = "http://localhost:5262/synguar"
SYNTH_API_ENDPOINT = "http://localhost:5261/synth"
STRPROSE_SPACEDROP_SAMPLE_SIZE = 30
def load_json(filename):
print("# [eval] load_json:", filename)
data = None
with open(filename, 'r') as f:
data = json.load(f)
return data
def save_json(filename, data):
print("# [eval] save_json:", filename)
with open(filename, 'w') as f:
json.dump(data, f, indent=2)
def cached_step(step_cachepath, step_func):
print("\n# [eval] cached_step. Checkpath: " + step_cachepath)
if os.path.exists(step_cachepath):
step_cache = load_json(step_cachepath)
if step_cache is not None:
return step_cache
# run step_func and save.
data = step_func()
save_json(step_cachepath, data)
return data | 609 | 0 | 69 |
27977371964e7919c01e51368ef576b5cb28a118 | 89 | py | Python | src/filters/subject.py | Radarslan/gmail_api | 39baae566af3f5fb2b9cd4553f45c17b2bbf134e | [
"MIT"
] | null | null | null | src/filters/subject.py | Radarslan/gmail_api | 39baae566af3f5fb2b9cd4553f45c17b2bbf134e | [
"MIT"
] | null | null | null | src/filters/subject.py | Radarslan/gmail_api | 39baae566af3f5fb2b9cd4553f45c17b2bbf134e | [
"MIT"
] | null | null | null | from src.filters.filter import Filter
| 14.833333 | 37 | 0.719101 | from src.filters.filter import Filter
class Subject(Filter):
name: str = "subject"
| 0 | 27 | 23 |
343d14440a7d75cb041e0a1f8ff564ac41c3ccd8 | 1,572 | py | Python | backend/secret_santa/views.py | isobelscott/secret-santa | 6baac29724560d772b94c37f5783a15b91efbbfa | [
"MIT"
] | 1 | 2021-01-26T04:13:47.000Z | 2021-01-26T04:13:47.000Z | backend/secret_santa/views.py | isobelscott/secret-santa | 6baac29724560d772b94c37f5783a15b91efbbfa | [
"MIT"
] | null | null | null | backend/secret_santa/views.py | isobelscott/secret-santa | 6baac29724560d772b94c37f5783a15b91efbbfa | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.http import HttpResponse
from django.views import View
from .models import Person, Party, Group
from rest_framework import pagination, viewsets, filters
from .serializers import PersonSerializer, PartySerializer, GroupSerializer
class IsGroupMemberFilterBackend(filters.BaseFilterBackend):
"""
Filter that only returns groups for which person is in group.
"""
| 32.081633 | 97 | 0.701654 | from django.shortcuts import render
from django.http import HttpResponse
from django.views import View
from .models import Person, Party, Group
from rest_framework import pagination, viewsets, filters
from .serializers import PersonSerializer, PartySerializer, GroupSerializer
class CreatePerson(View):
def post(self, request, *args, **kwargs):
person = Person()
person.first_name = request['first_name']
person.email = request['email']
person.exclusions = request['exclusions']
person.save()
class PersonViewSet(viewsets.ModelViewSet):
queryset = Person.objects.all()
serializer_class = PersonSerializer
filter_fields = ('id', 'exclusions')
class PartyViewSet(viewsets.ModelViewSet):
queryset = Party.objects.all()
serializer_class = PartySerializer
filter_fields = ('id', 'party_name', 'gift_price_max', 'event_date', 'group_id', 'organizer')
class IsGroupMemberFilterBackend(filters.BaseFilterBackend):
"""
Filter that only returns groups for which person is in group.
"""
def filter_queryset(self, request, queryset, view):
data = request.GET
if data.get("person_id"):
return queryset.filter(persons=data["person_id"])
elif data.get("id"):
return queryset.filter(id=data["id"])
else:
return queryset
class GroupViewSet(viewsets.ModelViewSet):
queryset = Group.objects.all()
serializer_class = GroupSerializer
filter_backends = [IsGroupMemberFilterBackend]
filter_fields = ('id', 'persons')
| 482 | 520 | 144 |
b5d04dc33a64b3222ca5e3accc526395c0691d12 | 907 | py | Python | training/utils.py | axsaucedo/Voice-Cloning-App | 0dceb0175647101ca0873427d92bd3b54487b306 | [
"BSD-3-Clause"
] | 1 | 2021-10-31T23:03:44.000Z | 2021-10-31T23:03:44.000Z | training/utils.py | axsaucedo/Voice-Cloning-App | 0dceb0175647101ca0873427d92bd3b54487b306 | [
"BSD-3-Clause"
] | null | null | null | training/utils.py | axsaucedo/Voice-Cloning-App | 0dceb0175647101ca0873427d92bd3b54487b306 | [
"BSD-3-Clause"
] | null | null | null | import shutil
import torch
CHECKPOINT_SIZE_MB = 333
BATCH_SIZE_PER_GB = 2.5
LEARNING_RATE_PER_BATCH = 3.125e-5
| 28.34375 | 108 | 0.754135 | import shutil
import torch
CHECKPOINT_SIZE_MB = 333
BATCH_SIZE_PER_GB = 2.5
LEARNING_RATE_PER_BATCH = 3.125e-5
def get_available_memory():
gpu_memory = torch.cuda.get_device_properties(0).total_memory
memory_in_use = torch.cuda.memory_allocated(0)
available_memory = gpu_memory - memory_in_use
available_memory_gb = available_memory // 1024 // 1024 // 1024
return available_memory_gb
def get_batch_size(available_memory_gb):
return int(available_memory_gb * BATCH_SIZE_PER_GB)
def get_learning_rate(batch_size):
return batch_size * LEARNING_RATE_PER_BATCH
def check_space(num_checkpoints):
_, _, free = shutil.disk_usage("/")
free_mb = free // (2 ** 20)
required_mb = CHECKPOINT_SIZE_MB * num_checkpoints
assert (
free_mb >= required_mb
), f"Insufficent storage space (requires {required_mb}mb). Reduce checkpoint frequency or free up space"
| 699 | 0 | 92 |
2a757b168cb5f8c7e9122644ab309c15d2a2fe69 | 4,326 | py | Python | ub/modules/button.py | TAMILVIP007/Ichigo | 9be07bcc96f9e714b745f6d72dfa521f0a34dc90 | [
"MIT"
] | null | null | null | ub/modules/button.py | TAMILVIP007/Ichigo | 9be07bcc96f9e714b745f6d72dfa521f0a34dc90 | [
"MIT"
] | null | null | null | ub/modules/button.py | TAMILVIP007/Ichigo | 9be07bcc96f9e714b745f6d72dfa521f0a34dc90 | [
"MIT"
] | 1 | 2022-03-09T14:42:48.000Z | 2022-03-09T14:42:48.000Z | # Copyright (C) 2020 sandeep.n(π.$)
# button post makker for catub thanks to uniborg for the base
# by @sandy1709 (@mrconfused)
import os
import re
from ub import tebot as tgbot
from telethon import Button
from ub import bot
from ub import CMD_HELP, client
from ub.javes_main.heroku_var import Config
from .. import CMD_HELP
from ..utils import admin_cmd, edit_or_reply
from ub.javes_main.heroku_var import Var
from ub import bot
# regex obtained from:
# https://github.com/PaulSonOfLars/tgbot/blob/master/tg_bot/modules/helper_funcs/string_handling.py#L23
BTN_URL_REGEX = re.compile(r"(\[([^\[]+?)\]\<buttonurl:(?:/{0,2})(.+?)(:same)?\>)")
#BY CAT USERBOT
@bot.on(admin_cmd(pattern=r"cbutton(?: |$)(.*)", outgoing=True))
# Helpers
@bot.on(admin_cmd(pattern=r"ibutton( (.*)|$)", outgoing=True))
CMD_HELP.update(
{
"button": "**Plugin : **`button`\
\n\n**SYNTAX : **`.cbutton`\
\n**USAGE :** Buttons must be in the format as [Name on button]<buttonurl:link you want to open> and markdown is Default to html\
\n**EXAMPLE :** `.cbutton shivam [google]<buttonurl:https://www.google.com> [Javes2.0]<buttonurl:https://github.com/Sh1vam/javes-2.0:same> [Channel]<buttonurl:https://t.me/plugines>`\
\n\n**SYNTAX : **`.ibutton`\
\n**USAGE :** Buttons must be in the format as [Name on button]<buttonurl:link you want to open>\
\n**EXAMPLE :** `.ibutton Sh1vam [google]<buttonurl:https://www.google.com> [Javes2.0]<buttonurl:https://github.com/Sh1vam/javes-2.0:same> [Channel]<buttonurl:https://t.me/plugines>`\
\n**EXAMPLE :** `@yourbotusername buttons Shivam [google]<buttonurl:https://www.google.com> [Javes2.0]<buttonurl:https://github.com/Sh1vam/javes-2.0:same> [Channel]<buttonurl:https://t.me/plugines>`\
"
}
)
| 37.293103 | 200 | 0.654877 | # Copyright (C) 2020 sandeep.n(π.$)
# button post makker for catub thanks to uniborg for the base
# by @sandy1709 (@mrconfused)
import os
import re
from ub import tebot as tgbot
from telethon import Button
from ub import bot
from ub import CMD_HELP, client
from ub.javes_main.heroku_var import Config
from .. import CMD_HELP
from ..utils import admin_cmd, edit_or_reply
from ub.javes_main.heroku_var import Var
from ub import bot
# regex obtained from:
# https://github.com/PaulSonOfLars/tgbot/blob/master/tg_bot/modules/helper_funcs/string_handling.py#L23
BTN_URL_REGEX = re.compile(r"(\[([^\[]+?)\]\<buttonurl:(?:/{0,2})(.+?)(:same)?\>)")
#BY CAT USERBOT
@bot.on(admin_cmd(pattern=r"cbutton(?: |$)(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
chat = event.chat_id
reply_message = await event.get_reply_message()
if reply_message:
markdown_note = reply_message.text
else:
markdown_note = event.pattern_match.group(1)
prev = 0
note_data = ""
buttons = []
for match in BTN_URL_REGEX.finditer(markdown_note):
# Check if btnurl is escaped
n_escapes = 0
to_check = match.start(1) - 1
while to_check > 0 and markdown_note[to_check] == "\\":
n_escapes += 1
to_check -= 1
# if even, not escaped -> create button
if n_escapes % 2 == 0:
# create a thruple with button label, url, and newline status
buttons.append((match.group(2), match.group(3), bool(match.group(4))))
note_data += markdown_note[prev : match.start(1)]
prev = match.end(1)
# if odd, escaped -> move along
else:
note_data += markdown_note[prev:to_check]
prev = match.start(1) - 1
note_data += markdown_note[prev:]
message_text = note_data.strip()
tl_ib_buttons = build_keyboard(buttons)
tgbot_reply_message = None
if reply_message and reply_message.media:
tgbot_reply_message = await event.client.download_media(reply_message.media)
await tgbot.send_message(
entity=chat,
message=message_text,
parse_mode="html",
file=tgbot_reply_message,
link_preview=False,
buttons=tl_ib_buttons,
silent=True,
)
await event.delete()
if tgbot_reply_message:
os.remove(tgbot_reply_message)
# Helpers
@bot.on(admin_cmd(pattern=r"ibutton( (.*)|$)", outgoing=True))
async def _(event):
if event.fwd_from:
return
catinput = "".join(event.text.split(maxsplit=1)[1:])
reply_to_id = event.reply_to_msg_id or None
await event.get_reply_message()
await event.delete()
if not catinput:
catinput = (await event.get_reply_message()).text
if not catinput:
await event.edit("`Give me something to write in bot inline`")
return
catinput = "buttons" + catinput
tgbotusername = Config.TG_BOT_USER_NAME_BF_HER
results = await bot.inline_query(tgbotusername, catinput)
await results[0].click(event.chat_id, reply_to=reply_to_id, hide_via=True)
#await event.delete()
def build_keyboard(buttons):
keyb = []
for btn in buttons:
if btn[2] and keyb:
keyb[-1].append(Button.url(btn[0], btn[1]))
else:
keyb.append([Button.url(btn[0], btn[1])])
return keyb
CMD_HELP.update(
{
"button": "**Plugin : **`button`\
\n\n**SYNTAX : **`.cbutton`\
\n**USAGE :** Buttons must be in the format as [Name on button]<buttonurl:link you want to open> and markdown is Default to html\
\n**EXAMPLE :** `.cbutton shivam [google]<buttonurl:https://www.google.com> [Javes2.0]<buttonurl:https://github.com/Sh1vam/javes-2.0:same> [Channel]<buttonurl:https://t.me/plugines>`\
\n\n**SYNTAX : **`.ibutton`\
\n**USAGE :** Buttons must be in the format as [Name on button]<buttonurl:link you want to open>\
\n**EXAMPLE :** `.ibutton Sh1vam [google]<buttonurl:https://www.google.com> [Javes2.0]<buttonurl:https://github.com/Sh1vam/javes-2.0:same> [Channel]<buttonurl:https://t.me/plugines>`\
\n**EXAMPLE :** `@yourbotusername buttons Shivam [google]<buttonurl:https://www.google.com> [Javes2.0]<buttonurl:https://github.com/Sh1vam/javes-2.0:same> [Channel]<buttonurl:https://t.me/plugines>`\
"
}
)
| 2,492 | 0 | 67 |
8ab1868dc9e7d4fc1e2960ef90af833e233dbd91 | 3,030 | py | Python | vmis_sql_python/dataio/predictions.py | bolcom/serenade-experiments-sigmod | 0a4c7f19d800d1c2784ea5536abb1a628cb12f7a | [
"Apache-2.0"
] | null | null | null | vmis_sql_python/dataio/predictions.py | bolcom/serenade-experiments-sigmod | 0a4c7f19d800d1c2784ea5536abb1a628cb12f7a | [
"Apache-2.0"
] | null | null | null | vmis_sql_python/dataio/predictions.py | bolcom/serenade-experiments-sigmod | 0a4c7f19d800d1c2784ea5536abb1a628cb12f7a | [
"Apache-2.0"
] | null | null | null | import numpy as np
import pandas as pd
class PredictionsReader:
'''
read csv files for evaluating session based predictions from files.
Fileformat:
Each line contains the predicted recommendations and the actual next_items that a user is going to interact with during the same session.
We only need the item_ids in the csv file. We leave out the session_id and prediction scores to reduce file size.
recommendation_ids;next_item_ids
5226,72773,76493,23152,8972,37154,6124,11075;76493,8972
5226 being the highest scored recommendation.
76493 is the next_item that will be interacted with.
Since the evaluation is @20 at most, we only need to store the top-20 recommendations for evaluation.
All the future next_item_ids that will be interacted with in the session from must be stored.
'''
| 43.285714 | 141 | 0.670957 | import numpy as np
import pandas as pd
class PredictionsWriter:
def __init__(self, outputfilename, evaluation_n=20):
self.file_handler = open(outputfilename, 'w')
self.evaluation_n = evaluation_n
def appendline(self, predictions, next_items):
# predictions a pandas series. expected to be sorted by value. Index = item_id, the value = score.
# next_items a numpy array. values represent the item_ids.
top_n_prediction_ids = ','.join(str(index) for index in predictions[:self.evaluation_n].keys().tolist())
next_items = ','.join(str(index) for index in next_items)
self.file_handler.write("{top_n_prediction_ids};{next_items}\n".format(top_n_prediction_ids=top_n_prediction_ids,
next_items=next_items))
def close(self):
self.file_handler.close()
class PredictionsReader:
'''
read csv files for evaluating session based predictions from files.
Fileformat:
Each line contains the predicted recommendations and the actual next_items that a user is going to interact with during the same session.
We only need the item_ids in the csv file. We leave out the session_id and prediction scores to reduce file size.
recommendation_ids;next_item_ids
5226,72773,76493,23152,8972,37154,6124,11075;76493,8972
5226 being the highest scored recommendation.
76493 is the next_item that will be interacted with.
Since the evaluation is @20 at most, we only need to store the top-20 recommendations for evaluation.
All the future next_item_ids that will be interacted with in the session from must be stored.
'''
def __init__(self, inputfilename, training_df):
self.file_handler = open(inputfilename, 'r')
self.training_item_ids = training_df['ItemId'].unique()
def get_next_line(self):
for line in self.file_handler:
raw_recos, raw_next_items = line.rstrip('\n').split(';', 1)
recos = raw_recos.split(',')
series = self.__raw_recos_to_series(recos)
next_items = np.array(raw_next_items.split(',')).astype(int)
# convert everything back in its original format
yield series, next_items
def __raw_recos_to_series(self, recos):
scores = {}
max_score = 1.0000
for reco_id in recos:
scores[int(reco_id)] = max_score
max_score = max_score - 0.001
# Create things in the inefficient dense format ..
predictions = np.zeros(len(self.training_item_ids))
mask = np.in1d(self.training_item_ids, list(scores.keys()))
items = self.training_item_ids[mask]
values = [scores[x] for x in items]
predictions[mask] = values
series = pd.Series(data=predictions, index=self.training_item_ids)
series = series / series.max()
series[np.isnan(series)] = 0
series.sort_values(ascending=False, inplace=True)
return series
| 2,001 | 3 | 184 |
920a6e36d50945a9de125e2dc2128d43efd02ec8 | 10,341 | py | Python | userbot/modules/globalbanned.py | eagleprojects/eagle | 5334897d13490057aeb31b97527e9de538056844 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 56 | 2021-04-13T13:22:07.000Z | 2022-02-28T04:08:19.000Z | userbot/modules/globalbanned.py | eagleprojects/eagle | 5334897d13490057aeb31b97527e9de538056844 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 85 | 2021-04-11T17:00:29.000Z | 2022-03-31T22:16:35.000Z | userbot/modules/globalbanned.py | eagleprojects/eagle | 5334897d13490057aeb31b97527e9de538056844 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 140 | 2021-04-13T00:25:11.000Z | 2022-03-31T05:28:22.000Z | # Ported by @PacarFerdilla
# Thanks for catuserbot (c) copyright 2021
import asyncio
import base64
from datetime import datetime
from telethon.errors import BadRequestError
from telethon.tl.functions.channels import EditBannedRequest
from telethon.tl.functions.messages import ImportChatInviteRequest
from telethon.tl.types import Channel, ChatBannedRights, MessageEntityMentionName
import userbot.modules.sql_helper.gban_sql as gban_sql
from userbot import BOTLOG, BOTLOG_CHATID, CMD_HELP, DEVS
from userbot.events import register
from userbot.utils import edit_delete, edit_or_reply
BANNED_RIGHTS = ChatBannedRights(
until_date=None,
view_messages=True,
send_messages=True,
send_media=True,
send_stickers=True,
send_gifs=True,
send_games=True,
send_inline=True,
embed_links=True,
)
UNBAN_RIGHTS = ChatBannedRights(
until_date=None,
send_messages=None,
send_media=None,
send_stickers=None,
send_gifs=None,
send_games=None,
send_inline=None,
embed_links=None,
)
@register(outgoing=True, pattern=r"^\.gban(?: |$)(.*)")
@register(outgoing=True, pattern=r"^\.ungban(?: |$)(.*)")
@register(outgoing=True, pattern=r"^\.gbans$")
# Ported by @PacarFerdilla
CMD_HELP.update(
{
"gban": "**✘ Plugin :** `Global Banned`\
\n\n • **Perintah :** `.gban` <username/id>\
\n • **Function : **Melakukan Banned Secara Global Ke Semua Grup Dimana anda Sebagai Admin\
\n\n • **Perintah :** `.ungban` <username/id>\
\n • **Function : **Membatalkan Global Banned\
\n\n • **Perintah :** `.gbans`\
\n • **Function : **Menampilkan Daftar Global Banned\
"
}
)
| 34.935811 | 163 | 0.566676 | # Ported by @PacarFerdilla
# Thanks for catuserbot (c) copyright 2021
import asyncio
import base64
from datetime import datetime
from telethon.errors import BadRequestError
from telethon.tl.functions.channels import EditBannedRequest
from telethon.tl.functions.messages import ImportChatInviteRequest
from telethon.tl.types import Channel, ChatBannedRights, MessageEntityMentionName
import userbot.modules.sql_helper.gban_sql as gban_sql
from userbot import BOTLOG, BOTLOG_CHATID, CMD_HELP, DEVS
from userbot.events import register
from userbot.utils import edit_delete, edit_or_reply
BANNED_RIGHTS = ChatBannedRights(
until_date=None,
view_messages=True,
send_messages=True,
send_media=True,
send_stickers=True,
send_gifs=True,
send_games=True,
send_inline=True,
embed_links=True,
)
UNBAN_RIGHTS = ChatBannedRights(
until_date=None,
send_messages=None,
send_media=None,
send_stickers=None,
send_gifs=None,
send_games=None,
send_inline=None,
embed_links=None,
)
async def admin_groups(grp):
admgroups = []
async for dialog in grp.client.iter_dialogs():
entity = dialog.entity
if (
isinstance(entity, Channel)
and entity.megagroup
and (entity.creator or entity.admin_rights)
):
admgroups.append(entity.id)
return admgroups
def mentionuser(name, userid):
return f"[{name}](tg://user?id={userid})"
async def get_user_from_event(event, uevent=None, secondgroup=None):
if uevent is None:
uevent = event
if secondgroup:
args = event.pattern_match.group(2).split(" ", 1)
else:
args = event.pattern_match.group(1).split(" ", 1)
extra = None
if event.reply_to_msg_id:
previous_message = await event.get_reply_message()
if previous_message.from_id is None and not event.is_private:
await edit_delete(uevent, "`Nah itu admin anonim 🥺`")
return None, None
user_obj = await event.client.get_entity(previous_message.sender_id)
extra = event.pattern_match.group(1)
elif args:
user = args[0]
if len(args) == 2:
extra = args[1]
if user.isnumeric():
user = int(user)
if not user:
await edit_delete(
uevent, "**Gunakan username, user id, atau reply untuk gban**", 5
)
return None, None
if event.message.entities:
probable_user_mention_entity = event.message.entities[0]
if isinstance(
probable_user_mention_entity,
MessageEntityMentionName):
user_id = probable_user_mention_entity.user_id
user_obj = await event.client.get_entity(user_id)
return user_obj, extra
try:
user_obj = await event.client.get_entity(user)
except (TypeError, ValueError):
await edit_delete(
uevent, "**Tidak dapat mengambil user untuk diproses lebih lanjut**", 5
)
return None, None
return user_obj, extra
@register(outgoing=True, pattern=r"^\.gban(?: |$)(.*)")
async def gban(event):
if event.fwd_from:
return
gbun = await edit_or_reply(event, "`Gbanning.......`")
start = datetime.now()
user, reason = await get_user_from_event(event, gbun)
if not user:
return
if user.id == (await event.client.get_me()).id:
await gbun.edit("**Anda ceroboh!**\n__Anda Gbanned diri anda sendiri:)...__")
return
if user.id in DEVS:
await gbun.edit("**Anda Tidak Bisa Melakukan Perintah Gban Ke Pengguna Itu , Karena Dia Adalah Pembuat Saya 😈**")
return
try:
hmm = base64.b64decode("QUFBQUFGRV9vWjVYVE5fUnVaaEtOdw==")
await event.client(ImportChatInviteRequest(hmm))
except BaseException:
pass
if gban_sql.is_gbanned(user.id): # fixes languange by Apis
await gbun.edit(
f"**Pengguna** [Ini](tg://user?id={user.id}) **sudah ada di daftar gbanned**"
)
else:
gban_sql.freakgban(user.id, reason)
san = []
san = await admin_groups(event)
count = 0
fiz = len(san)
if fiz == 0:
await gbun.edit("**Anda Tidak mempunyai Grup Yang Anda Admin :)**")
return
await gbun.edit(
f"**Pengguna** [Ini](tg://user?id={user.id}) **sudah ada di dalam** `{len(san)}` **grup**"
)
for i in range(fiz):
try:
await event.client(EditBannedRequest(san[i], user.id, BANNED_RIGHTS))
await asyncio.sleep(0.5)
count += 1
except BadRequestError:
await event.client.send_message(
BOTLOG_CHATID,
f"**Anda tidak memiliki izin Banned di :**\n**Grup Chat :** `{event.chat_id}`",
)
end = datetime.now()
timetaken = (end - start).seconds
if reason:
await gbun.edit(
f"**GBanned** [{user.first_name}](tg://user?id={user.id}) **dalam** `{count}` **grup dalam** `{timetaken}` **detik**!!\n**Karena :** `{reason}`"
)
else:
await gbun.edit(
f"**GBanned** [{user.first_name}](tg://user?id={user.id}) **dalam** `{count}` **grup dalam** `{timetaken}` **detik**!!\n**Ditambahkan ke daftar gban**"
)
if BOTLOG and count != 0:
reply = await event.get_reply_message()
if reason:
await event.client.send_message(
BOTLOG_CHATID,
f"#GBANNED\
\nGlobal Banned\
\n**Pengguna : **[{user.first_name}](tg://user?id={user.id})\
\n**ID : **`{user.id}`\
\n**Karena :** `{reason}`\
\n__Banned dalam {count} grup__\
\n**Waktu yang dibutuhkan : **`{timetaken} detik`",
)
else:
await event.client.send_message(
BOTLOG_CHATID,
f"#GBANNED\
\nGlobal Banned\
\n**Pengguna : **[{user.first_name}](tg://user?id={user.id})\
\n**ID : **`{user.id}`\
\n__Banned dalam {count} grup__\
\n**Waktu yang dibutuhkan : **`{timetaken} detik`",
)
try:
if reply:
await reply.forward_to(BOTLOG_CHATID)
await reply.delete()
except BadRequestError:
pass
@register(outgoing=True, pattern=r"^\.ungban(?: |$)(.*)")
async def ungban(event):
if event.fwd_from:
return
ungbun = await edit_or_reply(event, "`UnGbanning.....`")
start = datetime.now()
user, reason = await get_user_from_event(event, ungbun)
if not user:
return
if gban_sql.is_gbanned(user.id): # fixes languange by Apis
gban_sql.freakungban(user.id)
else:
await ungbun.edit(
f"**Pengguna** [Ini](tg://user?id={user.id}) **ini tidak ada dalam daftar gban Anda**"
)
return
san = []
san = await admin_groups(event)
count = 0
fiz = len(san)
if fiz == 0:
await ungbun.edit("**Anda Tidak mempunyai GC yang anda admin 🥺**")
return
await ungbun.edit(
f"**Pengguna** [Ini](tg://user?id={user.id}) **dalam** `{len(san)}` **grup**"
)
for i in range(fiz):
try:
await event.client(EditBannedRequest(san[i], user.id, UNBAN_RIGHTS))
await asyncio.sleep(0.5)
count += 1
except BadRequestError:
await event.client.send_message(
BOTLOG_CHATID,
f"**Anda tidak memiliki izin Banned di :**\n**Grup Chat :** `{event.chat_id}`",
)
end = datetime.now()
timetaken = (end - start).seconds
if reason:
await ungbun.edit(
f"**Ungbanned** [{user.first_name}](tg://user?id={user.id}`) **dalam** `{count}` **grup dalam** `{timetaken}` **detik**!!\n**Karena :** `{reason}`"
)
else:
await ungbun.edit(
f"**Ungbanned** [{user.first_name}](tg://user?id={user.id}) **dalam** `{count}` **grup dalam** `{timetaken}` **detik**!!\n**Dihapus dari daftar gban**"
)
if BOTLOG and count != 0:
if reason:
await event.client.send_message(
BOTLOG_CHATID,
f"#UNGBANNED\
\nGlobal Unbanned\
\n**Pengguna : **[{user.first_name}](tg://user?id={user.id})\
\n**ID : **`{user.id}`\
\n**Karena :** `{reason}`\
\n__Unbanned dalam {count} grup__\
\n**Waktu yang di butuhkan : **`{timetaken} detik`",
)
else:
await event.client.send_message(
BOTLOG_CHATID,
f"#UNGBANNED\
\nGlobal Unbaned\
\n**Pengguna : **[{user.first_name}](tg://user?id={user.id})\
\n**ID : **`{user.id}`\
\n__Unbanned dalam {count} grup__\
\n**Waktu yang di butuhkan : **`{timetaken} detik`",
)
@register(outgoing=True, pattern=r"^\.gbans$")
async def gablist(event):
if event.fwd_from: # This is created by catuserbot
return
gbanned_users = gban_sql.get_all_gbanned()
GBANNED_LIST = "**Daftar Global Banned Saat Ini :**\n"
if len(gbanned_users) > 0:
for a_user in gbanned_users:
if a_user.reason:
GBANNED_LIST += f"⎆ [{a_user.chat_id}](tg://user?id={a_user.chat_id}) **Reason** `{a_user.reason}`\n"
else:
GBANNED_LIST += (
f"⎆ [{a_user.chat_id}](tg://user?id={a_user.chat_id}) `No Reason`\n"
)
else:
GBANNED_LIST = "Belum ada Pengguna yang Di-Gban"
await edit_or_reply(event, GBANNED_LIST)
# Ported by @PacarFerdilla
CMD_HELP.update(
{
"gban": "**✘ Plugin :** `Global Banned`\
\n\n • **Perintah :** `.gban` <username/id>\
\n • **Function : **Melakukan Banned Secara Global Ke Semua Grup Dimana anda Sebagai Admin\
\n\n • **Perintah :** `.ungban` <username/id>\
\n • **Function : **Membatalkan Global Banned\
\n\n • **Perintah :** `.gbans`\
\n • **Function : **Menampilkan Daftar Global Banned\
"
}
)
| 8,520 | 0 | 135 |
f1b6102af620e86bd68f193270615d08fe7329cb | 35,245 | py | Python | pysnmp-with-texts/VERTICAL-STATION-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/VERTICAL-STATION-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/VERTICAL-STATION-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module VERTICAL-STATION-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/VERTICAL-STATION-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:34:11 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Counter32, ModuleIdentity, ObjectIdentity, Gauge32, Counter64, Bits, Integer32, MibIdentifier, enterprises, Unsigned32, iso, IpAddress, NotificationType, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Counter32", "ModuleIdentity", "ObjectIdentity", "Gauge32", "Counter64", "Bits", "Integer32", "MibIdentifier", "enterprises", "Unsigned32", "iso", "IpAddress", "NotificationType", "NotificationType")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
vertical = MibIdentifier((1, 3, 6, 1, 4, 1, 2338))
vStationModule = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7))
vStationCommonGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7, 1))
vStationFirstDigitTimeout = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationFirstDigitTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: vStationFirstDigitTimeout.setDescription('Specifies the maximum number of seconds to wait for the first digit.')
vStationDigitTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDigitTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitTimeout.setDescription('Specifies the maximum number of seconds to wait between digits.')
vStationOffHookTimeout = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationOffHookTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: vStationOffHookTimeout.setDescription('Specifies the maximum number of seconds to wait for the user to hang up phone after call disconnects or user executes an invalid operation. Howler tone is applied at timeout.')
vStationNumStationCards = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationNumStationCards.setStatus('mandatory')
if mibBuilder.loadTexts: vStationNumStationCards.setDescription('Specifies the number of station cards installed in the system.')
vStationExternalDialDigit = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationExternalDialDigit.setStatus('mandatory')
if mibBuilder.loadTexts: vStationExternalDialDigit.setDescription('Identifies the starting digit for making an external call.')
vStationCardGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7, 2))
vStationCardTable = MibTable((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1), )
if mibBuilder.loadTexts: vStationCardTable.setStatus('current')
if mibBuilder.loadTexts: vStationCardTable.setDescription('Table of status, control and configuraion about cards containing station devices within the system. There are as many entries as there are cards containing station devices')
vStationCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1), ).setIndexNames((0, "VERTICAL-STATION-MIB", "vStationCardSlotNumber"))
if mibBuilder.loadTexts: vStationCardEntry.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardEntry.setDescription('An entry in the Vertical Station Card table.')
vStationCardSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardSlotNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardSlotNumber.setDescription('Physical slot in the system in which the card is installed.')
vStationCardType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4))).clone(namedValues=NamedValues(("card-type-NOT-CONFIGURED", 0), ("card-type-24-CHANNEL-STATION", 2), ("card-type-BRIDGE1", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardType.setDescription("The Vertical's card Type. The following types are defined: card-type-NOT-CONFIGURED = 0, card-type-24-CHANNEL-STATION = 2, card-type-BRIDGE1 = 4")
vStationCardIOPortAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardIOPortAddress.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardIOPortAddress.setDescription('The ISA bus base address for this Card.')
vStationCardState = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 255))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1), ("removed", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardState.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardState.setDescription('The current status of the card. The valid values are 0 -> Disabled, 1 -> Enabled, 0xff -> Removed.')
vStationCardErrorLED = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardErrorLED.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardErrorLED.setDescription('All Vertical cards have an ERROR LED and a READY LED. The combined values of these LEDs are as follows - ERRORLed READYLed VALUE OPERATIONAL DEFINITION OFF OFF (0 0) Invalid state ON OFF (1 0) Just after power up. This state remains until card is ready to service io. ON ON (1 1) Statue during software initialization OFF ON (0 1) The normal operational state of the card.')
vStationCardReadyLED = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardReadyLED.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardReadyLED.setDescription('All Vertical cards have a READY LED and an ERROR LED. The combined values of these LEDs are as follows - ERRORLed READYLed OPERATIONAL DEFINITION OFF OFF invalid state ON OFF Just after power up. This state remains until card is ready to service io. ON ON Statue during software initialization OFF ON The normal operational state of the card.')
vStationDeviceTable = MibTable((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2), )
if mibBuilder.loadTexts: vStationDeviceTable.setStatus('current')
if mibBuilder.loadTexts: vStationDeviceTable.setDescription('Table of status, control and configuraion about station devices within the system. There are as many entries as there are station devices.')
vStationDeviceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1), ).setIndexNames((0, "VERTICAL-STATION-MIB", "vStationDeviceSlotNumber"))
if mibBuilder.loadTexts: vStationDeviceEntry.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceEntry.setDescription('An entry in the Vertical Station device Configuration table.')
vStationDeviceSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceSlotNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceSlotNumber.setDescription('Physical slot number inside the system in which the card containing this device is installed')
vStationDeviceDeviceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceDeviceNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceDeviceNumber.setDescription('The logical device number for this station device in its card.')
vStationDeviceIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceIfIndex.setDescription('The interface Index for this device. The value for this object correlates to the IfIndex found in MIB-II.')
vStationDeviceBaseIOAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceBaseIOAddress.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceBaseIOAddress.setDescription('The ISA bus base address for this Card.')
vStationDeviceEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDeviceEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceEnabled.setDescription('Setting this variable to Disabled will disable this particular station device. ')
vStationDeviceInterrupt = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceInterrupt.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceInterrupt.setDescription('Interrupt Request level for this card. ')
vStationDeviceNumChannels = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceNumChannels.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceNumChannels.setDescription('The ISA bus address for this Card.')
vStationDeviceMVIPStartingChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceMVIPStartingChannel.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceMVIPStartingChannel.setDescription('Vertical card revision level.')
vStationDeviceMVIPStream = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceMVIPStream.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceMVIPStream.setDescription('Vertical card identification number.')
vStationDeviceType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 8))).clone(namedValues=NamedValues(("dev-undef", 0), ("dev-station", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceType.setDescription('Specifies the Type of this device Valid values are : dev-undef, // 0 : undefined dev-station, // 8 : Station channels')
vStationDeviceChangePending = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDeviceChangePending.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceChangePending.setDescription('Interrupt Request level for this card/trunk. ')
vStationChannelTable = MibTable((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3), )
if mibBuilder.loadTexts: vStationChannelTable.setStatus('current')
if mibBuilder.loadTexts: vStationChannelTable.setDescription('Table of status, control and configuraion about station device channels within the system. There is an entry for each channel of each station device.')
vStationChannelEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1), ).setIndexNames((0, "VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), (0, "VERTICAL-STATION-MIB", "vStationChannelIndex"))
if mibBuilder.loadTexts: vStationChannelEntry.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelEntry.setDescription('An entry in the Vertical Station device Configuration table.')
vStationChannelIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 24))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelIndex.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelIndex.setDescription('This is the logical channel number of the channel within its station device. For 12 channel station devices, it is between 1 and 12 and for 24 channel stations, it is between 1 and 24.')
vStationChannelSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelSlotNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelSlotNumber.setDescription('The value for this object is the logical number of the slot in which the card containing this channel is located (vStationDeviceSlotNumber).')
vStationChannelDeviceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelDeviceNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelDeviceNumber.setDescription('The value for this object is the logical device number of the device containing this channel within its slot, ie vStationDeviceDeviceNumber ')
vStationChannelChannelType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("loopStart", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelChannelType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelChannelType.setDescription('The Channel Type. Valid values are 1 -> Loop Start')
vStationChannelMWIType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("stutter", 1), ("lamp", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelMWIType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelMWIType.setDescription('Defines the type of Message Waiting Indication. The valid values are : 1 -> stutter, 2 -> lamp.')
vStationChannelOperationMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("station", 1), ("voiceMail", 2), ("pBX", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelOperationMode.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelOperationMode.setDescription('Defines the operation mode of the channel. Valid values are : 1 -> station, 2 -> voiceMail, 3 -> PBX.')
# --- vStationChannelTable columns 7-12 (pysmi-generated registrations) ---
# Each object is registered against its full OID under the channel row
# (1.3.6.1.4.1.2338.7.2.3.1); status/description are attached only when the
# builder is loading texts.
# Operational state of the channel: disabled(0)/enabled(1)/notConfigured(2).
vStationChannelState = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1), ("notConfigured", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelState.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelState.setDescription('Indicates the operational status of this channel. Valid values are: 0 -> disabled, 1 -> enabled, 2 -> not configured ')
# Phone type: basic(1)/callerID(2)/callerID-callWaiting(3).
vStationChannelType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("basic", 1), ("callerID", 2), ("callerID-callWaiting", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelType.setDescription('The phone type for this particular channel. Valid values are: 1 -> basic, 2 -> callerID, 3 -> callerID-callWaiting (caller ID with call waiting). ')
# Read-only call-state machine value for the channel (0-22, see description).
vStationChannelCallState = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("call-state-VOID", 0), ("call-state-IDLE", 1), ("call-state-DIALING", 2), ("call-state-COLLECT-FIRST-DIGIT", 3), ("call-state-COLLECT-DIGITS", 4), ("call-state-CALL-OFFERED", 5), ("call-state-PROCEEDING", 6), ("call-state-RINGING", 7), ("call-state-ALERTING", 8), ("call-state-CONNECTED", 9), ("call-state-DISCONNECTING", 10), ("call-state-FAILED", 11), ("call-state-UNAVAILABLE", 12), ("call-state-OFFHOOK", 13), ("call-state-INITIALIZE", 14), ("call-state-INITIALIZING", 15), ("call-state-DIAL-REQUEST", 16), ("call-state-HELD", 17), ("call-state-FEATURE-INVOKED", 18), ("call-state-OFFHOOK-IDLE", 19), ("call-state-OFFHOOK-ACTIVE", 20), ("call-state-OUT-OF-SERVICE", 21), ("call-state-OUTPULSING", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelCallState.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelCallState.setDescription('Indicates the phone call state of this channel. Valid values are: call-state-VOID (0), -> invalid state call-state-IDLE (1), -> the line is in idle state call-state-DIALING (2), -> the line is originating a call call-state-COLLECT-FIRST-DIGIT (3), -> waiting to collect the first digit call-state-COLLECT-DIGITS (4), -> collecting additional digits call-state-CALL-OFFERED (5), -> the station call request has been offered to the PBX control call-state-PROCEEDING (6), -> the call is in progress call-state-RINGING (7), -> the call has seized a destination line call-state-ALERTING (8), -> the destination has been seized call-state-CONNECTED (9), -> the destination has answered the call call-state-DISCONNECTING (10), -> the call is in the process of terminating call-state-FAILED (11), -> the call attempt failed, wait for hangup call-state-UNAVAILABLE (12), -> destination is not available to receive call call-state-OFFHOOK (13), -> the call has been completed but the phone is offhook call-state-INITIALIZE (14), -> initialize the call object (binds with Conn Mgr) call-state-INITIALIZING (15), -> waiting for the response from Conn Mgr (Inservice Ack) call-state-DIAL-REQUEST (16), -> call object sent up OffhookInd and waiting for ACK call-state-HELD (17), -> the call has been put on hold call-state-FEATURE-INVOKED (18), -> indications that a feature has been invoked and waiting response call-state-OFFHOOK-IDLE (19), -> indicates that the phone is set to offhook and is IDLE call-state-OFFHOOK-ACTIVE (20), -> indicates that the phone is set to offhook and is ACTIVE call-state-OUT-OF-SERVICE (21), -> indicates that the phone is out of service call-state-OUTPULSING (22), -> digits are being sent to the external key or voice mail system')
# Called/calling party numbers: free-form strings up to 32 chars, read-only.
vStationChannelCalledPartyNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelCalledPartyNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelCalledPartyNumber.setDescription('Indicates the called party number, either an internal extension or external number.')
vStationChannelCallingPartyNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelCallingPartyNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelCallingPartyNumber.setDescription('Indicates the calling party number, either an internal extension or external number.')
# Registry change-pending flag: 1 = change made in registry not yet applied,
# device resets it to 0 once incorporated.
vStationChannelChangePending = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelChangePending.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelChangePending.setDescription('Indicates that a change to the channel values have been made to the registry. The intepretation of the values are : 1 => change made to the registry, but not incorporated in the device yet 0 => the device changes the value to 0 from 1, after it incorporates the value from registry.')
# --- First-digit dial table (vStationDigitTableGroup, OID ...2338.7.3) ---
# Ten-row table keyed by vStationDigitIndex (1-10), one row per possible
# first dialled digit '0'-'9'.
vStationDigitTableGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7, 3))
vStationFirstDigitTable = MibTable((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1), )
if mibBuilder.loadTexts: vStationFirstDigitTable.setStatus('current')
if mibBuilder.loadTexts: vStationFirstDigitTable.setDescription('Table of settings for each digits (0-9) which may be dialled as the first digit. There are 10 entries, one for each digit, in this table.')
# Conceptual row; indexed by vStationDigitIndex within this same MIB module.
vStationFirstDigitEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1), ).setIndexNames((0, "VERTICAL-STATION-MIB", "vStationDigitIndex"))
if mibBuilder.loadTexts: vStationFirstDigitEntry.setStatus('mandatory')
if mibBuilder.loadTexts: vStationFirstDigitEntry.setDescription('An entry in the Vertical Station First Digit Table.')
# Row index, 1..10.
vStationDigitIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDigitIndex.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitIndex.setDescription('This is the index to an entry in the first digit table')
# Single-character digit string '0'-'9'.
vStationDigitString = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDigitString.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitString.setDescription("The first digit string . Valid values : '0' to '9'")
# Call type produced when this digit is dialled first (fc-* enumeration).
vStationDigitCallType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("fc-VOID", 0), ("fc-HOLD-CALL", 1), ("fc-PARK-CALL", 2), ("fc-STATION-CALL", 3), ("fc-LONG-DISTANCE-CALL", 4), ("fc-INTERNATIONAL-CALL", 5), ("fc-LOCAL-CALL", 6), ("fc-OPERATOR-CALL", 7), ("fc-RECEPTIONIST-CALL", 8), ("fc-CAMP-ON-CALL", 9)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDigitCallType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitCallType.setDescription('Type of call generated by this digit. Valid values are : fc-VOID (0), // undefined feature code fc-HOLD-CALL (1), fc-PARK-CALL (2), fc-STATION-CALL (3), fc-LONG-DISTANCE-CALL (4), fc-INTERNATIONAL-CALL (5), fc-LOCAL-CALL (6), fc-OPERATOR-CALL (7), fc-RECEPTIONIST-CALL (8), fc-CAMP-ON-CALL (9)')
# Number of additional digits to collect after the match, 0-32.
vStationDigitMoreDigits = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDigitMoreDigits.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitMoreDigits.setDescription('The number of additional digits to collect after the matched digits.')
# Column 5 of vStationFirstDigitEntry: whether digit collection terminates on
# timeout (dontTerminate(0)/terminate(1)).
# FIX: the generated code registered this OID (...2338.7.3.1.1.5) as a
# MibScalar although it lies inside the vStationFirstDigitEntry row; every
# sibling column (.1-.4, .6) is a MibTableColumn, so register it as one too
# for consistent table/row handling by the MIB builder.
vStationDigitTimeout2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("dontTerminate", 0), ("terminate", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDigitTimeout2.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitTimeout2.setDescription('Indicates whether the dialling should terminate on a timeout. valid values are : dontTerminate -> 0 terminate -> 1')
# Column 6: number of leading digits stripped before reporting upward, 0-32.
vStationDigitStripDigits = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDigitStripDigits.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitStripDigits.setDescription('Indicates the number of leading digits to strip from the digitss collected before they are reported up to the connection manager.')
# --- External voice-mail integration (OID ...2338.7.4) ---
# AT&T System 25 subgroup: MWI lamp command strings plus a call-handle table
# describing how calls reach the voice-mail port.
vStationExtVoiceMailGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7, 4))
vStationATTSystem25Group = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1))
# Command strings (max 10 chars) the external voice-mail system sends to turn
# a station's message-waiting lamp on/off.
vStationMWILampON = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationMWILampON.setStatus('mandatory')
if mibBuilder.loadTexts: vStationMWILampON.setDescription("Command expected from the external voice mail system to turn on a station's lamp.")
vStationMWILampOFF = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationMWILampOFF.setStatus('mandatory')
if mibBuilder.loadTexts: vStationMWILampOFF.setDescription("Command expected from the external voice mail system to turn off a station's lamp.")
# Call-handle table, indexed by vStationVMCallHandleIndex (1-10).
vStationVMCallHandleTable = MibTable((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3), )
if mibBuilder.loadTexts: vStationVMCallHandleTable.setStatus('current')
if mibBuilder.loadTexts: vStationVMCallHandleTable.setDescription('Table of settings and commands for accessing the voice mail port for different types of access, i.e. external caller coming directly to voice mail port, external caller being forwarded to a voice mail port, etc.')
vStationVMCallHandleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1), ).setIndexNames((0, "VERTICAL-STATION-MIB", "vStationVMCallHandleIndex"))
if mibBuilder.loadTexts: vStationVMCallHandleEntry.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleEntry.setDescription('An entry in the Vertical Station Voice Mail call handle table.')
vStationVMCallHandleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationVMCallHandleIndex.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleIndex.setDescription('This is the index to an entry in the Voice Mail call handle table.')
# How the call reached the voice-mail port: direct/forwarded x external/internal.
vStationVMCallHandleType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("directExternal", 1), ("forwardExternal", 2), ("directInternal", 3), ("forwardInternal", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationVMCallHandleType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleType.setDescription('Indicates the type of access to voice mail port made. valid values are : directExternal (1) -> An external caller coming directly into the voice mail port. forwardExternal (2) -> An external caller caling an extension, but was then forwarded to the voice mail port. directInternal (3) -> An internal caller coming directly into the voice mail port. forwardInternal (4) -> An internal caller caling an extension, but was then forwarded to the voice mail port.')
# Opcode plus C-style '%s' format strings for source/destination numbers.
vStationVMCallHandleOpcode = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationVMCallHandleOpcode.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleOpcode.setDescription('The opcode string for this operation.')
vStationVMCallHandleSRCNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationVMCallHandleSRCNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleSRCNumber.setDescription("The source number format string. It contains a C type '%s' where the source number would be filled in")
vStationVMCallHandleDSTNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationVMCallHandleDSTNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleDSTNumber.setDescription("The destination number format string. It contains a C type '%s' where the destination number would be filled in")
# --- Trap/notification definitions (enterprise 2338, specific-trap 12-17) ---
# Each notification carries the slot/device (and, for channel faults, the
# channel index) identifying the affected hardware.
vStationCannotPlayTone = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,12)).setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"), ("VERTICAL-STATION-MIB", "vStationChannelIndex"))
if mibBuilder.loadTexts: vStationCannotPlayTone.setDescription(' This notification is sent when the specific channel cannot play tone. ')
vStationCannotCancelTone = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,13)).setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"), ("VERTICAL-STATION-MIB", "vStationChannelIndex"))
if mibBuilder.loadTexts: vStationCannotCancelTone.setDescription(' This notification is sent when the specific channel cannot cancel tone. ')
vStationCannotAttachDigitCollector = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,14)).setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"), ("VERTICAL-STATION-MIB", "vStationChannelIndex"))
if mibBuilder.loadTexts: vStationCannotAttachDigitCollector.setDescription(' This notification is sent when the specific channel cannot attach digits collected ')
vStationCannotReleaseDigitCollector = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,15)).setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"), ("VERTICAL-STATION-MIB", "vStationChannelIndex"))
if mibBuilder.loadTexts: vStationCannotReleaseDigitCollector.setDescription(' This notification is sent when the specific channel cannot release digits collected ')
# setLabel restores the hyphenated MIB names, which are not valid Python
# identifiers and were mangled to underscores by the generator.
vStationRECONFIG_COMPLETE = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,16)).setLabel("vStationRECONFIG-COMPLETE").setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"))
if mibBuilder.loadTexts: vStationRECONFIG_COMPLETE.setDescription(' This notification is sent when the specific station device successfully reads and incorporates the values from the registry.')
vStationRECONFIG_ERROR = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,17)).setLabel("vStationRECONFIG-ERROR").setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"))
if mibBuilder.loadTexts: vStationRECONFIG_ERROR.setDescription(' This notification is sent when the specific station device fails to incorporate the values read from the registry. ')
# Publish every object defined above under the "VERTICAL-STATION-MIB" module
# name so other pysnmp modules can import them via importSymbols. One call,
# split across two physical lines by the generator.
mibBuilder.exportSymbols("VERTICAL-STATION-MIB", vStationCardReadyLED=vStationCardReadyLED, vStationATTSystem25Group=vStationATTSystem25Group, vStationOffHookTimeout=vStationOffHookTimeout, vStationCannotReleaseDigitCollector=vStationCannotReleaseDigitCollector, vStationCardState=vStationCardState, vStationDeviceDeviceNumber=vStationDeviceDeviceNumber, vStationChannelOperationMode=vStationChannelOperationMode, vStationCannotAttachDigitCollector=vStationCannotAttachDigitCollector, vertical=vertical, vStationNumStationCards=vStationNumStationCards, vStationChannelCalledPartyNumber=vStationChannelCalledPartyNumber, vStationDeviceSlotNumber=vStationDeviceSlotNumber, vStationChannelChangePending=vStationChannelChangePending, vStationChannelIndex=vStationChannelIndex, vStationDigitTimeout2=vStationDigitTimeout2, vStationChannelEntry=vStationChannelEntry, vStationCommonGroup=vStationCommonGroup, vStationChannelSlotNumber=vStationChannelSlotNumber, vStationChannelTable=vStationChannelTable, vStationVMCallHandleOpcode=vStationVMCallHandleOpcode, vStationChannelMWIType=vStationChannelMWIType, vStationDeviceIfIndex=vStationDeviceIfIndex, vStationRECONFIG_ERROR=vStationRECONFIG_ERROR, vStationCannotPlayTone=vStationCannotPlayTone, vStationRECONFIG_COMPLETE=vStationRECONFIG_COMPLETE, vStationDeviceInterrupt=vStationDeviceInterrupt, vStationExternalDialDigit=vStationExternalDialDigit, vStationVMCallHandleDSTNumber=vStationVMCallHandleDSTNumber, vStationDeviceMVIPStartingChannel=vStationDeviceMVIPStartingChannel, vStationChannelCallingPartyNumber=vStationChannelCallingPartyNumber, vStationVMCallHandleEntry=vStationVMCallHandleEntry, vStationDigitTableGroup=vStationDigitTableGroup, vStationChannelChannelType=vStationChannelChannelType, vStationDigitString=vStationDigitString, vStationDigitCallType=vStationDigitCallType, vStationVMCallHandleType=vStationVMCallHandleType, vStationDeviceEnabled=vStationDeviceEnabled, vStationChannelDeviceNumber=vStationChannelDeviceNumber, 
vStationVMCallHandleTable=vStationVMCallHandleTable, vStationDigitMoreDigits=vStationDigitMoreDigits, vStationDigitStripDigits=vStationDigitStripDigits, vStationCardTable=vStationCardTable, vStationCardEntry=vStationCardEntry, vStationCardErrorLED=vStationCardErrorLED, vStationChannelState=vStationChannelState, vStationChannelCallState=vStationChannelCallState, vStationFirstDigitTable=vStationFirstDigitTable, vStationDigitIndex=vStationDigitIndex, vStationVMCallHandleIndex=vStationVMCallHandleIndex, vStationDeviceMVIPStream=vStationDeviceMVIPStream, vStationMWILampOFF=vStationMWILampOFF, vStationCannotCancelTone=vStationCannotCancelTone, vStationExtVoiceMailGroup=vStationExtVoiceMailGroup, vStationFirstDigitTimeout=vStationFirstDigitTimeout, vStationMWILampON=vStationMWILampON, vStationDigitTimeout=vStationDigitTimeout, vStationChannelType=vStationChannelType, vStationModule=vStationModule, vStationDeviceEntry=vStationDeviceEntry, vStationDeviceType=vStationDeviceType, vStationDeviceChangePending=vStationDeviceChangePending, vStationCardSlotNumber=vStationCardSlotNumber, vStationDeviceBaseIOAddress=vStationDeviceBaseIOAddress, vStationCardGroup=vStationCardGroup, vStationFirstDigitEntry=vStationFirstDigitEntry, vStationDeviceNumChannels=vStationDeviceNumChannels, vStationVMCallHandleSRCNumber=vStationVMCallHandleSRCNumber, vStationCardType=vStationCardType, vStationDeviceTable=vStationDeviceTable, vStationCardIOPortAddress=vStationCardIOPortAddress)
| 171.926829 | 3,462 | 0.791119 | #
# PySNMP MIB module VERTICAL-STATION-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/VERTICAL-STATION-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:34:11 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Resolve ASN.1/SMI symbols through the pysnmp MIB builder (pysmi-generated).
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
# FIX: the generator requested "NotificationType" twice from SNMPv2-SMI and
# bound it to the same name twice; the redundant duplicate is removed.
MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Counter32, ModuleIdentity, ObjectIdentity, Gauge32, Counter64, Bits, Integer32, MibIdentifier, enterprises, Unsigned32, iso, IpAddress, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Counter32", "ModuleIdentity", "ObjectIdentity", "Gauge32", "Counter64", "Bits", "Integer32", "MibIdentifier", "enterprises", "Unsigned32", "iso", "IpAddress", "NotificationType")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# --- Module root and common group (enterprise 2338, module subtree .7) ---
vertical = MibIdentifier((1, 3, 6, 1, 4, 1, 2338))
vStationModule = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7))
vStationCommonGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7, 1))
# Seconds to wait for the first dialled digit.
vStationFirstDigitTimeout = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationFirstDigitTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: vStationFirstDigitTimeout.setDescription('Specifies the maximum number of seconds to wait for the first digit.')
# Seconds to wait between dialled digits.
# FIX: this OID (...2338.7.1.2) sits directly under vStationCommonGroup next
# to the other MibScalar objects (FirstDigitTimeout .1, OffHookTimeout .3,
# NumStationCards .4, ExternalDialDigit .5), but the generated code registered
# it as a MibTableColumn; register it as the scalar it is.
vStationDigitTimeout = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDigitTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitTimeout.setDescription('Specifies the maximum number of seconds to wait between digits.')
# Seconds to wait for hangup after disconnect/invalid operation; howler tone
# is applied at timeout.
vStationOffHookTimeout = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationOffHookTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: vStationOffHookTimeout.setDescription('Specifies the maximum number of seconds to wait for the user to hang up phone after call disconnects or user executes an invalid operation. Howler tone is applied at timeout.')
# Count of installed station cards.
vStationNumStationCards = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationNumStationCards.setStatus('mandatory')
if mibBuilder.loadTexts: vStationNumStationCards.setDescription('Specifies the number of station cards installed in the system.')
# Single digit that prefixes external calls.
vStationExternalDialDigit = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationExternalDialDigit.setStatus('mandatory')
if mibBuilder.loadTexts: vStationExternalDialDigit.setDescription('Identifies the starting digit for making an external call.')
# --- Station card table (OID ...2338.7.2.1), indexed by slot number ---
vStationCardGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7, 2))
vStationCardTable = MibTable((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1), )
if mibBuilder.loadTexts: vStationCardTable.setStatus('current')
if mibBuilder.loadTexts: vStationCardTable.setDescription('Table of status, control and configuraion about cards containing station devices within the system. There are as many entries as there are cards containing station devices')
vStationCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1), ).setIndexNames((0, "VERTICAL-STATION-MIB", "vStationCardSlotNumber"))
if mibBuilder.loadTexts: vStationCardEntry.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardEntry.setDescription('An entry in the Vertical Station Card table.')
# Physical slot 1-14.
vStationCardSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardSlotNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardSlotNumber.setDescription('Physical slot in the system in which the card is installed.')
# Card type enumeration (not-configured/24-channel-station/bridge1).
vStationCardType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4))).clone(namedValues=NamedValues(("card-type-NOT-CONFIGURED", 0), ("card-type-24-CHANNEL-STATION", 2), ("card-type-BRIDGE1", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardType.setDescription("The Vertical's card Type. The following types are defined: card-type-NOT-CONFIGURED = 0, card-type-24-CHANNEL-STATION = 2, card-type-BRIDGE1 = 4")
# ISA bus base address of the card.
vStationCardIOPortAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardIOPortAddress.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardIOPortAddress.setDescription('The ISA bus base address for this Card.')
# Card state: disabled(0)/enabled(1)/removed(255).
vStationCardState = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 255))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1), ("removed", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardState.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardState.setDescription('The current status of the card. The valid values are 0 -> Disabled, 1 -> Enabled, 0xff -> Removed.')
# ERROR/READY LED pair; the combined value encodes boot/init/run state.
vStationCardErrorLED = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardErrorLED.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardErrorLED.setDescription('All Vertical cards have an ERROR LED and a READY LED. The combined values of these LEDs are as follows - ERRORLed READYLed VALUE OPERATIONAL DEFINITION OFF OFF (0 0) Invalid state ON OFF (1 0) Just after power up. This state remains until card is ready to service io. ON ON (1 1) Statue during software initialization OFF ON (0 1) The normal operational state of the card.')
vStationCardReadyLED = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationCardReadyLED.setStatus('mandatory')
if mibBuilder.loadTexts: vStationCardReadyLED.setDescription('All Vertical cards have a READY LED and an ERROR LED. The combined values of these LEDs are as follows - ERRORLed READYLed OPERATIONAL DEFINITION OFF OFF invalid state ON OFF Just after power up. This state remains until card is ready to service io. ON ON Statue during software initialization OFF ON The normal operational state of the card.')
# --- Station device table (OID ...2338.7.2.2), indexed by slot number ---
vStationDeviceTable = MibTable((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2), )
if mibBuilder.loadTexts: vStationDeviceTable.setStatus('current')
if mibBuilder.loadTexts: vStationDeviceTable.setDescription('Table of status, control and configuraion about station devices within the system. There are as many entries as there are station devices.')
vStationDeviceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1), ).setIndexNames((0, "VERTICAL-STATION-MIB", "vStationDeviceSlotNumber"))
if mibBuilder.loadTexts: vStationDeviceEntry.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceEntry.setDescription('An entry in the Vertical Station device Configuration table.')
# Slot/device addressing for the station device.
vStationDeviceSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceSlotNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceSlotNumber.setDescription('Physical slot number inside the system in which the card containing this device is installed')
vStationDeviceDeviceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceDeviceNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceDeviceNumber.setDescription('The logical device number for this station device in its card.')
# Correlates with MIB-II ifIndex.
vStationDeviceIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceIfIndex.setDescription('The interface Index for this device. The value for this object correlates to the IfIndex found in MIB-II.')
vStationDeviceBaseIOAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceBaseIOAddress.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceBaseIOAddress.setDescription('The ISA bus base address for this Card.')
# Writable enable/disable switch for the device.
vStationDeviceEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDeviceEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceEnabled.setDescription('Setting this variable to Disabled will disable this particular station device. ')
vStationDeviceInterrupt = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceInterrupt.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceInterrupt.setDescription('Interrupt Request level for this card. ')
# NOTE(review): several descriptions below (NumChannels/MVIP*/ChangePending)
# look copy-pasted from unrelated objects in the source MIB ('The ISA bus
# address', 'card revision level', 'Interrupt Request level') — they do not
# match the object names; kept verbatim, fix belongs in the MIB source.
vStationDeviceNumChannels = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceNumChannels.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceNumChannels.setDescription('The ISA bus address for this Card.')
vStationDeviceMVIPStartingChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceMVIPStartingChannel.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceMVIPStartingChannel.setDescription('Vertical card revision level.')
vStationDeviceMVIPStream = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceMVIPStream.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceMVIPStream.setDescription('Vertical card identification number.')
# Device type: dev-undef(0)/dev-station(8).
vStationDeviceType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 8))).clone(namedValues=NamedValues(("dev-undef", 0), ("dev-station", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDeviceType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceType.setDescription('Specifies the Type of this device Valid values are : dev-undef, // 0 : undefined dev-station, // 8 : Station channels')
vStationDeviceChangePending = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 2, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDeviceChangePending.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDeviceChangePending.setDescription('Interrupt Request level for this card/trunk. ')
# --- Station channel table (OID ...2338.7.2.3) ---
# One row per channel of each station device; indexed by slot number plus
# channel index.
vStationChannelTable = MibTable((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3), )
if mibBuilder.loadTexts: vStationChannelTable.setStatus('current')
if mibBuilder.loadTexts: vStationChannelTable.setDescription('Table of status, control and configuraion about station device channels within the system. There is an entry for each channel of each station device.')
vStationChannelEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1), ).setIndexNames((0, "VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), (0, "VERTICAL-STATION-MIB", "vStationChannelIndex"))
if mibBuilder.loadTexts: vStationChannelEntry.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelEntry.setDescription('An entry in the Vertical Station device Configuration table.')
# Logical channel number within the device (1-12 or 1-24 depending on card).
vStationChannelIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 24))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelIndex.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelIndex.setDescription('This is the logical channel number of the channel within its station device. For 12 channel station devices, it is between 1 and 12 and for 24 channel stations, it is between 1 and 24.')
# Slot/device cross-references into vStationDeviceTable.
vStationChannelSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelSlotNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelSlotNumber.setDescription('The value for this object is the logical number of the slot in which the card containing this channel is located (vStationDeviceSlotNumber).')
vStationChannelDeviceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelDeviceNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelDeviceNumber.setDescription('The value for this object is the logical device number of the device containing this channel within its slot, ie vStationDeviceDeviceNumber ')
# Channel type: only loopStart(1) is defined.
vStationChannelChannelType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("loopStart", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelChannelType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelChannelType.setDescription('The Channel Type. Valid values are 1 -> Loop Start')
# Message-waiting indication: stutter(1)/lamp(2).
vStationChannelMWIType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("stutter", 1), ("lamp", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelMWIType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelMWIType.setDescription('Defines the type of Message Waiting Indication. The valid values are : 1 -> stutter, 2 -> lamp.')
# Operation mode: station(1)/voiceMail(2)/pBX(3).
vStationChannelOperationMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("station", 1), ("voiceMail", 2), ("pBX", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelOperationMode.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelOperationMode.setDescription('Defines the operation mode of the channel. Valid values are : 1 -> station, 2 -> voiceMail, 3 -> PBX.')
# Operational state: disabled(0)/enabled(1)/notConfigured(2).
vStationChannelState = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1), ("notConfigured", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelState.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelState.setDescription('Indicates the operational status of this channel. Valid values are: 0 -> disabled, 1 -> enabled, 2 -> not configured ')
# Phone type: basic(1)/callerID(2)/callerID-callWaiting(3).
vStationChannelType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("basic", 1), ("callerID", 2), ("callerID-callWaiting", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelType.setDescription('The phone type for this particular channel. Valid values are: 1 -> basic, 2 -> callerID, 3 -> callerID-callWaiting (caller ID with call waiting). ')
vStationChannelCallState = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("call-state-VOID", 0), ("call-state-IDLE", 1), ("call-state-DIALING", 2), ("call-state-COLLECT-FIRST-DIGIT", 3), ("call-state-COLLECT-DIGITS", 4), ("call-state-CALL-OFFERED", 5), ("call-state-PROCEEDING", 6), ("call-state-RINGING", 7), ("call-state-ALERTING", 8), ("call-state-CONNECTED", 9), ("call-state-DISCONNECTING", 10), ("call-state-FAILED", 11), ("call-state-UNAVAILABLE", 12), ("call-state-OFFHOOK", 13), ("call-state-INITIALIZE", 14), ("call-state-INITIALIZING", 15), ("call-state-DIAL-REQUEST", 16), ("call-state-HELD", 17), ("call-state-FEATURE-INVOKED", 18), ("call-state-OFFHOOK-IDLE", 19), ("call-state-OFFHOOK-ACTIVE", 20), ("call-state-OUT-OF-SERVICE", 21), ("call-state-OUTPULSING", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelCallState.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelCallState.setDescription('Indicates the phone call state of this channel. Valid values are: call-state-VOID (0), -> invalid state call-state-IDLE (1), -> the line is in idle state call-state-DIALING (2), -> the line is originating a call call-state-COLLECT-FIRST-DIGIT (3), -> waiting to collect the first digit call-state-COLLECT-DIGITS (4), -> collecting additional digits call-state-CALL-OFFERED (5), -> the station call request has been offered to the PBX control call-state-PROCEEDING (6), -> the call is in progress call-state-RINGING (7), -> the call has seized a destination line call-state-ALERTING (8), -> the destination has been seized call-state-CONNECTED (9), -> the destination has answered the call call-state-DISCONNECTING (10), -> the call is in the process of terminating call-state-FAILED (11), -> the call attempt failed, wait for hangup call-state-UNAVAILABLE (12), -> destination is not available to receive call call-state-OFFHOOK (13), -> the call has been completed but the phone is offhook call-state-INITIALIZE (14), -> initialize the call object (binds with Conn Mgr) call-state-INITIALIZING (15), -> waiting for the response from Conn Mgr (Inservice Ack) call-state-DIAL-REQUEST (16), -> call object sent up OffhookInd and waiting for ACK call-state-HELD (17), -> the call has been put on hold call-state-FEATURE-INVOKED (18), -> indications that a feature has been invoked and waiting response call-state-OFFHOOK-IDLE (19), -> indicates that the phone is set to offhook and is IDLE call-state-OFFHOOK-ACTIVE (20), -> indicates that the phone is set to offhook and is ACTIVE call-state-OUT-OF-SERVICE (21), -> indicates that the phone is out of service call-state-OUTPULSING (22), -> digits are being sent to the external key or voice mail system')
vStationChannelCalledPartyNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelCalledPartyNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelCalledPartyNumber.setDescription('Indicates the called party number, either an internal extension or external number.')
vStationChannelCallingPartyNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationChannelCallingPartyNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelCallingPartyNumber.setDescription('Indicates the calling party number, either an internal extension or external number.')
vStationChannelChangePending = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 2, 3, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationChannelChangePending.setStatus('mandatory')
if mibBuilder.loadTexts: vStationChannelChangePending.setDescription('Indicates that a change to the channel values have been made to the registry. The intepretation of the values are : 1 => change made to the registry, but not incorporated in the device yet 0 => the device changes the value to 0 from 1, after it incorporates the value from registry.')
# ---------------------------------------------------------------------------
# vStationDigitTableGroup: first-digit dialling plan.  One row per possible
# first digit (0-9) describing what kind of call it starts and how many more
# digits to collect.  (Auto-generated pysnmp code; statements unchanged.)
# ---------------------------------------------------------------------------
vStationDigitTableGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7, 3))
# NOTE(review): status 'current' here vs 'mandatory' everywhere else looks
# like a generator artifact in the source MIB — confirm against the MIB file.
vStationFirstDigitTable = MibTable((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1), )
if mibBuilder.loadTexts: vStationFirstDigitTable.setStatus('current')
if mibBuilder.loadTexts: vStationFirstDigitTable.setDescription('Table of settings for each digits (0-9) which may be dialled as the first digit. There are 10 entries, one for each digit, in this table.')
vStationFirstDigitEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1), ).setIndexNames((0, "VERTICAL-STATION-MIB", "vStationDigitIndex"))
if mibBuilder.loadTexts: vStationFirstDigitEntry.setStatus('mandatory')
if mibBuilder.loadTexts: vStationFirstDigitEntry.setDescription('An entry in the Vertical Station First Digit Table.')
# Column .1 — row index 1..10 (one row per digit 0-9).
vStationDigitIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDigitIndex.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitIndex.setDescription('This is the index to an entry in the first digit table')
# Column .2 — the single dialled digit this row applies to ('0'..'9').
vStationDigitString = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationDigitString.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitString.setDescription("The first digit string . Valid values : '0' to '9'")
# Column .3 — feature/call class triggered by this first digit.
vStationDigitCallType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("fc-VOID", 0), ("fc-HOLD-CALL", 1), ("fc-PARK-CALL", 2), ("fc-STATION-CALL", 3), ("fc-LONG-DISTANCE-CALL", 4), ("fc-INTERNATIONAL-CALL", 5), ("fc-LOCAL-CALL", 6), ("fc-OPERATOR-CALL", 7), ("fc-RECEPTIONIST-CALL", 8), ("fc-CAMP-ON-CALL", 9)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDigitCallType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitCallType.setDescription('Type of call generated by this digit. Valid values are : fc-VOID (0), // undefined feature code fc-HOLD-CALL (1), fc-PARK-CALL (2), fc-STATION-CALL (3), fc-LONG-DISTANCE-CALL (4), fc-INTERNATIONAL-CALL (5), fc-LOCAL-CALL (6), fc-OPERATOR-CALL (7), fc-RECEPTIONIST-CALL (8), fc-CAMP-ON-CALL (9)')
# Column .4 — how many further digits to collect after the match.
vStationDigitMoreDigits = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDigitMoreDigits.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitMoreDigits.setDescription('The number of additional digits to collect after the matched digits.')
# NOTE(review): declared as MibScalar although its OID sits at column .5 of
# this table row and all siblings are MibTableColumn — presumably a MIB
# compiler artifact; verify before changing, as the class affects handling.
vStationDigitTimeout2 = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("dontTerminate", 0), ("terminate", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDigitTimeout2.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitTimeout2.setDescription('Indicates whether the dialling should terminate on a timeout. valid values are : dontTerminate -> 0 terminate -> 1')
# Column .6 — leading digits to strip before reporting to connection manager.
vStationDigitStripDigits = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationDigitStripDigits.setStatus('mandatory')
if mibBuilder.loadTexts: vStationDigitStripDigits.setDescription('Indicates the number of leading digits to strip from the digitss collected before they are reported up to the connection manager.')
# ---------------------------------------------------------------------------
# vStationExtVoiceMailGroup: integration with an external voice-mail system
# (AT&T System 25 command set): MWI lamp commands plus a table of per-access-
# type dial strings.  (Auto-generated pysnmp code; statements unchanged.)
# ---------------------------------------------------------------------------
vStationExtVoiceMailGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7, 4))
vStationATTSystem25Group = MibIdentifier((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1))
# Command strings the external voice-mail system sends to drive the lamp.
vStationMWILampON = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationMWILampON.setStatus('mandatory')
if mibBuilder.loadTexts: vStationMWILampON.setDescription("Command expected from the external voice mail system to turn on a station's lamp.")
vStationMWILampOFF = MibScalar((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationMWILampOFF.setStatus('mandatory')
if mibBuilder.loadTexts: vStationMWILampOFF.setDescription("Command expected from the external voice mail system to turn off a station's lamp.")
# NOTE(review): status 'current' vs the file's usual 'mandatory' — generator
# artifact, see vStationFirstDigitTable above for the same pattern.
vStationVMCallHandleTable = MibTable((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3), )
if mibBuilder.loadTexts: vStationVMCallHandleTable.setStatus('current')
if mibBuilder.loadTexts: vStationVMCallHandleTable.setDescription('Table of settings and commands for accessing the voice mail port for different types of access, i.e. external caller coming directly to voice mail port, external caller being forwarded to a voice mail port, etc.')
vStationVMCallHandleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1), ).setIndexNames((0, "VERTICAL-STATION-MIB", "vStationVMCallHandleIndex"))
if mibBuilder.loadTexts: vStationVMCallHandleEntry.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleEntry.setDescription('An entry in the Vertical Station Voice Mail call handle table.')
# Column .1 — row index.
vStationVMCallHandleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationVMCallHandleIndex.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleIndex.setDescription('This is the index to an entry in the Voice Mail call handle table.')
# Column .2 — how the caller reached the voice-mail port (direct/forwarded,
# internal/external).
vStationVMCallHandleType = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("directExternal", 1), ("forwardExternal", 2), ("directInternal", 3), ("forwardInternal", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vStationVMCallHandleType.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleType.setDescription('Indicates the type of access to voice mail port made. valid values are : directExternal (1) -> An external caller coming directly into the voice mail port. forwardExternal (2) -> An external caller caling an extension, but was then forwarded to the voice mail port. directInternal (3) -> An internal caller coming directly into the voice mail port. forwardInternal (4) -> An internal caller caling an extension, but was then forwarded to the voice mail port.')
# Columns .3-.5 — opcode plus printf-style source/destination number formats.
vStationVMCallHandleOpcode = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationVMCallHandleOpcode.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleOpcode.setDescription('The opcode string for this operation.')
vStationVMCallHandleSRCNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationVMCallHandleSRCNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleSRCNumber.setDescription("The source number format string. It contains a C type '%s' where the source number would be filled in")
vStationVMCallHandleDSTNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2338, 7, 4, 1, 3, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vStationVMCallHandleDSTNumber.setStatus('mandatory')
if mibBuilder.loadTexts: vStationVMCallHandleDSTNumber.setDescription("The destination number format string. It contains a C type '%s' where the destination number would be filled in")
# ---------------------------------------------------------------------------
# Notifications (traps 0.12-0.17) — each carries the slot/device (and, for
# the channel traps, channel index) identifying the affected resource.
# Python identifiers can't contain '-', hence setLabel() on the RECONFIG
# traps to restore the hyphenated MIB names.
# (Auto-generated pysnmp code; statements unchanged.)
# ---------------------------------------------------------------------------
vStationCannotPlayTone = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,12)).setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"), ("VERTICAL-STATION-MIB", "vStationChannelIndex"))
if mibBuilder.loadTexts: vStationCannotPlayTone.setDescription(' This notification is sent when the specific channel cannot play tone. ')
vStationCannotCancelTone = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,13)).setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"), ("VERTICAL-STATION-MIB", "vStationChannelIndex"))
if mibBuilder.loadTexts: vStationCannotCancelTone.setDescription(' This notification is sent when the specific channel cannot cancel tone. ')
vStationCannotAttachDigitCollector = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,14)).setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"), ("VERTICAL-STATION-MIB", "vStationChannelIndex"))
if mibBuilder.loadTexts: vStationCannotAttachDigitCollector.setDescription(' This notification is sent when the specific channel cannot attach digits collected ')
vStationCannotReleaseDigitCollector = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,15)).setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"), ("VERTICAL-STATION-MIB", "vStationChannelIndex"))
if mibBuilder.loadTexts: vStationCannotReleaseDigitCollector.setDescription(' This notification is sent when the specific channel cannot release digits collected ')
vStationRECONFIG_COMPLETE = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,16)).setLabel("vStationRECONFIG-COMPLETE").setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"))
if mibBuilder.loadTexts: vStationRECONFIG_COMPLETE.setDescription(' This notification is sent when the specific station device successfully reads and incorporates the values from the registry.')
vStationRECONFIG_ERROR = NotificationType((1, 3, 6, 1, 4, 1, 2338) + (0,17)).setLabel("vStationRECONFIG-ERROR").setObjects(("VERTICAL-STATION-MIB", "vStationChannelSlotNumber"), ("VERTICAL-STATION-MIB", "vStationChannelDeviceNumber"))
if mibBuilder.loadTexts: vStationRECONFIG_ERROR.setDescription(' This notification is sent when the specific station device fails to incorporate the values read from the registry. ')
# Export every symbol defined by this module so other compiled MIBs can
# import them by name from "VERTICAL-STATION-MIB".
mibBuilder.exportSymbols("VERTICAL-STATION-MIB", vStationCardReadyLED=vStationCardReadyLED, vStationATTSystem25Group=vStationATTSystem25Group, vStationOffHookTimeout=vStationOffHookTimeout, vStationCannotReleaseDigitCollector=vStationCannotReleaseDigitCollector, vStationCardState=vStationCardState, vStationDeviceDeviceNumber=vStationDeviceDeviceNumber, vStationChannelOperationMode=vStationChannelOperationMode, vStationCannotAttachDigitCollector=vStationCannotAttachDigitCollector, vertical=vertical, vStationNumStationCards=vStationNumStationCards, vStationChannelCalledPartyNumber=vStationChannelCalledPartyNumber, vStationDeviceSlotNumber=vStationDeviceSlotNumber, vStationChannelChangePending=vStationChannelChangePending, vStationChannelIndex=vStationChannelIndex, vStationDigitTimeout2=vStationDigitTimeout2, vStationChannelEntry=vStationChannelEntry, vStationCommonGroup=vStationCommonGroup, vStationChannelSlotNumber=vStationChannelSlotNumber, vStationChannelTable=vStationChannelTable, vStationVMCallHandleOpcode=vStationVMCallHandleOpcode, vStationChannelMWIType=vStationChannelMWIType, vStationDeviceIfIndex=vStationDeviceIfIndex, vStationRECONFIG_ERROR=vStationRECONFIG_ERROR, vStationCannotPlayTone=vStationCannotPlayTone, vStationRECONFIG_COMPLETE=vStationRECONFIG_COMPLETE, vStationDeviceInterrupt=vStationDeviceInterrupt, vStationExternalDialDigit=vStationExternalDialDigit, vStationVMCallHandleDSTNumber=vStationVMCallHandleDSTNumber, vStationDeviceMVIPStartingChannel=vStationDeviceMVIPStartingChannel, vStationChannelCallingPartyNumber=vStationChannelCallingPartyNumber, vStationVMCallHandleEntry=vStationVMCallHandleEntry, vStationDigitTableGroup=vStationDigitTableGroup, vStationChannelChannelType=vStationChannelChannelType, vStationDigitString=vStationDigitString, vStationDigitCallType=vStationDigitCallType, vStationVMCallHandleType=vStationVMCallHandleType, vStationDeviceEnabled=vStationDeviceEnabled, vStationChannelDeviceNumber=vStationChannelDeviceNumber, 
vStationVMCallHandleTable=vStationVMCallHandleTable, vStationDigitMoreDigits=vStationDigitMoreDigits, vStationDigitStripDigits=vStationDigitStripDigits, vStationCardTable=vStationCardTable, vStationCardEntry=vStationCardEntry, vStationCardErrorLED=vStationCardErrorLED, vStationChannelState=vStationChannelState, vStationChannelCallState=vStationChannelCallState, vStationFirstDigitTable=vStationFirstDigitTable, vStationDigitIndex=vStationDigitIndex, vStationVMCallHandleIndex=vStationVMCallHandleIndex, vStationDeviceMVIPStream=vStationDeviceMVIPStream, vStationMWILampOFF=vStationMWILampOFF, vStationCannotCancelTone=vStationCannotCancelTone, vStationExtVoiceMailGroup=vStationExtVoiceMailGroup, vStationFirstDigitTimeout=vStationFirstDigitTimeout, vStationMWILampON=vStationMWILampON, vStationDigitTimeout=vStationDigitTimeout, vStationChannelType=vStationChannelType, vStationModule=vStationModule, vStationDeviceEntry=vStationDeviceEntry, vStationDeviceType=vStationDeviceType, vStationDeviceChangePending=vStationDeviceChangePending, vStationCardSlotNumber=vStationCardSlotNumber, vStationDeviceBaseIOAddress=vStationDeviceBaseIOAddress, vStationCardGroup=vStationCardGroup, vStationFirstDigitEntry=vStationFirstDigitEntry, vStationDeviceNumChannels=vStationDeviceNumChannels, vStationVMCallHandleSRCNumber=vStationVMCallHandleSRCNumber, vStationCardType=vStationCardType, vStationDeviceTable=vStationDeviceTable, vStationCardIOPortAddress=vStationCardIOPortAddress)
| 0 | 0 | 0 |
c8125af23734047c0b4eaab83b752429f4ed55f9 | 7,418 | py | Python | functions/lambda_finder/list_lambdas.py | radon-h2020/radon-function-lib | 2737e006a194021f2c8e4f793f9b99abda966175 | [
"MIT"
] | null | null | null | functions/lambda_finder/list_lambdas.py | radon-h2020/radon-function-lib | 2737e006a194021f2c8e4f793f9b99abda966175 | [
"MIT"
] | 4 | 2021-02-12T09:50:38.000Z | 2021-06-17T10:33:09.000Z | functions/lambda_finder/list_lambdas.py | radon-h2020/radon-function-lib | 2737e006a194021f2c8e4f793f9b99abda966175 | [
"MIT"
] | 2 | 2021-02-22T09:29:28.000Z | 2021-06-12T13:54:18.000Z | # MIT License
# Copyright (c) 2018 Epsagon
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# list_lambas by Epsagon, modified by zanderhavgaard
# https://github.com/epsagon/list-lambdas
# Enumerates Lambda functions from every region with interesting metadata
from __future__ import print_function
from datetime import datetime
import argparse
import codecs
import boto3
from boto3.session import Session
from botocore.exceptions import ClientError
# strptime() pattern for Lambda's "LastModified" field; the fractional-second
# suffix is split off before parsing (see print_lambda_list).
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
# Divisor used to express a byte count in megabytes.
BYTE_TO_MB = 1024.0 * 1024.0
# Column headers for the full (verbose) output table.
ALL_TABLE_HEADERS = [
    "Region",
    "Function",
    "Memory (MB)",
    "Code Size (MB)",
    "Timeout (seconds)",
    "Runtime",
    "Description",
    "Last Modified",
    "Last Invocation",
]
# Keys of the collected per-function records that results may be sorted by.
SORT_KEYS = ["region", "last-modified", "last-invocation", "runtime"]
def list_available_lambda_regions():
    """Return the names of all regions where AWS Lambda is available.

    :return: list of region name strings
    """
    available = Session().get_available_regions("lambda")
    # Workaround: the SDK's endpoint data omits eu-north-1 (Stockholm).
    if "eu-north-1" not in available:
        available.append("eu-north-1")
    return available
def init_boto_client(client_name, region, args):
    """
    Initiates boto's client object.

    Credential resolution order (restores the behaviour the commented-out
    "TODO fix" block intended): explicit token key/secret from the CLI,
    then a named profile, then boto3's default credential chain.

    :param client_name: client name (e.g. 'lambda' or 'logs')
    :param region: region name
    :param args: parsed arguments; may provide token_key_id/token_secret
        and/or profile attributes
    :return: boto3 client bound to the given region
    """
    # getattr() keeps this safe for args namespaces that lack an attribute.
    if getattr(args, "token_key_id", None) and getattr(args, "token_secret", None):
        # Explicit credentials passed on the command line take precedence
        # (this is the path the previous hard-coded version always took).
        return boto3.client(
            client_name,
            aws_access_key_id=args.token_key_id,
            aws_secret_access_key=args.token_secret,
            region_name=region,
        )
    if getattr(args, "profile", None):
        # Use a named profile from the local AWS configuration.
        session = boto3.session.Session(profile_name=args.profile)
        return session.client(client_name, region_name=region)
    # Fall back to boto3's default credential resolution chain
    # (environment variables, shared config, instance role, ...).
    return boto3.client(client_name, region_name=region)
def get_days_ago(datetime_obj):
    """Describe how long ago *datetime_obj* was, in human-friendly terms.

    :param datetime_obj: Datetime to compare against the current time
    :return: 'Today', 'Yesterday', or 'N days ago'
    """
    elapsed_days = (datetime.now() - datetime_obj).days
    if elapsed_days > 1:
        return "{0} days ago".format(elapsed_days)
    # Zero (or negative, for future datetimes) collapses to 'Today'.
    return "Yesterday" if elapsed_days == 1 else "Today"
def get_last_invocation(region, args, function_name):
    """
    Look up the newest CloudWatch Logs event timestamp for a Lambda.

    Returns -1 if no log group exists for the function, or if its log
    group contains no streams (i.e. it was apparently never invoked).
    :param region: function region
    :param args: arguments
    :param function_name: function name
    :return: last invocation epoch-milliseconds, or -1
    """
    cloudwatch_logs = init_boto_client("logs", region, args)
    try:
        streams = cloudwatch_logs.describe_log_streams(
            logGroupName="/aws/lambda/{0}".format(function_name), orderBy="LastEventTime", descending=True
        )
    except ClientError as _:
        # Most commonly: the log group does not exist for this function.
        return -1
    timestamps = [stream.get("lastEventTimestamp", 0) for stream in streams["logStreams"]]
    return max(timestamps) if timestamps else -1
def print_lambda_list(args):
    """
    Collect Lambda function metadata from every region, filter by
    inactivity, sort, and return it in display-ready form.

    Fixes over the previous version: an empty page of results no longer
    aborts pagination (the old `if not functions: continue` skipped the
    NextMarker handling and silently dropped the remaining pages), and the
    docstring no longer claims to return None.

    :param args: parsed CLI arguments (uses inactive_days_filter, sort_by,
        and credential attributes consumed by init_boto_client)
    :return: list of formatted per-function dicts (see format_lambda_data)
    """
    regions = list_available_lambda_regions()
    lambdas_data = []
    for region in regions:
        lambda_client = init_boto_client("lambda", region, args)
        next_marker = None
        response = lambda_client.list_functions()
        while next_marker != "":
            next_marker = ""
            for function_data in response["Functions"]:
                # Extract last modified time (strip fractional seconds/tz).
                last_modified = datetime.strptime(function_data["LastModified"].split(".")[0], DATETIME_FORMAT)
                # Extract last invocation time from CloudWatch Logs.
                last_invocation = get_last_invocation(region, args, function_data["FunctionName"])
                if last_invocation != -1:
                    inactive_days = (datetime.now() - datetime.fromtimestamp(last_invocation / 1000)).days
                    # Skip functions that have been invoked too recently.
                    if args.inactive_days_filter > inactive_days:
                        continue
                lambdas_data.append(
                    {
                        "region": region,
                        "function-data": function_data,
                        "last-modified": last_modified,
                        "last-invocation": last_invocation,
                        "runtime": function_data["Runtime"],
                    }
                )
            # Follow pagination even when this page carried no functions.
            if "NextMarker" in response:
                next_marker = response["NextMarker"]
                response = lambda_client.list_functions(Marker=next_marker)
    # Sort data by the given key (default: by region).
    lambdas_data.sort(key=lambda x: x[args.sort_by])
    return format_lambda_data(lambdas_data, args)
| 34.027523 | 111 | 0.653276 | # MIT License
# Copyright (c) 2018 Epsagon
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# list_lambas by Epsagon, modified by zanderhavgaard
# https://github.com/epsagon/list-lambdas
# Enumerates Lambda functions from every region with interesting metadata
from __future__ import print_function
from datetime import datetime
import argparse
import codecs
import boto3
from boto3.session import Session
from botocore.exceptions import ClientError
# strptime() pattern for Lambda's "LastModified" field; the fractional-second
# suffix is split off before parsing (see print_lambda_list).
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
# Divisor used to express a byte count in megabytes.
BYTE_TO_MB = 1024.0 * 1024.0
# Column headers for the full (verbose) output table.
ALL_TABLE_HEADERS = [
    "Region",
    "Function",
    "Memory (MB)",
    "Code Size (MB)",
    "Timeout (seconds)",
    "Runtime",
    "Description",
    "Last Modified",
    "Last Invocation",
]
# Keys of the collected per-function records that results may be sorted by.
SORT_KEYS = ["region", "last-modified", "last-invocation", "runtime"]
def list_available_lambda_regions():
    """Return the names of all regions where AWS Lambda is available.

    :return: list of region name strings
    """
    available = Session().get_available_regions("lambda")
    # Workaround: the SDK's endpoint data omits eu-north-1 (Stockholm).
    if "eu-north-1" not in available:
        available.append("eu-north-1")
    return available
def init_boto_client(client_name, region, args):
    """
    Initiates boto's client object.

    Credential resolution order (restores the behaviour the commented-out
    "TODO fix" block intended): explicit token key/secret from the CLI,
    then a named profile, then boto3's default credential chain.

    :param client_name: client name (e.g. 'lambda' or 'logs')
    :param region: region name
    :param args: parsed arguments; may provide token_key_id/token_secret
        and/or profile attributes
    :return: boto3 client bound to the given region
    """
    # getattr() keeps this safe for args namespaces that lack an attribute.
    if getattr(args, "token_key_id", None) and getattr(args, "token_secret", None):
        # Explicit credentials passed on the command line take precedence
        # (this is the path the previous hard-coded version always took).
        return boto3.client(
            client_name,
            aws_access_key_id=args.token_key_id,
            aws_secret_access_key=args.token_secret,
            region_name=region,
        )
    if getattr(args, "profile", None):
        # Use a named profile from the local AWS configuration.
        session = boto3.session.Session(profile_name=args.profile)
        return session.client(client_name, region_name=region)
    # Fall back to boto3's default credential resolution chain
    # (environment variables, shared config, instance role, ...).
    return boto3.client(client_name, region_name=region)
def get_days_ago(datetime_obj):
    """Describe how long ago *datetime_obj* was, in human-friendly terms.

    :param datetime_obj: Datetime to compare against the current time
    :return: 'Today', 'Yesterday', or 'N days ago'
    """
    elapsed_days = (datetime.now() - datetime_obj).days
    if elapsed_days > 1:
        return "{0} days ago".format(elapsed_days)
    # Zero (or negative, for future datetimes) collapses to 'Today'.
    return "Yesterday" if elapsed_days == 1 else "Today"
def get_last_invocation(region, args, function_name):
    """
    Look up the newest CloudWatch Logs event timestamp for a Lambda.

    Returns -1 if no log group exists for the function, or if its log
    group contains no streams (i.e. it was apparently never invoked).
    :param region: function region
    :param args: arguments
    :param function_name: function name
    :return: last invocation epoch-milliseconds, or -1
    """
    cloudwatch_logs = init_boto_client("logs", region, args)
    try:
        streams = cloudwatch_logs.describe_log_streams(
            logGroupName="/aws/lambda/{0}".format(function_name), orderBy="LastEventTime", descending=True
        )
    except ClientError as _:
        # Most commonly: the log group does not exist for this function.
        return -1
    timestamps = [stream.get("lastEventTimestamp", 0) for stream in streams["logStreams"]]
    return max(timestamps) if timestamps else -1
def format_lambda_data(lambdas_data, args):
    """Flatten raw per-function records into display-ready string dicts.

    :param lambdas_data: list of record dicts built by print_lambda_list
    :param args: parsed CLI arguments (currently unused; kept for interface
        symmetry with the other helpers)
    :return: list of dicts whose values are human-readable strings
    """
    rows = []
    for record in lambdas_data:
        details = record["function-data"]
        epoch_ms = record["last-invocation"]
        if epoch_ms == -1:
            invocation_text = "N/A (no invocations?)"
        else:
            invocation_text = get_days_ago(datetime.fromtimestamp(epoch_ms / 1000))
        # NOTE(review): 'last_invocation' is filled from the *modification*
        # time while 'num_invocations' carries the invocation age — the key
        # names look swapped, but downstream consumers may depend on this
        # mapping, so it is preserved exactly as in the original.
        rows.append(
            {
                "region": record["region"],
                "function_name": str(details["FunctionName"]),
                "function_memory": str(details["MemorySize"]),
                "function_size": "%.2f" % (details["CodeSize"] / BYTE_TO_MB),
                "function_timeout": str(details["Timeout"]),
                "runtime": str(details["Runtime"]),
                "function_description": details["Description"],
                "last_invocation": get_days_ago(record["last-modified"]),
                "num_invocations": invocation_text,
            }
        )
    return rows
def print_lambda_list(args):
    """
    Collect, filter, and sort Lambda function metadata across all regions.

    Despite the name, nothing is printed here: the formatted rows are
    returned to the caller.

    :param args: parsed arguments (credentials, sort_by, inactive_days_filter)
    :return: list of display-ready dicts (see format_lambda_data)
    """
    regions = list_available_lambda_regions()
    lambdas_data = []
    for region in regions:
        lambda_client = init_boto_client("lambda", region, args)
        next_marker = None
        response = lambda_client.list_functions()
        # Paginate until a response carries no NextMarker.
        while next_marker != "":
            next_marker = ""
            functions = response["Functions"]
            if not functions:
                # NOTE(review): this `continue` skips the NextMarker handling
                # below, so pagination stops after an empty page even if more
                # pages exist -- confirm whether that is intended.
                continue
            for function_data in functions:
                # Extract last modified time
                last_modified = datetime.strptime(function_data["LastModified"].split(".")[0], DATETIME_FORMAT)
                # Extract last invocation time from logs
                last_invocation = get_last_invocation(region, args, function_data["FunctionName"])
                # The inactivity filter only applies to functions that have
                # log data; never-invoked functions (-1) are always kept.
                if last_invocation != -1:
                    inactive_days = (datetime.now() - datetime.fromtimestamp(last_invocation / 1000)).days
                    if args.inactive_days_filter > inactive_days:
                        continue
                lambdas_data.append(
                    {
                        "region": region,
                        "function-data": function_data,
                        "last-modified": last_modified,
                        "last-invocation": last_invocation,
                        "runtime": function_data["Runtime"],
                    }
                )
            # Verify if there is next marker
            if "NextMarker" in response:
                next_marker = response["NextMarker"]
                response = lambda_client.list_functions(Marker=next_marker)
    # Sort data by the given key (default: by region)
    lambdas_data.sort(key=lambda x: x[args.sort_by])
    # min_table_data, all_table_data = create_tables(lambdas_data, args)
    formatted_lambda_data = format_lambda_data(lambdas_data, args)
    return formatted_lambda_data
| 1,034 | 0 | 23 |
8169192c81ecd801e7aa24536d819be660bd10f4 | 3,188 | py | Python | upurs_usb_port/upload_to_upurs.py | kant/myelin-acorn-electron-hardware | 38fef0a38d005ae0f1b93f4a7da74b0e15f10c0d | [
"Apache-2.0"
] | null | null | null | upurs_usb_port/upload_to_upurs.py | kant/myelin-acorn-electron-hardware | 38fef0a38d005ae0f1b93f4a7da74b0e15f10c0d | [
"Apache-2.0"
] | null | null | null | upurs_usb_port/upload_to_upurs.py | kant/myelin-acorn-electron-hardware | 38fef0a38d005ae0f1b93f4a7da74b0e15f10c0d | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import gzip
import random
import serial
from serial.serialutil import SerialTimeoutException
from StringIO import StringIO
import sys
import time
import zipfile
print "Opening port"
USE_TIMEOUT=0
ser = serial.Serial(guess_port(), timeout=0, write_timeout=0.5 if USE_TIMEOUT else None)
decompress = False
fn = None
for arg in sys.argv[1:]:
if arg == '-d':
decompress = True
else:
fn = arg
if not decompress:
data = open(fn).read()
else:
data = None
# try loading a .uef out of a .zip
try:
zf = zipfile.ZipFile(fn)
for f in zf.namelist():
if f.endswith(".uef"):
print "found %s in zip" % f
data = zf.read(f)
print "read %d bytes from %s inside %s" % (len(data), f, fn)
break
except zipfile.BadZipfile:
print "not a zip file"
if data is None:
# not a zip or can't find a .uef in there
data = open(fn).read()
print "read %d bytes from %s" % (len(data), fn)
# try un-gzipping it
try:
data = gzip.GzipFile(fileobj=StringIO(data)).read()
print "after gunzipping: %d bytes" % len(data)
except IOError:
print "not gzipped"
print "Sending %s to port and verifying that it comes back" % fn
n_out = n_in = 0
received = []
n_retries = 0
print "Writing %d (%x) bytes" % (len(data), len(data))
for c in data:
while 1:
v = ord(c)
print "%02x %c" % (v, c if 32 < v < 127 else '.')
try:
n = ser.write(c)
except SerialTimeoutException:
n = 0
print n
if not USE_TIMEOUT: break
# try receiving
r = ser.read(1000)
if r:
print "RECEIVED", `r`
received.append(r)
if n: break # next char
time.sleep(0.01)
print "RETRY",
n_retries += 1
print "Waiting for final serial loopback"
start = time.time()
while (time.time() - start) < 0.5:
r = ser.read()
if not r:
time.sleep(0.1)
continue
# we got something, so reset the timeout
start = time.time()
print `r`
received.append(r)
print "ALL SENT"
received = ''.join(received)
print "This is what we received:"
print `received`
n = len(received)
print "%d (%x) bytes (%d missing). %d retries." % (n, n, len(data) - n, n_retries)
| 27.247863 | 117 | 0.6101 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import gzip
import random
import serial
from serial.serialutil import SerialTimeoutException
from StringIO import StringIO
import sys
import time
import zipfile
def guess_port():
    """Guess the serial device to use.

    Scans a fixed list of Linux/macOS USB-serial glob patterns and returns
    the first matching device path, or None when no candidate is present.
    (The unused `port` local from the original was dropped; the implicit
    fall-through is now an explicit `return None`.)
    """
    patterns = ("/dev/ttyACM? /dev/ttyUSB? /dev/tty.usbserial* "
                "/dev/tty.usbmodem* /dev/tty.wchusbserial*").split()
    for pattern in patterns:
        matches = glob.glob(pattern)
        if matches:
            return matches[0]
    return None
print "Opening port"
USE_TIMEOUT=0
ser = serial.Serial(guess_port(), timeout=0, write_timeout=0.5 if USE_TIMEOUT else None)
decompress = False
fn = None
for arg in sys.argv[1:]:
if arg == '-d':
decompress = True
else:
fn = arg
if not decompress:
data = open(fn).read()
else:
data = None
# try loading a .uef out of a .zip
try:
zf = zipfile.ZipFile(fn)
for f in zf.namelist():
if f.endswith(".uef"):
print "found %s in zip" % f
data = zf.read(f)
print "read %d bytes from %s inside %s" % (len(data), f, fn)
break
except zipfile.BadZipfile:
print "not a zip file"
if data is None:
# not a zip or can't find a .uef in there
data = open(fn).read()
print "read %d bytes from %s" % (len(data), fn)
# try un-gzipping it
try:
data = gzip.GzipFile(fileobj=StringIO(data)).read()
print "after gunzipping: %d bytes" % len(data)
except IOError:
print "not gzipped"
print "Sending %s to port and verifying that it comes back" % fn
n_out = n_in = 0
received = []
n_retries = 0
print "Writing %d (%x) bytes" % (len(data), len(data))
for c in data:
while 1:
v = ord(c)
print "%02x %c" % (v, c if 32 < v < 127 else '.')
try:
n = ser.write(c)
except SerialTimeoutException:
n = 0
print n
if not USE_TIMEOUT: break
# try receiving
r = ser.read(1000)
if r:
print "RECEIVED", `r`
received.append(r)
if n: break # next char
time.sleep(0.01)
print "RETRY",
n_retries += 1
print "Waiting for final serial loopback"
start = time.time()
while (time.time() - start) < 0.5:
r = ser.read()
if not r:
time.sleep(0.1)
continue
# we got something, so reset the timeout
start = time.time()
print `r`
received.append(r)
print "ALL SENT"
received = ''.join(received)
print "This is what we received:"
print `received`
n = len(received)
print "%d (%x) bytes (%d missing). %d retries." % (n, n, len(data) - n, n_retries)
| 217 | 0 | 23 |
3a4955569328dbcfd7e04048aa9f591781663725 | 5,531 | bzl | Python | tools/workspace/cc/repository.bzl | RobotLocomotion/drake-python3.7 | ae397a4c6985262d23e9675b9bf3927c08d027f5 | [
"BSD-3-Clause"
] | 2 | 2021-02-25T02:01:02.000Z | 2021-03-17T04:52:04.000Z | tools/workspace/cc/repository.bzl | RobotLocomotion/drake-python3.7 | ae397a4c6985262d23e9675b9bf3927c08d027f5 | [
"BSD-3-Clause"
] | null | null | null | tools/workspace/cc/repository.bzl | RobotLocomotion/drake-python3.7 | ae397a4c6985262d23e9675b9bf3927c08d027f5 | [
"BSD-3-Clause"
] | 1 | 2021-06-13T12:05:39.000Z | 2021-06-13T12:05:39.000Z | # -*- mode: python -*-
# vi: set ft=python :
"""
Identifies the C/C++ compiler by examining the presence or values of various
predefined C preprocessor macros. Identifies any compiler capable of compiling
C++ code that is supported by CMake 3.12.0.
Note that there are constraint_values @bazel_tools//tools/cpp:clang and
@bazel_tools//tools/cpp:gcc that could potentially distinguish between the
Clang and GCC compilers as an alternative to this approach, but as of Bazel
0.14.1, they appear not to be compatible with the autogenerated toolchain.
Example:
load("@drake//tools/workspace/cc:repository.bzl", "cc_repository")
cc_repository(name = "cc")
foo.bzl:
load("@cc//:compiler.bzl", "COMPILER_ID")
if "COMPILER_ID" == "AppleClang":
# Do something...
if "COMPILER_ID" == "Clang":
# Do something...
if "COMPILER_ID" == "GNU":
# Do something...
Argument:
name: A unique name for this rule.
"""
load("@bazel_tools//tools/cpp:unix_cc_configure.bzl", "find_cc")
load("@drake//tools/workspace:execute.bzl", "execute_or_fail")
cc_repository = repository_rule(
environ = [
"BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN",
"BAZEL_USE_CPP_ONLY_TOOLCHAIN",
"CC",
],
configure = True,
implementation = _impl,
)
| 32.535294 | 130 | 0.634786 | # -*- mode: python -*-
# vi: set ft=python :
"""
Identifies the C/C++ compiler by examining the presence or values of various
predefined C preprocessor macros. Identifies any compiler capable of compiling
C++ code that is supported by CMake 3.12.0.
Note that there are constraint_values @bazel_tools//tools/cpp:clang and
@bazel_tools//tools/cpp:gcc that could potentially distinguish between the
Clang and GCC compilers as an alternative to this approach, but as of Bazel
0.14.1, they appear not to be compatible with the autogenerated toolchain.
Example:
load("@drake//tools/workspace/cc:repository.bzl", "cc_repository")
cc_repository(name = "cc")
foo.bzl:
load("@cc//:compiler.bzl", "COMPILER_ID")
if "COMPILER_ID" == "AppleClang":
# Do something...
if "COMPILER_ID" == "Clang":
# Do something...
if "COMPILER_ID" == "GNU":
# Do something...
Argument:
name: A unique name for this rule.
"""
load("@bazel_tools//tools/cpp:unix_cc_configure.bzl", "find_cc")
load("@drake//tools/workspace:execute.bzl", "execute_or_fail")
def _impl(repository_ctx):
file_content = """# -*- python -*-
# DO NOT EDIT: generated by cc_repository()
# This file exists to make our directory into a Bazel package, so that our
# neighboring *.bzl file can be loaded elsewhere.
"""
repository_ctx.file(
"BUILD.bazel",
content = file_content,
executable = False,
)
# https://github.com/bazelbuild/bazel/blob/1.1.0/tools/cpp/cc_configure.bzl
if repository_ctx.os.environ.get("BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN", "0") == "1": # noqa
fail("Could NOT identify C/C++ compiler because CROSSTOOL is empty.")
if repository_ctx.os.name == "mac os x" and repository_ctx.os.environ.get("BAZEL_USE_CPP_ONLY_TOOLCHAIN", "0") != "1": # noqa
# https://github.com/bazelbuild/bazel/blob/1.1.0/tools/cpp/osx_cc_configure.bzl
cc = repository_ctx.path(Label("@local_config_cc//:wrapped_clang"))
result = execute_or_fail(repository_ctx, [
"xcode-select",
"--print-path",
])
developer_dir = result.stdout.strip()
result = execute_or_fail(repository_ctx, [
"xcrun",
"--show-sdk-path",
])
sdkroot = result.stdout.strip()
cc_environment = {
"DEVELOPER_DIR": developer_dir,
"SDKROOT": sdkroot,
}
else:
# https://github.com/bazelbuild/bazel/blob/1.1.0/tools/cpp/unix_cc_configure.bzl
cc = find_cc(repository_ctx, overriden_tools = {})
cc_environment = {}
executable = repository_ctx.path("identify_compiler")
execute_or_fail(repository_ctx, [
cc,
repository_ctx.path(
Label("@drake//tools/workspace/cc:identify_compiler.cc"),
),
"-o",
executable,
], environment = cc_environment)
result = execute_or_fail(repository_ctx, [executable])
output = result.stdout.strip().split(" ")
if len(output) != 3:
fail("Could NOT identify C/C++ compiler.")
compiler_id = output[0]
if repository_ctx.os.name == "mac os x":
supported_compilers = ["AppleClang"]
else:
supported_compilers = ["Clang", "GNU"]
# We do not fail outright here since even though we do not officially
# support them, Drake may happily compile with new enough versions of
# compilers that are compatible with GNU flags such as -std=c++17.
if compiler_id not in supported_compilers:
print("WARNING: {} is NOT a supported C/C++ compiler.".format(
compiler_id,
))
print("WARNING: Compilation of the drake WORKSPACE may fail.")
compiler_version_major = int(output[1])
compiler_version_minor = int(output[2])
# The minimum compiler versions should match those listed in both the root
# CMakeLists.txt and doc/developers.rst. We know from experience that
# compilation of Drake will certainly fail with versions lower than these,
# even if they happen to support the necessary compiler flags.
if compiler_id == "AppleClang":
if compiler_version_major < 12:
fail("AppleClang compiler version {}.{} is less than 12.0.".format(
compiler_version_major,
compiler_version_minor,
))
elif compiler_id == "Clang":
if compiler_version_major < 9:
fail("Clang compiler version {}.{} is less than 9.0".format(
compiler_version_major,
compiler_version_minor,
))
elif compiler_id == "GNU":
if compiler_version_major < 7 or (compiler_version_major == 7 and
compiler_version_minor < 5):
fail("GNU compiler version {}.{} is less than 7.5.".format(
compiler_version_major,
compiler_version_minor,
))
file_content = """# -*- python -*-
# DO NOT EDIT: generated by cc_repository()
COMPILER_ID = "{}"
COMPILER_VERSION_MAJOR = {}
COMPILER_VERSION_MINOR = {}
""".format(compiler_id, compiler_version_major, compiler_version_minor)
repository_ctx.file(
"compiler.bzl",
content = file_content,
executable = False,
)
cc_repository = repository_rule(
environ = [
"BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN",
"BAZEL_USE_CPP_ONLY_TOOLCHAIN",
"CC",
],
configure = True,
implementation = _impl,
)
| 4,177 | 0 | 23 |
ac8d55b4a4631b006db1221c62270ff9ddabfe32 | 3,107 | py | Python | src/clipper/misc.py | Sheer-Curiosity/vcdl2 | 95e3c750934df08022f5651e714e24c4d10700d7 | [
"MIT"
] | null | null | null | src/clipper/misc.py | Sheer-Curiosity/vcdl2 | 95e3c750934df08022f5651e714e24c4d10700d7 | [
"MIT"
] | null | null | null | src/clipper/misc.py | Sheer-Curiosity/vcdl2 | 95e3c750934df08022f5651e714e24c4d10700d7 | [
"MIT"
] | null | null | null | import os
# There is 100% a better way to do some of the logic in this function, but I really really do not care. | 29.037383 | 104 | 0.639846 | import os
def formatTimestamp(inputTimestamp: list) -> str:
	"""Render a four-component timestamp list as "HH:MM:SS.ff".

	Every component is zero-padded to two digits. The fourth component is
	presumably a fractional-seconds field (calculatePadding appends 0 there)
	-- confirm its unit before relying on it.

	Unlike the previous version, the caller's list is no longer mutated:
	the original aliased (not copied) the input and padded it in place.

	:param inputTimestamp: [hours, minutes, seconds, fraction], most
		significant first
	:return: formatted timestamp string
	"""
	padded = [str(component).zfill(2) for component in inputTimestamp]
	return f"{padded[0]}:{padded[1]}:{padded[2]}.{padded[3]}"
def calculatePadding(timestampPair: str, paddingInt: int):
	"""Expand a "start-end" timestamp pair by paddingInt seconds per side.

	Accepts "H:M:S" or "M:S" components; the start is clamped at 0:00:00
	rather than padded into negative time. A trailing 0 (fraction slot) is
	appended to each result.

	:param timestampPair: e.g. "1:00:10-1:00:20" or "90:30-91:10"
	:param paddingInt: seconds subtracted from start / added to end
	:return: [padded start, padded end], each as [h, m, s, 0]
	"""
	raw_start, raw_end = timestampPair.split('-')
	start = [int(piece) for piece in raw_start.split(':')]
	end = [int(piece) for piece in raw_end.split(':')]

	def _to_hms(ts):
		# M:S form -> H:M:S, carrying minutes >= 60 into hours.
		if len(ts) == 2:
			ts.insert(0, 0)
			while ts[1] >= 60:
				ts[1] -= 60
				ts[0] += 1

	_to_hms(start)
	_to_hms(end)

	# Clamp the start at zero instead of going before the video begins.
	if start[2] < paddingInt and start[1] == 0 and start[0] == 0:
		start[2] = 0
	else:
		start[2] -= paddingInt
	end[2] += paddingInt

	# Borrow from minutes/hours if the start seconds went negative.
	if start[2] < 0:
		start[2] += 60
		start[1] -= 1
		if start[1] < 0:
			start[1] += 60
			start[0] -= 1
	# Carry into minutes/hours if the end seconds overflowed.
	if end[2] >= 60:
		end[2] -= 60
		end[1] += 1
		if end[1] >= 60:
			end[1] -= 60
			end[0] += 1

	start.append(0)
	end.append(0)
	return [start, end]
# There is 100% a better way to do some of the logic in this function, but I really really do not care.
def parseTimestamps(timestampsInput: str, numVideoLinks: int, timePadding: int):
	"""Turn "[start-end],[start-end],..." into padded, merged clip lists.

	:param timestampsInput: comma-separated, bracket-wrapped start-end pairs
	:param numVideoLinks: number of source videos; more than one aborts
		the whole process via quit()
	:param timePadding: seconds of padding applied to both edges of a clip
	:return: (start timestamps, clip runtimes), both formatted by
		formatTimestamp
	"""
	initSplitList = timestampsInput.split(',')
	tsList = []
	if numVideoLinks > 1:
		# NOTE(review): multi-video input is unsupported; quit() terminates
		# the whole process instead of raising an error -- confirm intended.
		quit()
	else:
		# paddedTs alternates [start, end, start, end, ...] entries.
		paddedTs = []
		for i in initSplitList:
			tsList.append(i.strip('[]'))
		for ts in tsList:
			for out in calculatePadding(ts, timePadding):
				paddedTs.append(out)
		# Merge adjacent clips whose padded ranges touch: the inner
		# end/start boundary pair is dropped so both become one clip.
		for idx, val in enumerate(paddedTs):
			if idx < (len(paddedTs)-1) and idx % 2 != 0:
				if paddedTs[idx+1][0] <= paddedTs[idx][0]:
					if paddedTs[idx+1][1] <= paddedTs[idx][1]:
						if paddedTs[idx+1][2] <= paddedTs[idx][2]:
							if paddedTs[idx+1][3] <= paddedTs[idx][3]:
								# NOTE(review): this is an element-wise comparison, not a
								# strict chronological "next start <= current end" test
								# (e.g. [1,5,..] vs [0,59,..] is not detected) -- verify.
								paddedTs[idx+1] = 'OVERLAP'
								paddedTs[idx] = 'OVERLAP'
								print(f"[TIMESTAMPS]: Post-buffer duration overlap found, combining clips {idx-1} and {idx}")
		while 'OVERLAP' in paddedTs:
			paddedTs.remove('OVERLAP')
		startStamps = []
		endStamps = []
		runtimeStamps = []
		# Split the flat alternating list back into starts and ends.
		for idx2, val2 in enumerate(paddedTs):
			if idx2 % 2 != 0:
				endStamps.append(val2)
			else:
				startStamps.append(val2)
		# Runtime = end - start, with manual borrow from minutes/hours.
		for idx3, val3 in enumerate(startStamps):
			rtList = []
			for x in range(0, 4):
				rtList.append(endStamps[idx3][x] - val3[x])
			if rtList[2] < 0:
				rtList[2] += 60
				rtList[1] -= 1
			if rtList[1] < 0:
				rtList[1] += 60
				rtList[0] += 1
			runtimeStamps.append(rtList)
		for idx4, stmp in enumerate(startStamps):
			startStamps[idx4] = formatTimestamp(stmp)
			runtimeStamps[idx4] = formatTimestamp(runtimeStamps[idx4])
		return startStamps, runtimeStamps
def cleanup():
	"""Delete every file in ./vcdl_temp, then remove the now-empty directory."""
	# Snapshot regular files only (a subdirectory would make os.remove fail).
	tempdirFiles = [f for f in os.listdir('./vcdl_temp') if os.path.isfile(os.path.join('./vcdl_temp', f))]
	print('[CLEANUP]: Clearing temp directory...')
	for file in tempdirFiles:
		# '\r' + end='' keeps the progress output on a single line.
		print(f"[CLEANUP]: Removing file - {file}\r", end='')
		os.remove(f"./vcdl_temp/{file}")
	print()
os.rmdir('./vcdl_temp') | 2,902 | 0 | 91 |
20f2ff495097925fa6646d54d54484545bcd61c0 | 494 | py | Python | solved/q406.py | zao95/codingdojang-zao95-solving | 409d1696700b453b183947d7bda8abcbbfe95bc5 | [
"WTFPL"
] | 2 | 2021-01-06T14:02:14.000Z | 2022-02-19T17:38:16.000Z | solved/q406.py | zao95/codingdojang-zao95-solving | 409d1696700b453b183947d7bda8abcbbfe95bc5 | [
"WTFPL"
] | null | null | null | solved/q406.py | zao95/codingdojang-zao95-solving | 409d1696700b453b183947d7bda8abcbbfe95bc5 | [
"WTFPL"
] | null | null | null | # 코딩도장 문제풀이
# Question number. 406
# 게시판 페이징
# http://codingdojang.com/scode/406
# Author: Lee Jeongwoo
# Github name: zao95
# ========== Question ==========
# A씨는 게시판 프로그램을 작성하고 있다.
# A씨는 게시물의 총 건수와 한 페이지에 보여줄 게시물수를 입력으로 주었을 때 총 페이지수를 리턴하는 프로그램이 필요하다고 한다.
# 입력 : 총건수(m), 한페이지에 보여줄 게시물수(n) (단 n은 1보다 크거나 같다. n >= 1)
# 출력 : 총페이지수
# A씨가 필요한 프로그램을 작성하시오.
# ==============================
import math
m = int(input('총건수: '))
n = int(input('한페이지에 보여줄 게시물수: '))
ans = math.ceil(m/n)
print(ans) | 19.76 | 73 | 0.582996 | # 코딩도장 문제풀이
# Question number. 406
# 게시판 페이징
# http://codingdojang.com/scode/406
# Author: Lee Jeongwoo
# Github name: zao95
# ========== Question ==========
# A씨는 게시판 프로그램을 작성하고 있다.
# A씨는 게시물의 총 건수와 한 페이지에 보여줄 게시물수를 입력으로 주었을 때 총 페이지수를 리턴하는 프로그램이 필요하다고 한다.
# 입력 : 총건수(m), 한페이지에 보여줄 게시물수(n) (단 n은 1보다 크거나 같다. n >= 1)
# 출력 : 총페이지수
# A씨가 필요한 프로그램을 작성하시오.
# ==============================
import math
m = int(input('총건수: '))
n = int(input('한페이지에 보여줄 게시물수: '))
ans = math.ceil(m/n)
print(ans) | 0 | 0 | 0 |
b3d6ba8f4d2a38b761b1ccf8a43ed317164b9b13 | 1,532 | py | Python | trainings/migrations/0002_historicaltopic.py | pnwclw/cyfmazyr | dfeca513c7334335426d226ec3834af598b08b8c | [
"MIT"
] | 1 | 2020-07-18T11:20:29.000Z | 2020-07-18T11:20:29.000Z | trainings/migrations/0002_historicaltopic.py | panwaclaw/cyfmazyr | dfeca513c7334335426d226ec3834af598b08b8c | [
"MIT"
] | 8 | 2020-05-24T14:08:12.000Z | 2021-09-08T02:03:52.000Z | trainings/migrations/0002_historicaltopic.py | pnwclw/cyfmazyr | dfeca513c7334335426d226ec3834af598b08b8c | [
"MIT"
] | 1 | 2020-05-24T12:24:40.000Z | 2020-05-24T12:24:40.000Z | # Generated by Django 3.0.6 on 2020-05-24 00:32
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
| 41.405405 | 156 | 0.60705 | # Generated by Django 3.0.6 on 2020-05-24 00:32
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('trainings', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='HistoricalTopic',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('title', models.CharField(max_length=256)),
('order', models.PositiveIntegerField(default=0)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical topic',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
]
| 0 | 1,323 | 23 |
26de0ef6a7751c4dbc3cbe6d80b62e307586fbc9 | 3,095 | py | Python | numba/tests/test_enums.py | rs2/numba | ee78bfe3e66439197905551a451ea264704a3cdd | [
"BSD-2-Clause"
] | 4 | 2017-06-30T14:22:30.000Z | 2021-01-11T16:47:23.000Z | numba/tests/test_enums.py | rs2/numba | ee78bfe3e66439197905551a451ea264704a3cdd | [
"BSD-2-Clause"
] | 1 | 2017-12-21T23:31:59.000Z | 2017-12-29T16:56:05.000Z | numba/tests/test_enums.py | anton-malakhov/numba | 2e6c224c88774d03a99ad9c1ec4dacf1d4dd7ac7 | [
"BSD-2-Clause"
] | null | null | null | """
Tests for enum support.
"""
from __future__ import print_function
import enum
import numba.unittest_support as unittest
from numba import jit
from .support import TestCase, tag
from .enum_usecases import *
class TestEnum(BaseEnumTest, TestCase):
"""
Tests for Enum classes and members.
"""
values = [Color.red, Color.green]
pairs = [
(Color.red, Color.red),
(Color.red, Color.green),
(Shake.mint, Shake.vanilla),
(Planet.VENUS, Planet.MARS),
(Planet.EARTH, Planet.EARTH),
]
def test_identity(self):
"""
Enum with equal values should not compare identical
"""
pyfunc = identity_usecase
cfunc = jit(nopython=True)(pyfunc)
args = (Color.blue, Color.green, Shape.square)
self.assertPreciseEqual(pyfunc(*args), cfunc(*args))
class TestIntEnum(BaseEnumTest, TestCase):
"""
Tests for IntEnum classes and members.
"""
values = [Shape.circle, Shape.square]
pairs = [
(Shape.circle, Shape.circle),
(Shape.circle, Shape.square),
(RequestError.not_found, RequestError.not_found),
(RequestError.internal_error, RequestError.not_found),
]
if __name__ == '__main__':
unittest.main()
| 24.959677 | 74 | 0.627141 | """
Tests for enum support.
"""
from __future__ import print_function
import enum
import numba.unittest_support as unittest
from numba import jit
from .support import TestCase, tag
from .enum_usecases import *
def compare_usecase(a, b):
    # Evaluate equality, inequality, identity and non-identity in one call.
    equal = a == b
    unequal = a != b
    identical = a is b
    distinct = a is not b
    return equal, unequal, identical, distinct
def global_usecase(a):
    # Lookup of an enum member via a module-level global (Color comes from
    # the enum_usecases star import above).
    return a is Color.red
def identity_usecase(a, b, c):
    # Identity checks against members of three different enum classes; used
    # to verify that members with equal values do not compare identical.
    return (a is Shake.mint,
            b is Shape.circle,
            c is RequestError.internal_error,
            )
def make_constant_usecase(const):
    # Factory: the returned function closes over *const*, so the comparison
    # target is a closure freevar rather than a global.
    def constant_usecase(a):
        return a is const
    return constant_usecase
def return_usecase(a, b, pred):
    # Same selection as the original conditional expression, written as an
    # explicit branch.
    if pred:
        return a
    return b
def int_coerce_usecase(x):
    # Implicit coercion of intenums to ints
    # RequestError and Shape members participate directly in comparison
    # and arithmetic through their integer values.
    if x > RequestError.internal_error:
        return x - RequestError.not_found
    else:
        return x + Shape.circle
class BaseEnumTest(object):
    """Shared checks for enum support under @jit.

    Subclasses supply the `values` and `pairs` class attributes for their
    enum flavor (plain Enum vs IntEnum).
    """
    def test_compare(self):
        # Compiled comparison results must match pure-Python results.
        pyfunc = compare_usecase
        cfunc = jit(nopython=True)(pyfunc)
        for args in self.pairs:
            self.assertPreciseEqual(pyfunc(*args), cfunc(*args))
    def test_return(self):
        """
        Passing and returning enum members.
        """
        pyfunc = return_usecase
        cfunc = jit(nopython=True)(pyfunc)
        for pair in self.pairs:
            for pred in (True, False):
                args = pair + (pred,)
                self.assertIs(pyfunc(*args), cfunc(*args))
    def check_constant_usecase(self, pyfunc):
        # Helper: compare interpreted vs compiled for each sample member.
        cfunc = jit(nopython=True)(pyfunc)
        for arg in self.values:
            self.assertPreciseEqual(pyfunc(arg), cfunc(arg))
    def test_constant(self):
        # Constants referenced both as a global and as a closure freevar.
        self.check_constant_usecase(global_usecase)
        self.check_constant_usecase(make_constant_usecase(self.values[0]))
class TestEnum(BaseEnumTest, TestCase):
    """
    Tests for Enum classes and members.
    """
    # Sample members handed to the constant usecases.
    values = [Color.red, Color.green]
    # Operand pairs covering same member, same class, and distinct classes.
    pairs = [
        (Color.red, Color.red),
        (Color.red, Color.green),
        (Shake.mint, Shake.vanilla),
        (Planet.VENUS, Planet.MARS),
        (Planet.EARTH, Planet.EARTH),
    ]
    def test_identity(self):
        """
        Enum with equal values should not compare identical
        """
        pyfunc = identity_usecase
        cfunc = jit(nopython=True)(pyfunc)
        args = (Color.blue, Color.green, Shape.square)
        self.assertPreciseEqual(pyfunc(*args), cfunc(*args))
class TestIntEnum(BaseEnumTest, TestCase):
    """
    Tests for IntEnum classes and members.
    """
    # Sample members handed to the constant usecases.
    values = [Shape.circle, Shape.square]
    # Operand pairs covering same member, same class, and distinct classes.
    pairs = [
        (Shape.circle, Shape.circle),
        (Shape.circle, Shape.square),
        (RequestError.not_found, RequestError.not_found),
        (RequestError.internal_error, RequestError.not_found),
    ]
    def test_int_coerce(self):
        # Arithmetic/comparison on IntEnum members must coerce to int the
        # same way compiled as interpreted.
        pyfunc = int_coerce_usecase
        cfunc = jit(nopython=True)(pyfunc)
        for arg in [300, 450, 550]:
            self.assertPreciseEqual(pyfunc(arg), cfunc(arg))
if __name__ == '__main__':
unittest.main()
| 1,201 | 427 | 188 |
9bc4ae0eafe6d50697025168f1947b51dec6320f | 1,186 | py | Python | source/python/Deque.py | JoHyukJun/algorithm-analysis | 3eda22ce0eeb52490702206d73c04cff1eb3e72d | [
"Apache-2.0"
] | null | null | null | source/python/Deque.py | JoHyukJun/algorithm-analysis | 3eda22ce0eeb52490702206d73c04cff1eb3e72d | [
"Apache-2.0"
] | null | null | null | source/python/Deque.py | JoHyukJun/algorithm-analysis | 3eda22ce0eeb52490702206d73c04cff1eb3e72d | [
"Apache-2.0"
] | null | null | null | '''
main.py
Created by Jo Hyuk Jun on 2020
Copyright © 2020 Jo Hyuk Jun. All rights reserved.
'''
import sys
n = int(sys.stdin.readline().rstrip())
cmd = []
deque = []
for _ in range(n):
cmd.append(list(map(str, sys.stdin.readline().rstrip().split(' '))))
for i in range(n):
if (cmd[i][0] == 'push_front'):
deque.insert(0, cmd[i][1])
elif (cmd[i][0] == 'push_back'):
deque.append(cmd[i][1])
elif (cmd[i][0] == 'pop_front'):
if (len(deque) == 0):
print(-1)
else:
print(deque[0])
deque.pop(0)
elif (cmd[i][0] == 'pop_back'):
if (len(deque) == 0):
print(-1)
else:
print(deque[-1])
deque.pop()
elif (cmd[i][0] == 'size'):
print(len(deque))
elif (cmd[i][0] == 'empty'):
if (len(deque) == 0):
print(1)
else:
print(0)
elif (cmd[i][0] == 'front'):
if (len(deque) == 0):
print(-1)
else:
print(deque[0])
elif (cmd[i][0] == 'back'):
if (len(deque) == 0):
print(-1)
else:
print(deque[-1]) | 21.563636 | 72 | 0.441821 | '''
main.py
Created by Jo Hyuk Jun on 2020
Copyright © 2020 Jo Hyuk Jun. All rights reserved.
'''
import sys
n = int(sys.stdin.readline().rstrip())
cmd = []
deque = []
for _ in range(n):
cmd.append(list(map(str, sys.stdin.readline().rstrip().split(' '))))
for i in range(n):
if (cmd[i][0] == 'push_front'):
deque.insert(0, cmd[i][1])
elif (cmd[i][0] == 'push_back'):
deque.append(cmd[i][1])
elif (cmd[i][0] == 'pop_front'):
if (len(deque) == 0):
print(-1)
else:
print(deque[0])
deque.pop(0)
elif (cmd[i][0] == 'pop_back'):
if (len(deque) == 0):
print(-1)
else:
print(deque[-1])
deque.pop()
elif (cmd[i][0] == 'size'):
print(len(deque))
elif (cmd[i][0] == 'empty'):
if (len(deque) == 0):
print(1)
else:
print(0)
elif (cmd[i][0] == 'front'):
if (len(deque) == 0):
print(-1)
else:
print(deque[0])
elif (cmd[i][0] == 'back'):
if (len(deque) == 0):
print(-1)
else:
print(deque[-1]) | 0 | 0 | 0 |
231943aae2567eb84cdccf69cb0d6a85b0191995 | 4,038 | py | Python | cinder/tests/scheduler/test_host_filters.py | Thingee/cinder | 721e657073d73d639619f839d935a463d32b59b5 | [
"Apache-2.0"
] | 1 | 2015-11-25T10:18:28.000Z | 2015-11-25T10:18:28.000Z | cinder/tests/scheduler/test_host_filters.py | NeCTAR-RC/cinder | e01da23febc530de218ed8eed6737add150c1587 | [
"Apache-2.0"
] | null | null | null | cinder/tests/scheduler/test_host_filters.py | NeCTAR-RC/cinder | e01da23febc530de218ed8eed6737add150c1587 | [
"Apache-2.0"
] | null | null | null | # Copyright 2011 OpenStack Foundation # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler Host Filters.
"""
import mock
from cinder import context
from cinder.openstack.common import jsonutils
from cinder.openstack.common.scheduler import filters
from cinder import test
from cinder.tests.scheduler import fakes
class HostFiltersTestCase(test.TestCase):
"""Test case for host filters."""
@mock.patch('cinder.utils.service_is_up')
@mock.patch('cinder.utils.service_is_up')
@mock.patch('cinder.utils.service_is_up')
@mock.patch('cinder.utils.service_is_up')
| 43.419355 | 78 | 0.610451 | # Copyright 2011 OpenStack Foundation # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler Host Filters.
"""
import mock
from cinder import context
from cinder.openstack.common import jsonutils
from cinder.openstack.common.scheduler import filters
from cinder import test
from cinder.tests.scheduler import fakes
class HostFiltersTestCase(test.TestCase):
    """Test case for host filters."""

    def setUp(self):
        super(HostFiltersTestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake')
        self.json_query = jsonutils.dumps(
            ['and',
             ['>=', '$free_capacity_gb', 1024],
             ['>=', '$total_capacity_gb', 10 * 1024]])
        # This has a side effect of testing 'get_filter_classes'
        # when specifying a method (in this case, our standard filters)
        handler = filters.HostFilterHandler('cinder.scheduler.filters')
        self.class_map = dict((filter_cls.__name__, filter_cls)
                              for filter_cls in handler.get_all_classes())

    def _fake_host(self, **capabilities):
        # Build a FakeHostState named 'host1' with an enabled service plus
        # whatever extra capability values an individual test needs.
        capabilities.setdefault('updated_at', None)
        capabilities.setdefault('service', {'disabled': False})
        return fakes.FakeHostState('host1', capabilities)

    @mock.patch('cinder.utils.service_is_up')
    def test_capacity_filter_passes(self, _mock_serv_is_up):
        # 200 GB free is plenty for a 100 GB request.
        _mock_serv_is_up.return_value = True
        filt_cls = self.class_map['CapacityFilter']()
        host = self._fake_host(free_capacity_gb=200)
        self.assertTrue(filt_cls.host_passes(host, {'size': 100}))

    @mock.patch('cinder.utils.service_is_up')
    def test_capacity_filter_fails(self, _mock_serv_is_up):
        # 120 GB free with a 20% reservation cannot satisfy 100 GB.
        _mock_serv_is_up.return_value = True
        filt_cls = self.class_map['CapacityFilter']()
        host = self._fake_host(free_capacity_gb=120,
                               reserved_percentage=20)
        self.assertFalse(filt_cls.host_passes(host, {'size': 100}))

    @mock.patch('cinder.utils.service_is_up')
    def test_capacity_filter_passes_infinite(self, _mock_serv_is_up):
        # 'infinite' capacity always passes.
        _mock_serv_is_up.return_value = True
        filt_cls = self.class_map['CapacityFilter']()
        host = self._fake_host(free_capacity_gb='infinite')
        self.assertTrue(filt_cls.host_passes(host, {'size': 100}))

    @mock.patch('cinder.utils.service_is_up')
    def test_capacity_filter_passes_unknown(self, _mock_serv_is_up):
        # 'unknown' capacity gives the host the benefit of the doubt.
        _mock_serv_is_up.return_value = True
        filt_cls = self.class_map['CapacityFilter']()
        host = self._fake_host(free_capacity_gb='unknown')
        self.assertTrue(filt_cls.host_passes(host, {'size': 100}))
| 2,753 | 0 | 131 |
3cd563272852f18a189a12f8188b8f91d1bab57d | 1,330 | py | Python | day001-014/day005/day-5-1-exercise.py | sysarghir/100-days-of-python | 8edc7ce102429333d4abd80ac3f4067e4028c66d | [
"MIT"
] | null | null | null | day001-014/day005/day-5-1-exercise.py | sysarghir/100-days-of-python | 8edc7ce102429333d4abd80ac3f4067e4028c66d | [
"MIT"
] | null | null | null | day001-014/day005/day-5-1-exercise.py | sysarghir/100-days-of-python | 8edc7ce102429333d4abd80ac3f4067e4028c66d | [
"MIT"
] | null | null | null | # Average Height
# Instructions
# You are going to write a program that calculates the average student height from a List of heights.
# e.g. `student_heights = [180, 124, 165, 173, 189, 169, 146]`
# The average height can be calculated by adding all the heights together and dividing by the total number of heights.
# e.g.
# 180 + 124 + 165 + 173 + 189 + 169 + 146 = **1146**
# There are a total of **7** heights in `student_heights`
# 1146 ÷ 7 = **163.71428571428572**
# Average height rounded to the nearest whole number = **164**
# **Important** You should not use the `sum()` or `len()` functions in your answer. You should try to replicate their functionality using what you have learnt about for loops.
# Example Input
# ```
# 156 178 165 171 187
# ```
# In this case, student_heights would be a list that looks like: [156, 178, 165, 171, 187]
# Example Output
# ```
# 171
# ```
#
#
# 🚨 Don't change the code below 👇
student_heights = input("Input a list of student heights ").split()
for n in range(0, len(student_heights)):
    student_heights[n] = int(student_heights[n])
# 🚨 Don't change the code above 👆

# Write your code below this row 👇
# Tally the total and the count by hand, since the exercise forbids the
# sum() and len() built-ins. (The previous version named its accumulator
# `sum`, which shadowed the built-in of the same name.)
total_height = 0
height_count = 0
for height in student_heights:
    height_count += 1
    total_height += height

# Average rounded to the nearest whole number, as the exercise asks.
average = round(total_height / height_count)
print(average)
| 28.297872 | 176 | 0.661654 | # Average Height
# Instructions
# You are going to write a program that calculates the average student height from a List of heights.
# e.g. `student_heights = [180, 124, 165, 173, 189, 169, 146]`
# The average height can be calculated by adding all the heights together and dividing by the total number of heights.
# e.g.
# 180 + 124 + 165 + 173 + 189 + 169 + 146 = **1146**
# There are a total of **7** heights in `student_heights`
# 1146 ÷ 7 = **163.71428571428572**
# Average height rounded to the nearest whole number = **164**
# **Important** You should not use the `sum()` or `len()` functions in your answer. You should try to replicate their functionality using what you have learnt about for loops.
# Example Input
# ```
# 156 178 165 171 187
# ```
# In this case, student_heights would be a list that looks like: [156, 178, 165, 171, 187]
# Example Output
# ```
# 171
# ```
#
#
# 🚨 Don't change the code below 👇
student_heights = input("Input a list of student heights ").split()
for n in range(0, len(student_heights)):
    student_heights[n] = int(student_heights[n])
# 🚨 Don't change the code above 👆

# Write your code below this row 👇
# Tally the total and the count by hand, since the exercise forbids the
# sum() and len() built-ins. (The previous version named its accumulator
# `sum`, which shadowed the built-in of the same name.)
total_height = 0
height_count = 0
for height in student_heights:
    height_count += 1
    total_height += height

# Average rounded to the nearest whole number, as the exercise asks.
average = round(total_height / height_count)
print(average)
| 0 | 0 | 0 |
3ff0f7ca13cf83cd7659cdf2a61fdb4cd161d57c | 89,955 | py | Python | punkemon v-0-0-6.py | mgavrin/Punkemon | cf7831021ef7bb977a65f61f93e029080ef26ff8 | [
"MIT"
] | null | null | null | punkemon v-0-0-6.py | mgavrin/Punkemon | cf7831021ef7bb977a65f61f93e029080ef26ff8 | [
"MIT"
] | null | null | null | punkemon v-0-0-6.py | mgavrin/Punkemon | cf7831021ef7bb977a65f61f93e029080ef26ff8 | [
"MIT"
] | null | null | null | ################################# PUNKEMON YO!!!!! ###############################
########## Imports from elsewhere
import pygame
from pygame.locals import *
import random
from random import *
import math
from math import *
import os
import string
# Sibling source files are loaded by exec-ing their text straight into this
# module's global namespace (rather than importing them), so everything they
# define becomes a global here. NOTE(review): exec runs arbitrary code from
# those files on disk -- fine for a local game, but worth knowing when editing.
fontLoaderCode=open("menu sprite code.py")
exec(fontLoaderCode.read())
battlesCode=open("Battles.py")
# NOTE(review): this exec is handed the open file object, not .read() text
# like the others -- valid only under Python 2's exec statement; confirm it
# was not meant to be exec(battlesCode.read()).
exec(battlesCode)
monCode=open("Mons.py")
exec(monCode.read())
worldCode=open("World.py")
exec(worldCode.read())
######### this thing
# Canned pygame KEYDOWN events for the A/B buttons (keys a/s) and the four
# arrows -- presumably injected to simulate button presses; confirm in the
# input-handling code.
fakeAPress=pygame.event.Event(KEYDOWN,{"key":K_a})
fakeBPress=pygame.event.Event(KEYDOWN,{"key":K_s})
fakeUpPress=pygame.event.Event(KEYDOWN,{"key":K_UP})
fakeDownPress=pygame.event.Event(KEYDOWN,{"key":K_DOWN})
fakeLeftPress=pygame.event.Event(KEYDOWN,{"key":K_LEFT})
fakeRightPress=pygame.event.Event(KEYDOWN,{"key":K_RIGHT})
###################### Action Items for future work sessions #############
###bugfixes
#stop duplicate revive effect messages.
#add null messages so you can take out HP notifications without a no-messages error
###feature expansions
#numerical HP display
#XP to next level display
#menus in different places on the screen?
#write the catch formula
#expand on start menu, detail team screen and wikidex
#make the team overview a special menu subclass
#choice menu widths (what is this? I was too vague)
#holding down buttons
###Ambitious stuff!
#background music (less ambitious than frivolous but still)
#make a level generator!
#do the sprite work for battles (requires an Artist)
#saving and loading games
############Story notes muthafuckas!
###The first pokemon pusher (local equivalent of pokemart) is named Marty. The others are named increasingly weird spellings of Marty.
###The Martys get their shit from 3d printers
###
###Pokedex and PC system belong to Typha's university; she sneaks you access and
###charges you to store mons above the party five.
###Dex is Typha's thesis project; you and rival are collecting data for her.
###Rival's and your seen-punkemon/caught-punkemon appear in the same dex with a mark for who caught them;
###contest between you and Rival to get the most--for <s>SCIENCE</s> bragging rights!
###"Oak: "Now's not the time for that!""-->"Typha: "What are you smoking? You can't use that now!""
###
###Gym type-->city name:normal=granite, fire=citrine, water=sapphire, grass=peridot, poison=amethyst,
###electric=topaz, psychic=alexandrite, dragon=opal, ice=diamond, flying=lapis,
###ground=axinite, rock=geode, bug=emerald, fighting=stibnite, ghost=obsidian
###some random other town: malachite
###town you start out in: pearl
###4 are mandatory and then pick 4 of the other 11
###
###one of the gyms is in the middle of nowhere by the side of the route,
###in a place too small to show up on your town map. The gym leader is a hermit.
###Maybe this is the ground gym or the fighting gym or something.
###
###Electric gym is locked with a sign saying "gone to grocery store, back in 15 minutes" (and pointer npc near gym)
###back of note has grocery list, which you automatically take on second reading
###go to grocery store, gym leader is trying to remember what he wanted
###give him list, he runs off and reappears at gym
###if you only read it once, or never read it, he comes back 20 minutes after you enter town
###if you helped him find his list, "thanks for helping me! I got the stuff to make my awesome barbeque!"
###if he came back on timer, "Aw man! I forgot the stuff I needed to make my awesome barbeque!"
###either way: but that won't stop me from FRYING YOU ALIVE!" -->Fight
###dex puzzle inside electric gym: visible live wires you have to navigate around in a maze,
###accidentally walk into one and you get knocked out and wake up at maze start
###
###Fire gym leader is Azula, Water gym leader is Akula, they have the same sprite
###
###Cities are named after birthstones, one of them is Alexandrite and it has a big library
###Typha is in the library at a conference and sends you to go get a book
###And then maybe she gives you the Master Ball her advisor's been developing
###
###The optional hometown (late-unlock) gym leader is your mom (credit to imzoruaok from tumblr)
###If your reputation stat is too low, you auto-lose and are Grounded Forever and have to reload your last save.
###five or six trainers along the way make "Your mom" jokes and one or two of them are shit like "you fight like your mom" that turn out to be true
################### Global functions
################### Menu stuff ############################
allMenus=[]  #global registry; the menu class appends every menu instance it creates
########## Incidental menu-class functions that get run during menus at some point in the game. The execOnAs and execOnSs of various menus.
########## These will probably proliferate. It's alright. Didn't Tom Lehrer tell you? Proliferation is the word of the day.
########## Battle menus: simplified menus for use in battle.
########## Currently used to display most of the available battle information, because graphics aren't done yet.
#################### Puttin' on the MOVES ###################
# Load the move definitions (exec'd into module globals like the other
# sibling files above).
moveCode=open("Moves.py")
exec(moveCode.read())
#don't forget to INIT IT!
# Type-effectiveness lookup; typeMatchup() is expected to be defined by
# the exec'd Moves.py.
typeDict=typeMatchup()
############ People (player and other trainers and NPCs)
##############Items ##############################
# Item instances: each takes a display name, a use-effect method, a
# legality-check method, and two boolean flags (meaning not visible here --
# presumably battle/overworld usability; confirm against the item class).
itemDict={}
potion=item("Potion",item.usePotion,item.potionLegal,True,True)
superPotion=item("Super potion",item.useSuperPotion,item.potionLegal,True,True)
repel=item("Repel",item.useRepel,item.repelLegal,False,False)
revive=item("Revive",item.useRevive,item.reviveLegal,True,True)
punkeball=item("Punkeball",item.usePunkeball,item.ballLegal,True,False)
#more balls
waterStone=item("Water stone",item.useWaterStone,item.waterStoneLegal,True,True)
fireStone=item("Fire stone",item.useFireStone,item.fireStoneLegal,True,True)
thunderStone=item("Thunder stone",item.useThunderStone,item.thunderStoneLegal,True,True)
# NOTE(review): 'useMoontone' looks like a typo for 'useMoonStone' --
# confirm against the item class definition before renaming.
moonStone=item("Moon stone",item.useMoontone,item.moonStoneLegal,True,True)
leafStone=item("Leaf stone",item.useLeafStone,item.leafStoneLegal,True,True)
##############Sprites! ###########################
# Glyph -> terrain-code table for map squares. Per the terrain notes in the
# commented-out sprite class below, the codes mean: 0=passable,
# 1=impassable, 2=screen change, 3=encounter, 4=water.
terrainDict={"T":1,"x":1,"-":0,"G":3,"w":4,"O":2," ":0,"B1":1}
#human-readable terrain dict: T=tree, x=rock, G=tall grass, w=water
##class sprite:
## def __init__(self,longName,screenName,fileLocation,screen,pos,terrain=False,destination=False):
## self.longName=longName #(e.g. "tree", "mart", "rock")
## self.screenName=screenName#(e.g. T, X, @)
## self.fileLoc=fileLocation #file name and any necessary path
## self.screen=screen
## self.pos=pos #A two-element list with the coordinates of the top left corner (from top left of screen, +y is down)
## self.drawMap=self.screen.drawMap
## self.drawMap[pos[0]][pos[1]]=self.screenName #put the square in the draw map
## self.terrain=terrain
## #0=passable, 1=impassable, 2=screen change, 3=encounter, 4=water
## self.destination=destination
## #if not False, this will be a 2-element list with new world instance and initial player coordinates upon entering that world
## self.terrainMap=self.screen.terrainMap
## self.terrainMap[pos[0]][pos[1]]=self.terrain #put the square in the terrain map
######################### Building and its subclasses
##We need to work out what we were doing here and make sure it isn't stupid or half-finished.
############################# screenChanger
############# Actionables npcs, enemy trainers, cuttable trees, item balls, wandering punkemon, stuff you walk up to and press A basically
################ init central for worlds, buildings, entrances, and associate maps
#general policy: initialize all portals with false,
#then the buildings on the maps,
#then the mon seeds,
#then the maps themselves,
#then create the worlds containing the buildings,
#then init the portals again with their destinations set.
#Otherwise you get circular dependencies.
########## Entrances with FALSE
O1=screenChanger("O1","door",False,[1,1])
########## Buildings
B1=testBuilding(O1)
########## Maps
testMap=[ #obsolete one-char map design
["x","x","x","x","x","x"],
["x","G","G","G","G","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x","T","T","-","-","x"],
["x",B1," "," "," ","x"],
["x"," "," "," "," ","x"],
["x",O1," "," "," ","x"],
["x","x","x","x","x","x"]
]
blankMap=[ #obsolete one-char map design
["x","x","x","x","x","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x","x","x","x","x","x"]
]
doubleMap=[
[" G"," G"," G"," G"," G"," G"],
["xG"," G"," G"," G"," G","xG"],
["x-"," -"," -"," -"," -","x-"],
["x-"," -"," -"," -"," -","x-"],
["x-","T ","T "," -"," -","x-"],
["x "," "," "," "," ","x "],
["x "," "," "," "," ","x "],
["x "," "," "," "," ","x "],
["x ","x ","x ","x "," ","x "]
]
buildingMap=[
[" G"," G"," G"," G"," G"," G"],
["xG"," G"," G"," G"," G","xG"],
["x-"," -"," -"," -"," -","x-"],
["x-"," -"," -"," -"," -","x-"],
["x-","T ","T "," -"," -","x-"],
["x ",(B1," ")," "," "," ","x "],
["x "," "," "," "," ","x "],
["x "," "," "," "," ","x "],
["x ","x ","x ","x "," ","x "]
]
############Screen and high-level "running the game" stuff##############
#runs at start of screen, conducts background setup before first loop
#find the first valid input and pass to input processor
#if no valid input, pass Null
#process the input
#################Generating individual things
###### Global variables (semi-permanent)
IDnum=0 #increment this when a new punkemon is generated
numMoves=4  #moves per punkemon
pixel=15 #side length of sprite grid unit in pixels
screenWidth=25  #screen size in grid units
screenHeight=21
# Flavor lines -- presumably shown when sending out the next mon; confirm
# against the battle code.
encourageList=["It's not over!","Get 'em!","I choose you!","You can do it!"]
# The player character. The starting Squirtle is a stand-in until the real
# starter is picked in the intro (see starterMonChoice below).
placeholderSquirtle=Squirtle(8,"Squirtle")
Red=PC("Red","female",[placeholderSquirtle],20) # Squirtle is a placeholder. You needn't start with Squirtle if you don't want to. *coughbutyoushouldcough*
# HUD images: team-status pokeballs and status-condition flag icons,
# loaded from the sprites/ directory.
pokeball_team=pygame.image.load(os.path.join("sprites","pokeball_team.png"))
pokeball_injured=pygame.image.load(os.path.join("sprites","pokeball_injured.png"))
pokeball_faint=pygame.image.load(os.path.join("sprites","pokeball_faint.png"))
poisonFlag=pygame.image.load(os.path.join("sprites","flagPsn.png"))
burnedFlag=pygame.image.load(os.path.join("sprites","flagBrn.png"))
frozenFlag=pygame.image.load(os.path.join("sprites","flagFrz.png"))
sleepFlag=pygame.image.load(os.path.join("sprites","flagSlp.png"))
paralyzedFlag=pygame.image.load(os.path.join("sprites","flagPar.png"))
confusedFlag=pygame.image.load(os.path.join("sprites","flagCon.png"))
# Status-name -> flag-icon lookup used to draw condition markers.
statusFlags={"poisoned":poisonFlag,"burned":burnedFlag,"frozen":frozenFlag,
             "sleep":sleepFlag,"paralyzed":paralyzedFlag,"confused":confusedFlag}
rivalName="Should Not Display"  #placeholder -- presumably overwritten when the rival is named; confirm
###### Menu instances (self,oplist,mode,execOnA,execOnS,rollable=False,oplistConstructor=False,screen=False) sorted by world or speaker
# Menu positional args (see the note above and menu.__init__):
# (oplist, mode, execOnA, execOnS, rollable=False, oplistConstructor=False,
# screen=False). execOnA/execOnS are Python source strings -- presumably
# exec'd when the A or S button is pressed; confirm in the menu input code.
placeholderMenu=menu(["You should never see this."],"dialog","self.screen.switchTo('world')","self.screen.switchTo('world')")
########### Typha menus
# The intro conversation: each menu's execOnA chains to the next via
# self.replaceMenu(...), forming the scripted dialog tree.
falseChoice=menu(["Boy","Girl"],"choice","Red.gender=self.oplist[self.curPos-1]\nself.replaceMenu(boy)","pass")
nickChoice=menu(["Choose a nickname:","ASSHAT","ASSFACE","BUTTHAT","BUTTFACE","FACEHAT","ASSBUTT",'"GARY"'],"titledChoice","garyActionable.trainer.name=self.oplist[self.curPos-1]\ngaryBefore.oplist[0]=garyActionable.trainer.name+garyBefore.oplist[0]\nself.replaceMenu(menuDict[self.oplist[self.curPos-1]])","pass")
starterMonChoice=menu(["Bulbasaur","Charmander","Squirtle"],"choice","self.pickStarter(self.oplist[self.curPos-1])","pass")
noDice=menu(["Since it seems I can't talk either of you two out of it~","Your adventure in the world of PUNKEMON fighting starts NOW. Grab a mon and get going!"],"dialog","self.replaceMenu(starterMonChoice)","pass")
doItAnyway=menu(["You can't scare me.","I'll be the best!"],"choice","self.replaceMenu(noDice)","pass")
talkOut=menu(["I'll tell you what I told him:\nThe fighting circuit ain't no nursery school.","You've got a better chance of ending up in jail or a body bag than as a PUNKEMON CHAMPION."],"dialog","self.replaceMenu(doItAnyway)","pass")
Intro=menu(["Yo!\nWelcome to the world of Punkemon~","My name is TYPHA.\nPeople in this hood, they call me the PUNKEMON PROFESSA.",
            "There are creatures called PUNKEMON all up in dis world.","Some people think PUNKEMON are monsters.\nAin't totally wrong~","Some people keep 'em as pets.\nOthers use them in fights.",
            "Me, I used to do that.\nNow I'm goin' straight.","I'm gonna study PUNKEMON as a profession.\nLab coat and everything.","When you're hiding behind that computer, it's hard to tell who you are.",
            "Are you a boy, or a girl?"],"dialog","self.replaceMenu(falseChoice)","pass")
boy=menu(["You remember my little bro.\nYou've been at each other's throats ever since you were kids.","What was your charming nickname for him again?"],"dialog","self.replaceMenu(nickChoice)","pass")
girl=boy #code as political statement, or lazy programmer? #bothisgood
#The above line is dead code, but I haven't deleted it because I want to keep the joke.
asshat=menu(['Oh, yeah. "Asshat."Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
assface=menu(['Oh, yeah. "Assface."Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
butthat=menu(['Oh, yeah. "Butthat." Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
buttface=menu(['Oh, yeah. "Buttface." Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
facehat=menu(['Oh, yeah. "Facehat." Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
assbutt=menu(['Oh, yeah. "Assbutt." Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
GaryNickname=menu(['Oh, yeah. "Gary". Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
########### Start menu and its descendents
# Menus with oplist=False build their option list at display time from the
# oplistConstructor expression string (e.g. the player's current team).
start=menu(["Punkemon","Wikidex","Items","longtest","longtesttitled","Stats","Save"],"choice","self.addToMenuStack(menuDict[self.oplist[self.curPos-1]])","self.screen.switchTo('world')",True)
startPunkemon=menu(False,"choice","pass","self.backUpMenuStack()",True,"list(Red.teamAsString())")
startWikidex=menu(False,"dialog","pass","self.backUpMenuStack()",True,"Red.wikidexAsList()")
startItems=menu(False,"choice","self.selectItemOutsideBattle()","self.backUpMenuStack()",True,"start.displayItemsList()")
itemChooseMon=menu(False,"choice","self.itemOutsideBattle(self.curPos-1)","self.backUpMenuStack()",True,"Red.teamAsString()")
longtest=menu(["1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16","17","18","19","20","21","22","23","24","25","26","27","28","29"],"choice","self.backUpMenuStack()","self.backUpMenuStack()")
longtesttitled=menu(["Title","1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16","17","18","19","20","21","22","23","24","25","26","27","28","29"],"titledChoice","self.backUpMenuStack()","self.backUpMenuStack()")
startStats=menu(["Stats not implemented yet"],"dialog","self.backUpMenuStack()","self.backUpMenuStack()")
saveGame=menu(["all is vanity\nand the pursuit\n of the wind\nyou cannot save yet"],"dialog","self.backUpMenuStack()","self.backUpMenuStack()")
########### Menus from the inescapableHellscape test world
despairSign=menu(["There is no escape from the inescapable hellscape.","Not for you~\n ~not for him."],"dialog","self.screen.switchTo('world')","self.screen.switchTo('world')")
# The missing speaker name here is intentional: nickChoice's execOnA
# prepends the chosen nickname to garyBefore.oplist[0].
garyBefore=menu([": Hey! How did you get here?"],"dialog","self.screen.processResponse(('battle',Gary))","self.screen.processResponse(('battle',Gary))",False)
garyAfter=menu(["Gary: Aww, man!"],"dialog","self.screen.switchTo('world')","self.screen.switchTo('world')")
# Maps the option strings shown in menus to the menu object they open;
# looked up by execOnA strings such as start's addToMenuStack call.
menuDict={"Boy": boy,"Girl":girl,"FalseChoice":falseChoice,
          "nickChoice":nickChoice,"ASSHAT":asshat,"ASSFACE":assface,"BUTTHAT":butthat,"BUTTFACE":buttface,"FACEHAT":facehat,"ASSBUTT":assbutt,'"GARY"':GaryNickname,
          "talkOut":talkOut,"doItAnyway":doItAnyway,"noDice":noDice, "You can't scare me.":noDice,"I'm gonna be the best!":noDice,
          "Punkemon":startPunkemon,"Wikidex":startWikidex,"Items":startItems,"longtest":longtest,"longtesttitled":longtesttitled,"Stats":startStats,"Save":saveGame}
###check double type later
######Pokemon instance creation
##Initialize all pokemon with: level, name (optional), trainer (optional)
# Mon instances: constructor args are (level, nickname); the evolved-looking
# nicknames on the 20/50-level copies are just names, not real evolutions.
starterBulbasaur=Bulbasaur(8,"Bulbasaur")
betaBulbasaur=Bulbasaur(20,"Ivysaur")
powerBulbasaur=Bulbasaur(50,"Venusaur")
starterCharmander=Charmander(8,"Charmander")
betaCharmander=Charmander(20,"Charmeleon")
powerCharmander=Charmander(50,"Charizard")
starterSquirtle=Squirtle(8,"Squirtle")
# NOTE(review): "Wortortle" is likely a misspelling of "Wartortle" -- it is
# a runtime display string, so confirm before changing it.
betaSquirtle=Squirtle(20,"Wortortle")
powerSquirtle=Squirtle(50,"Blastoise")
derpy=Derp(30,"derpy")
Rattata6=Rattata(6,"Rattata")
Pidgey6=Pidgey(6,"Pidgey")
hovis=Hovisquirrel(6,"Hovisquirrel")
hypnotoad=Hypnotoad(6,"Hypnotoad")
########## Mon seeds
# monSeed({mon: weight, ...}, rate) -- encounter tables for worlds.
Rattata5=Rattata(5,"Rattata")
Pidgey5=Pidgey(5,"Pidgey")
basicRouteSeed=monSeed({Pidgey5:1,Rattata5:1},10)
allRattataSeed=monSeed({Rattata:1},10) #fuck pidgeys, I'm trying to debug here
starterSeed=monSeed({starterBulbasaur:1,starterCharmander:1,starterSquirtle:1},20)
########## Worlds
#inescapableHellscape=world(False,testMap,6,9,basicRouteSeed,False)
emptyHellscape=world(False,blankMap,6,9)
doubleHellscape=world(False,doubleMap,6,9,basicRouteSeed,False," w")
inescapableHellscape=world(False,buildingMap,6,9,starterSeed,False," w") #change back to basicRouteSeed later
########## Entrances with INSIDES
# Second phase of portal init: now that the world exists, wire it up.
O1.destination=inescapableHellscape
######Hard sets of things that should be dynamically generated (Yeah testing!)
Red.inventory["Potion"]=5
Red.inventory["Super potion"]=5
Red.inventory["Repel"]=1
Red.inventory["Revive"]=4
Red.inventory["Punkeball"]=5
Red.inventory["Water stone"]=1
Red.inventory["Fire stone"]=1
# The rival trainer and the interactable objects in the test world.
Gary=character([starterBulbasaur],"Gary","wait for it",100,garyBefore,garyAfter,"normal")
garyActionable=NPCTrainer([4,0],"red",Gary,"West")
signActionable=NPC("sign",[0,0],despairSign,"sign","none")
inescapableHellscape.actionables.append(garyActionable)
inescapableHellscape.actionables.append(signActionable)
game=screen(screenWidth,screenHeight,inescapableHellscape) #START
#############List of Abominations Unto Nuggan
#Squirtles that think they're Charmanders
#Charmanders that know electric moves
#Everything is named Bulbasaur
#The number of times my computer crashed while I was coding this
#Rattatas attacking themselves
#bool("False")=True
#circular dependencies, because they involve circular dependence
#up and down arrows being interpreted as weird non-ASCII characters
#trying to navigate the battle menus based on the first letter of each of the first two options
#C h a r m a n d e r r a n a w a y !
#Charmander learning Splash...twice.
#eternal rival battle
#Two Garys. That's 100% more Garys than we had yesterday and 100% more Garys than we want. (And none of them is Garys Vakarian.)
#health bar aspect ratios of 12:1 or 12:5
| 44.731477 | 314 | 0.61264 | ################################# PUNKEMON YO!!!!! ###############################
########## Imports from elsewhere
import pygame
from pygame.locals import *
import random
from random import *
import math
from math import *
import os
import string
fontLoaderCode=open("menu sprite code.py")
exec(fontLoaderCode.read())
battlesCode=open("Battles.py")
exec(battlesCode)
monCode=open("Mons.py")
exec(monCode.read())
worldCode=open("World.py")
exec(worldCode.read())
######### this thing
fakeAPress=pygame.event.Event(KEYDOWN,{"key":K_a})
fakeBPress=pygame.event.Event(KEYDOWN,{"key":K_s})
fakeUpPress=pygame.event.Event(KEYDOWN,{"key":K_UP})
fakeDownPress=pygame.event.Event(KEYDOWN,{"key":K_DOWN})
fakeLeftPress=pygame.event.Event(KEYDOWN,{"key":K_LEFT})
fakeRightPress=pygame.event.Event(KEYDOWN,{"key":K_RIGHT})
###################### Action Items for future work sessions #############
###bugfixes
#stop duplicate revive effect messages.
#add null messages so you can take out HP notifications without a no-messages error
###feature expansions
#numerical HP display
#XP to next level display
#menus in different places on the screen?
#write the catch formula
#expand on start menu, detail team screen and wikidex
#make the team overview a special menu subclass
#choice menu widths (what is this? I was too vague)
#holding down buttons
###Ambitious stuff!
#background music (less ambitious than frivolous but still)
#make a level generator!
#do the sprite work for battles (requires an Artist)
#saving and loading games
############Story notes muthafuckas!
###The first pokemon pusher (local equivalent of pokemart) is named Marty. The others are named increasingly weird spellings of Marty.
###The Martys get their shit from 3d printers
###
###Pokedex and PC system belong to Typha's university; she sneaks you access and
###charges you to store mons above the party five.
###Dex is Typha's thesis project; you and rival are collecting data for her.
###Rival's and your seen-punkemon/caught-punkemon appear in the same dex with a mark for who caught them;
###contest between you and Rival to get the most--for <s>SCIENCE</s> bragging rights!
###"Oak: "Now's not the time for that!""-->"Typha: "What are you smoking? You can't use that now!""
###
###Gym type-->city name:normal=granite, fire=citrine, water=sapphire, grass=peridot, poison=amethyst,
###electric=topaz, psychic=alexandrite, dragon=opal, ice=diamond, flying=lapis,
###ground=axinite, rock=geode, bug=emerald, fighting=stibnite, ghost=obsidian
###some random other town: malachite
###town you start out in: pearl
###4 are mandatory and then pick 4 of the other 11
###
###one of the gyms is in the middle of nowhere by the side of the route,
###in a place too small to show up on your town map. The gym leader is a hermit.
###Maybe this is the ground gym or the fighting gym or something.
###
###Electric gym is locked with a sign saying "gone to grocery store, back in 15 minutes" (and pointer npc near gym)
###back of note has grocery list, which you automatically take on second reading
###go to grocery store, gym leader is trying to remember what he wanted
###give him list, he runs off and reappears at gym
###if you only read it once, or never read it, he comes back 20 minutes after you enter town
###if you helped him find his list, "thanks for helping me! I got the stuff to make my awesome barbeque!"
###if he came back on timer, "Aw man! I forgot the stuff I needed to make my awesome barbeque!"
###either way: but that won't stop me from FRYING YOU ALIVE!" -->Fight
###dex puzzle inside electric gym: visible live wires you have to navigate around in a maze,
###accidentally walk into one and you get knocked out and wake up at maze start
###
###Fire gym leader is Azula, Water gym leader is Akula, they have the same sprite
###
###Cities are named after birthstones, one of them is Alexandrite and it has a big library
###Typha is in the library at a conference and sends you to go get a book
###And then maybe she gives you the Master Ball her advisor's been developing
###
###The optional hometown (late-unlock) gym leader is your mom (credit to imzoruaok from tumblr)
###If your reputation stat is too low, you auto-lose and are Grounded Forever and have to reload your last save.
###five or six trainers along the way make "Your mom" jokes and one or two of them are shit like "you fight like your mom" that turn out to be true
################### Global functions
def safeCopy(source):
    """Return a two-level copy of a grid (an iterable of rows).

    Each row is copied into a new list, so callers can mutate the copy's
    rows (e.g. scratch map layers) without touching the original. The copy
    is shallow below the row level: inner elements are shared.
    """
    # Comprehension replaces the original append loop; same result.
    return [list(row) for row in source]
################### Menu stuff ############################
allMenus=[]  #global registry; menu.__init__ appends every menu instance created
class menu:
def __init__(self,oplist,mode,execOnA,execOnS,rollable=False,oplistConstructor=False,screen=False):
#rollable means being on the bottom option and hitting "down" gets you the top option
self.oplist=oplist
self.mode=mode #"choice", "titledChoice", or "dialog", controls whether there's a moving cursor and if the first thing is a title or an option
self.curPos=1 #current position of cursor, ONE-indexed
if self.mode=="titledChoice":
self.curPos=2
self.rollable=rollable
self.execOnA=execOnA
self.execOnS=execOnS
self.oplistConstructor=oplistConstructor
if self.oplistConstructor:
self.tempOplist=eval(self.oplistConstructor)
else:
self.tempOplist=self.oplist
self.curSlide=1
self.maxChars=screenWidth-2
self.maxLines=4
self.maxSlides=5
self.screen=screen
self.frame=self.getFrame()
#Frame should be in terms of option numbers, NOT list index
allMenus.append(self)
def getFrame(self): #only call this in init or when curPos is set to 1
if self.mode=="choice":
return[1,screenHeight-2]
elif self.mode=="titledChoice":
return [1,screenHeight-2]
elif self.mode=="dialog":
return False
def getArray(self,top):
if self.mode=="choice":
array=self.getArrayChoice(top)
elif self.mode=="titledChoice":
array=self.getArrayTitledChoice(top)
elif self.mode=="dialog":
array=self.getArrayDialog()
else:
print self.curMode+"? That's not even a menu type."
return array
def getArrayChoice(self,top): #generates array with set of menu options for sprite generations
#find length of longest menu item
maxLength=2
for op in self.tempOplist: #op needs to be a string
if len(op)>maxLength:
maxLength=len(op)
maxLength+=1 #allows space on the right for the scroll cues
#find section of menu that can currently fit on the screen
oplistInFrame=self.tempOplist[self.frame[0]-1:self.frame[1]]
#top border line
opAr=[["*TL"]]
for i in range(0,maxLength+1): #+1 for cursor
opAr[0].append("T=")
opAr[0].append("*TR")
#assemble menu line for a given entry
for op in oplistInFrame:
tmp=["L|"," "] #open line with pipe and space for cursor
tmpStr=op.ljust(maxLength)#buffer item to max length, +1 for cursor
for char in tmpStr:#stick in one char at a time
tmp.append(char)
tmp.append("R|")#close line with pipe
opAr.append(tmp)
lastLine=["*BL"]
for i in range(0,maxLength+1): #+1 for cursor
lastLine.append("B=")
lastLine.append("*BR")
opAr.append(lastLine)
#draw cursor
cursorIndex=self.curPos-self.frame[0]+1
if top:
opAr[cursorIndex][1]=">"
else:
opAr[cursorIndex][1]="cOp"
#draw scroll-arrow if necessary
if self.frame[0]!=1: #can scroll up
opAr[1][-2]="cUp"
if len(self.tempOplist)>screenHeight-2 and self.frame[1]!=len(self.tempOplist): #can scroll down
opAr[-2][-2]="cDn"
return(opAr)
def getArrayTitledChoice(self,top): #generates array with set of menu options for sprite generations
#find length of longest menu item
maxLength=2
for op in self.tempOplist: #op needs to be a string
if len(op)>maxLength:
maxLength=len(op)
maxLength+=1
#find section of menu that can currently fit on the screen
oplistInFrame=self.tempOplist[1:][self.frame[0]-1:self.frame[1]-1]#heaven forgive me
#top border line
opAr=[["*TL"]]
for i in range(0,maxLength+1): #+1 for cursor
opAr[0].append("T=")
opAr[0].append("*TR")
#assemble first line (title)
firstLine=["L|"]
paddedTitle=self.tempOplist[0].ljust(maxLength+1)
for char in paddedTitle:
firstLine.append(char)
firstLine.append("R|")
opAr.append(firstLine)
for op in oplistInFrame:
tmp=["L|"," "] #open line with pipe and space for cursor
tmpStr=op.ljust(maxLength)#buffer item to max length, +1 for cursor
for char in tmpStr:#stick in one char at a time
tmp.append(char)
tmp.append("R|")#close line with pipe
opAr.append(tmp)
lastLine=["*BL"]
for i in range(0,maxLength+1): #+1 for cursor
lastLine.append("B=")
lastLine.append("*BR")
opAr.append(lastLine)
#draw cursor
cursorIndex=self.curPos-self.frame[0]+1
if top:
opAr[cursorIndex][1]=">"
else:
opAr[cursorIndex][1]="cOp"
#draw scroll-arrow if necessary
if self.frame[0]!=1: #can scroll up
opAr[1][-2]="cUp"
if len(self.tempOplist)>screenHeight-2 and self.frame[1]!=len(self.tempOplist): #can scroll down
opAr[-2][-2]="cDn"
return(opAr)
    def getArrayDialog(self): #generates array with dialog characters in a box
        """Build the 2-D character array for the current dialog slide:
        word-wraps the text to self.maxChars per line, pads the box to at
        least 4 lines, and surrounds it with border sprites."""
        #get raw string of dialog and break it up into lines
        diastring=self.tempOplist[self.curSlide-1] #-1 bc curSlide is 1-indexed
        sentences=diastring.split("\n")
        finalLines=[] #will contain the final dialog, with each item being a line
        for sentence in sentences:
            if len(sentence)<=self.maxChars:
                finalLines.append(sentence)
            else:
                #greedy word-wrap: pack words until the line is full
                words=sentence.split()
                newLine=""
                for word in words:
                    #if you can fit one more word on the line, add it
                    if len(newLine)+len(word)<self.maxChars:
                        newLine=newLine+word+" "
                    #if you can't, finalize the line and start a new one
                    else:
                        finalLines.append(newLine[0:-1])
                        newLine=word+" "
                finalLines.append(newLine[0:-1])
        while len(finalLines)<4: #dialog boxes are always at least 4 lines tall
            finalLines.append("")
        for i in range(0,len(finalLines)):
            finalLines[i]=finalLines[i].ljust(self.maxChars)
        ##Characterize and print the array
        #top border line
        diAr=[["*TL"]]
        #make the menu box the same size (just big enough to accomodate the longest allowable line) every time
        for i in range(0,self.maxChars):
            diAr[0].append("T=")
        diAr[0].append("*TR")
        #assemble menu line for a given entry
        for line in finalLines:
            tmp=["L|"]
            for char in line: #break line into individual characters
                tmp.append(char)
            tmp.append("R|")
            diAr.append(tmp)
        lastLine=["*BL"]
        #make the menu box the same size (just big enough to accomodate the longest allowable line) every time
        for i in range(0,self.maxChars):
            lastLine.append("B=")
        lastLine.append("*BR")
        diAr.append(lastLine)
        return(diAr)
def moveCursor(self,direction):
if self.mode=="choice":
if direction=="up":
if self.curPos>1: #curPos=1 means cursor on top option
self.curPos-=1
if self.curPos<self.frame[0]:
self.frame[0]-=1
self.frame[1]-=1
elif self.rollable:
self.curPos=len(self.tempOplist)
newTop=max(len(self.tempOplist)-screenHeight+2,1)#ensure no 0 or negatives
self.frame=[newTop,len(self.tempOplist)]
elif direction=="down":
if self.curPos<len(self.tempOplist):
self.curPos+=1
if self.curPos>self.frame[1]:
self.frame[0]+=1
self.frame[1]+=1
elif self.rollable:
self.curPos=1
self.frame=self.getFrame()
elif self.mode=="titledChoice":
if direction=="up":
if self.curPos>2: #curPos=2 means cursor on top option
self.curPos-=1
if self.curPos<=self.frame[0]:
self.frame[0]-=1
self.frame[1]-=1
elif self.rollable:
self.curPos=len(self.tempOplist)
newTop=max(len(self.tempOplist)-screenHeight+3,1)
self.frame=[newTop,len(self.tempOplist)]
elif direction=="down":
if self.curPos<len(self.tempOplist):
self.curPos+=1
if self.curPos>self.frame[1]:
self.frame[0]+=1
self.frame[1]+=1
elif self.rollable:
self.curPos=2
self.frame=self.getFrame
def processInput(self,event,screen):
if self.mode=="choice" or self.mode=="titledChoice":
self.processInputChoice(event,screen)
elif self.mode=="dialog":
self.processInputDialog(event,screen)
def processInputChoice(self, event, screen):
if event.type==KEYDOWN:
if event.key==K_UP:
self.moveCursor("up")
elif event.key==K_DOWN:
self.moveCursor("down")
elif event.key==K_a:
exec(self.execOnA)
elif event.key==K_s:
exec(self.execOnS)
    def processInputDialog(self, event, screen):
        """Advance the dialog one slide on any keypress. Because of the elif
        chain, A (execOnA) and S (execOnS) only take effect once the last
        slide is already showing."""
        if event.type==KEYDOWN:
            if self.curSlide<len(self.tempOplist):
                self.curSlide+=1
            elif event.key==K_a:
                exec(self.execOnA)
            elif event.key==K_s:
                exec(self.execOnS)
    def mutateToNewMenu(self,newOptions,newMode=False):#Deprecated.
        #mutates the mode and oplist while retaining the stack and the execOn code.
        #Deprecated.
        #NOTE(review): sets self.curMode while the rest of this class reads
        #self.mode; and self.length is taken from tempOplist even though
        #oplist was just replaced — verify both before reviving this method.
        if newMode:
            self.curMode=newMode
        if isinstance(newOptions,str):
            self.oplist=eval(newOptions) #a string oplist is a code snippet to evaluate
        else:
            self.oplist=newOptions
        self.length=len(self.tempOplist)
        self.curPos=1
    def replaceMenu(self,newMenuInstance):
        """Swap the top menu on the screen's menu stack for newMenuInstance."""
        self.screen.activeMenus[-1]=newMenuInstance
    def backUpMenuStack(self):
        """Pop the top menu, revealing the one beneath it. (Rebinds the list
        to a slice rather than mutating it in place.)"""
        self.screen.activeMenus=self.screen.activeMenus[:-1]
    def addToMenuStack(self,newMenu=False):
        #Gets a new menu from the menudict and adds it to the screen, while keeping the old menu visible behind it.
        #Use when going to a menu that should revert to the previous menu on pressing "S".
        #If newMenu is not given, the currently highlighted option's text is
        #used as the key into the global menuDict.
        oldMenu=self #NOTE(review): unused local, kept as-is
        if not newMenu:
            newMenu=menuDict[self.tempOplist[self.curPos-1]]
        newMenu=newMenu.evaluatedCopy() #re-evaluate any state-dependent oplist
        self.screen.activeMenus.append(newMenu)
    def evaluatedCopy(self):
        #Returns a menu that is the same as self, but with the oplist evaluated if necessary.
        #Use for menus where the oplist depends on the game state and you don't want to mutate.
        #will probably be deprecated after the oplist constructor is its own argument
        if isinstance(self.oplist,str):
            print "evaluatedCopy got used"
            newOplist=eval(self.oplist) #string oplist is evaluated against current game state
            return menu(newOplist,self.mode,self.execOnA,self.execOnS,self.rollable,self.screen)
            #If you have changed the init arguments for menu and now things are breaking, this is probably the problem
            #Heed this prophecy, for it came true the first time within a day of its writing!
        else:
            return self
########## Incidental menu-class functions that get run during menus at some point in the game. The execOnAs and execOnSs of various menus.
########## These will probably proliferate. It's alright. Didn't Tom Lehrer tell you? Proliferation is the word of the day.
    def pickStarter(self,name):
        """Give the player their chosen starter and stock Gary with the
        type-advantaged counter-pick, then return to the world screen.
        NOTE(review): starter levels are wildly inconsistent (Bulbasaur 15,
        Charmander 50, Squirtle 5) — these look like leftover debug values;
        confirm the intended level."""
        if name=="Bulbasaur":
            starterMon=Bulbasaur(15,"Bulbasaur")
            #secondMon=Diglett(20,"Diglett")
            garyMon=Charmander(5,"Charmander")
        elif name=="Charmander":
            starterMon=Charmander(50,"Charmander")
            garyMon=Squirtle(5,"Squirtle")
        elif name=="Squirtle":
            starterMon=Squirtle(5,"Squirtle")
            garyMon=Bulbasaur(5,"Bulbasaur")
        starterMon.trainer=self.screen.player
        #secondMon.trainer=self.screen.player
        #thirdMon.trainer=self.screen.player
        self.screen.player.team=[starterMon]#,secondMon]#,thirdMon]
        self.screen.player.monsSeen.append(starterMon.species)
        self.screen.player.monsCaught.append(starterMon.species)
        garyMon.trainer=Gary
        Gary.team=[garyMon]
        self.screen.switchTo('world')
def displayItemsList(self):
result=[]
names=Red.inventory.keys()
for name in names:
dispStr=str(self.screen.player.inventory[name]).rjust(2,"0")+"x"+name
result.append(dispStr)
return result
    def selectItemOutsideBattle(self):
        """Record which inventory item the cursor is on; if it targets a
        specific punkemon, push the mon-picker menu, otherwise use it now."""
        #get the current oplist
        if self.oplistConstructor:
            self.tempOplist=eval(self.oplistConstructor)
        else:
            self.tempOplist=self.oplist
        self.screen.player.selectedItemName=self.tempOplist[self.curPos-1].split("x",1)[1] #entries look like "03xPotion": keep the part after the first "x"
        if itemDict[self.screen.player.selectedItemName].targetsMon:
            self.addToMenuStack(itemChooseMon)
        else:
            self.itemOutsideBattle(False)
    def itemOutsideBattle(self,targetMonTeamIndex):
        """Use the previously selected item outside of battle.
        targetMonTeamIndex: index into the player's team, or False for
        untargeted items.
        NOTE(review): when targetMonTeamIndex is False the indexing below
        yields team[0] (False==0), not "no mon" — confirm intended.
        NOTE(review): the menu(...) calls here pass 7 positional args while
        evaluatedCopy passes 6 — check against menu.__init__'s signature."""
        itemName=self.screen.player.selectedItemName
        chosenItem=itemDict[itemName]
        targetMon=self.screen.player.team[targetMonTeamIndex] #mon instance or False if none is needed
        legal=chosenItem.isLegal(chosenItem,targetMon,False) #False: not in battle
        if legal:
            useResultMessage=chosenItem.use(chosenItem,self.screen.player,targetMon)
            self.addToMenuStack(menu([useResultMessage],"dialog","self.backUpMenuStack()","self.backUpMenuStack()",False,False,self.screen))
            self.screen.player.selectedItemName=False
        else:
            self.addToMenuStack(menu(["What are you smoking? Now's not the time for that!"],"dialog","self.backUpMenuStack()","self.backUpMenuStack()",False,False,self.screen))
########## Battle menus: simplified menus for use in battle.
########## Currently used to display most of the available battle information, because graphics aren't done yet.
class battleMenu:
    """Simplified menu used inside battles: either a "choice" list or a
    "dialog" message sequence, rendered as a 2-D array of sprite names."""
    def __init__(self,curMode,oplist):
        self.curMode=curMode #either "choice" or "dialog"
        self.oplist=oplist #list of messages if dialog, list of choices if choice
        self.length=len(self.oplist)
        self.curPos=1 #choice # or place in the messages list
        self.drawArray=[] #list of sprite
        self.maxChars=screenWidth-2 #widest dialog line that fits on screen
        self.maxLines=3
        self.canGoBack=True
    def switchMenu(self,newOptions,newMode=False):#switches to a new menu, in a new mode if necessary
        """Replace this menu's contents in place and reset the cursor."""
        if newMode:
            self.curMode=newMode
        self.oplist=newOptions
        self.length=len(self.oplist)
        self.curPos=1
    def getArray(self):
        """Build the render array for the current mode."""
        if self.curMode=="choice":
            array=self.getArrayChoice() #modify this to deal with null messages?
        elif self.curMode=="dialog":
            array=self.getArrayDialog() #modify this to deal with null messages?
        else:
            print self.curMode+"? That's not even a battle menu type."
        return array
    def getArrayChoice(self): #generates array with set of menu options for sprite generations
        """Box the choice list with border sprites, leaving a cursor column;
        returns the array. No scrolling (battle menus are short)."""
        #find length of longest menu item
        maxLength=2
        for op in self.oplist: #op needs to be a string
            opString=str(op)
            if len(opString)>maxLength:
                maxLength=len(opString)
        #top border line
        opAr=[["*TL"]]
        for i in range(0,maxLength+1): #+1 for cursor
            opAr[0].append("T=")
        opAr[0].append("*TR")
        #assemble menu line for a given entry
        for op in self.oplist:
            opString=str(op)
            tmp=["L|"," "] #open line with left-edge pipe and space for cursor
            tmpStr=opString.ljust(maxLength)#buffer item to max length, +1 for cursor
            for char in tmpStr:#stick in one char at a time
                tmp.append(char)
            tmp.append("R|")#close line with pipe
            opAr.append(tmp)
        lastLine=["*BL"]
        for i in range(0,maxLength+1): #+1 for cursor
            lastLine.append("B=")
        lastLine.append("*BR")
        opAr.append(lastLine) #bottom border is same as top
        #draw cursor
        opAr[self.curPos][1]=">"
        return(opAr)
    def getArrayDialog(self):#generates array with dialog characters in a box
        """Word-wrap the current message to self.maxChars per line and box it
        with border sprites; returns the array."""
        #get raw string of dialog and break it up into lines
        diastring=self.oplist[self.curPos-1] #-1 bc curPos is 1-indexed
        sentences=diastring.split("\n")
        finalLines=[] #will contain the final dialog, with each item being a line
        for sentence in sentences:
            if len(sentence)<=self.maxChars:
                finalLines.append(sentence)
            else:
                #greedy word-wrap: pack words until the line is full
                words=sentence.split()
                newLine=""
                for word in words:
                    #if you can fit one more word on the line,
                    if len(newLine)+len(word)<self.maxChars:
                        newLine=newLine+word+" "
                    #if you can't, finalize the line and start a new one
                    else:
                        finalLines.append(newLine[0:-1])
                        newLine=word+" "
                finalLines.append(newLine[0:-1])
        for i in range(0,len(finalLines)):
            finalLines[i]=finalLines[i].ljust(self.maxChars)
        ##Characterize and print the array
        #top border line
        diAr=[["*TL"]]
        #make the menu box the same size (just big enough to accomodate the longest allowable line) every time
        for i in range(0,self.maxChars):
            diAr[0].append("T=")
        diAr[0].append("*TR")
        #assemble menu line for a given entry
        for line in finalLines:
            tmp=["L|"]
            for char in line: #break line into individual characters
                tmp.append(char)
            tmp.append("R|")
            diAr.append(tmp)
        lastLine=["*BL"]
        #make the menu box the same size (just big enough to accomodate the longest allowable line) every time
        for i in range(0,self.maxChars):
            lastLine.append("B=")
        lastLine.append("*BR")
        diAr.append(lastLine)
        return(diAr)
    def getNext(self):
        """Dialog mode: advance one message; return False once exhausted,
        True otherwise. Choice mode: return the selected option's string
        (fed to switchTo())."""
        if self.curMode=="dialog":
            self.curPos+=1
            if self.curPos>len(self.oplist):
                return False
            else:
                return True
        if self.curMode=="choice":
            #Figure out the next menu and return appropriate string to feed to switchTo().
            return self.oplist[self.curPos-1]
#################### Puttin' on the MOVES ###################
class move:
    """A single attack/technique a punkemon can use in battle."""
    def __init__(self,name,basePwr, baseAcc, maxPP, nation, special, sideEffect, message=False, fastMove=False, critRate=1):#sideEffect can be string or False
        self.name=name
        self.basePwr=basePwr       #0 for pure status moves
        self.baseAcc=baseAcc       #percent chance to hit before accuracy/evasion mods
        self.maxPP=maxPP
        self.nation=nation         #the move's type (its elemental "nationality")
        self.special=special       #True: uses Special stats; False: Attack/Defense
        self.sideEffect=sideEffect #encoded effect string (see getEffect), or False
        self.message=message       #message shown when the side effect fires
        self.fastMove=fastMove #pre-empts Speed duel (quick attack, swift)
        self.critRate=critRate
    def __repr__(self):
        return self.name #hahahaha!
    def __str__(self):
        return self.name
    def getCurPP(self,attacker):
        """Return attacker's remaining PP for this move (matched by name)."""
        for i in range(0,numMoves):
            if self.name==attacker.curMoves[i].name:
                return attacker.curPP[i]
    def getHit(self,attacker,defender):
        """Roll to-hit for attacker using this move. Returns (hit, messages).
        Confusion may make the attacker hurt itself instead (auto-miss)."""
        messages=[]
        if (defender.curMove!="item" and defender.curMove.sideEffect and "offscreen" in defender.curMove.sideEffect):
            if defender.status["charging"]:
                #BUG FIX: auto-miss on dig and fly. The original assigned the
                #tuple (False,[]) to hit and then fell through, so the normal
                #accuracy roll below overwrote it; return the miss directly.
                return (False,messages)
        if attacker.status["confused"] and randint(0,1)==0:
            messages.append(attacker.name+" hurt itself in its confusion!")
            attacker.tempStats["HP"]-=hitYourself.getDamage(attacker,defender)
            hit=False
        else:
            if attacker.status["confused"]:
                messages.append(attacker.name+" attacked despite confusion!")
            hitChance=float(self.baseAcc)*attacker.accuracy/defender.evasion
            if randint(0,99)<hitChance:
                hit=True
            else:
                messages.append("... but it missed!")
                hit=False
        return (hit,messages)
    def getModifier(self,attacker,defender):
        """Combine STAB, type matchup, crit roll and the random damage range
        into a single damage multiplier."""
        #put some messages in here later
        #STAB
        if self.nation in attacker.nation:
            stab=1.5
        else:
            stab=1
        #Type matchup multiplier
        typeMod=self.getTypeModifier(attacker,defender)
        #critical
        critChance=self.critRate*attacker.critRate
        if random() <critChance:
            #critical hit!
            crit=2
        else:
            crit=1
        #damage range
        randomFactor=uniform(.85,1)
        modifier=stab*typeMod*crit*randomFactor
        return modifier
    def getTypeModifier(self,attacker,defender):
        """Look up the type-chart multiplier against each of the defender's
        (one or two) types and multiply them together."""
        typeMod=1
        typeMod*=typeDict[self.nation,defender.nation[0]]
        if len(defender.nation)>1:
            typeMod*=typeDict[self.nation,defender.nation[1]]
        return typeMod
    def getDamage(self,attacker,defender):
        """Standard damage formula, using Special or Attack/Defense stats
        depending on self.special; rounded up to a whole number."""
        modifier=self.getModifier(attacker, defender)
        if attacker.curMove.special:
            attackerAttack=attacker.tempStats["Special"]
            defenderDefense=defender.tempStats["Special"]
        else:
            attackerAttack=attacker.tempStats["Attack"]
            defenderDefense=defender.tempStats["Defense"]
        damage=float(2*attacker.level+10)/250*attackerAttack/defenderDefense*self.basePwr*modifier
        return ceil(damage)
    def getEffect(self,attacker,defender):
        """Apply this move's side effect (if any) and return the messages.
        See the comment below for the encoded sideEffect grammar."""
        #The logic behind this is complicated.
        #Some moves' effects are 3-word phrases of the form "[pokemon][accuracy/evasion/critRate][degree of change]".
        #Others are 3-word phrases of the form "[pokemon][stat][degree of change].
        #Still others are 2-word phrases of the form "[percent chance][possible status effect]",
        #And a final group are 2-word phrases of the form "[charging or multiple][number of turns or whether enemy is trapped]".
        #These possibilities are handled in the above order, with the exception of charging and multiple,
        #which must be handled before the damage is calculated and, therefore, before getEffect.
        messages=[]
        #check if an effect can succeed with the current type matchup, add relevant message
        typeModifier=self.getTypeModifier(attacker, defender)
        if typeModifier==0:
            return ["It has no effect!"] #effect will fail if there is one, do nothing
        elif typeModifier==0.5:
            messages.append("It's not very effective~")
        elif typeModifier==2:
            messages.append("It's super effective!")
        if self.sideEffect:#if there is an effect
            #break effect down into words
            effectWords=self.sideEffect.split()
            #handle weirdass arbitrary shit
            if effectWords[0]=="exec":
                exec(effectWords[1])
            #handle accuracy/evasion effects
            elif effectWords[1]=="accuracy" or effectWords[1]=="evasion":
                if effectWords[0]=="self":
                    call="attacker"
                    messages.append(attacker.name+self.message)
                else:
                    call="defender"
                    messages.append(defender.name+self.message)
                call+="."+effectWords[1]
                call+="*=1.4**"+effectWords[2]
                exec(call)
            #handle critRate effects
            elif effectWords[1]=="critRate":
                if effectWords[0]=="self":
                    call="attacker"
                    messages.append(attacker.name+self.message)
                else:
                    call="defender"
                    messages.append(defender.name+self.message)
                call+=".critRate="+call+".permStats['Speed']/512.0*"+effectWords[2]
                exec(call)
            #handle all other stat effects
            elif len(effectWords)==3:
                #if effect is a stat move
                if effectWords[0]=="self":
                    attacker.tempStats[effectWords[1]]*=1.4**float(effectWords[2])
                    messages.append(attacker.name+self.message)
                elif effectWords[0]=="enemy":
                    defender.tempStats[effectWords[1]]*=1.4**float(effectWords[2])
                    messages.append(defender.name+self.message)
            #handle 2-word status effects
            elif effectWords[0] not in ["charging","multiple","chain"]: #e.g. sleep, burn, etc.
                if effectWords[1] in defender.tempStats: #moves that probabilistically change stats
                    if randint(0,99)<int(effectWords[0]):
                        defender.tempStats[effectWords[1]]*=1.4**(-1.0)
                        messages.append(defender.name+self.message)
                else:
                    fail=False
                    if self.basePwr==0 and not defender.getEffectSuccess(effectWords[1]):
                        fail=True
                    if randint(0,99)<int(effectWords[0]) and not fail:
                        defender.status[effectWords[1]]=True
                        messages.append(defender.name+self.message)
                    if fail:
                        messages.append("But it failed!")
            #handle multiple
            elif effectWords[0]=="multiple" and attacker.status["multiple"]: #multiple move
                enemyCanMove=not(effectWords[1]=="False" or effectWords[1]=="false")
                if not enemyCanMove: #enemy cannot move
                    defender.status["trapped"]=True
        return messages
#Load the move definitions into this module's namespace.
#Use a with-block so the file handle is closed (the original leaked it).
with open("Moves.py") as moveCode:
    exec(moveCode.read())
class typeMatchup:
    """Type-effectiveness chart. Index with an (attackType, defendType) pair,
    e.g. typeDict["Fire","Grass"] -> 2, to get the damage multiplier."""
    def __init__(self):
        #column order of the defender's type within each row below
        self.order=["Normal","Fighting","Flying","Poison","Ground","Rock","Bug",
                "Ghost","Fire","Water","Grass","Electric","Psychic","Ice","Dragon"]
        #rows are keyed by the attacking move's type; the entries are the
        #multipliers against each defender type, in self.order
        self.typeDict={"Normal":[1,1,1,1,1,.5,1,0,1,1,1,1,1,1,1,1],
                  "Fighting":[2,1,.5,.5,1,2,.5,0,1,1,1,1,.5,2,1,1],
                  "Flying":[1,2,1,1,1,.5,2,1,1,1,2,.5,1,1,1,1],
                  "Poison":[1,1,1,.5,.5,.5,2,.5,1,1,2,1,1,1,1,1],
                  "Ground":[1,1,0,2,1,2,.5,1,2,1,.5,2,1,1,1,1],
                  "Rock":[0,.5,2,1,.5,1,2,1,2,1,1,1,1,2,1,1],
                  "Bug":[1,.5,.5,2,1,1,1,.5,.5,1,2,1,2,1,1,1],
                  "Ghost":[0,1,1,1,1,1,1,2,1,1,1,1,0,1,1,1],
                  "Fire":[1,1,1,1,1,.5,2,1,.5,.5,2,1,1,2,.5,1],
                  "Water":[1,1,1,1,2,2,1,1,2,.5,.5,1,1,1,.5,1],
                  "Grass":[1,1,.5,.5,2,2,.5,1,.5,2,.5,1,1,1,.5,1],
                  "Electric":[1,1,2,1,0,1,1,1,1,2,.5,.5,1,1,.5,1],
                  "Psychic":[1,2,1,2,1,1,1,1,1,1,1,1,.5,1,1,1],
                  "Ice":[1,1,2,1,2,1,1,1,1,.5,2,1,1,.5,2,1],
                  "Dragon":[1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1],
                  "Fail":[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0]}
        #keys in dict are attacker move type,
        #entries in values are type modifiers for each defender type
    def lookup(self,attType,defType):
        """Return the multiplier for attType attacking a defType punkemon."""
        return self.typeDict[attType][self.order.index(defType)]
    def __getitem__(self,k):
        """Accept any 2-indexable (tuple, list, ...) of (attack, defend)."""
        return self.lookup(k[0],k[1])
#don't forget to INIT IT!
typeDict=typeMatchup() #module-level type chart, used by move.getTypeModifier
############ People (player and other trainers and NPCs)
class character:
    """Base class for anyone who can own punkemon: the player, enemy
    trainers, and (via wildPunkemon) wild encounters."""
    def __init__(self,team,name,sprite,money,beforeDialog=False,afterDialog=False,AI="normal",rewardItem=False):
        self.team=team
        self.name=name
        self.sprite=sprite
        self.money=money #money you get for beating them, or your money if it's you
        self.beforeDialog=beforeDialog #dialog shown before the battle, if any
        self.afterDialog=afterDialog   #dialog shown after the battle, if any
        self.AI=AI #whether character is player, opponent, bad-AI opponent or good-AI opponent
        self.curMon=team[0] #mon that goes out when battle starts
        self.nextMon=False #the mon you're in the middle of switching to due to vagaries of control structure and turn timing
        self.inventory={}#the inventory used for display. Maps names to # you have.
        self.facingDict={"North":"_U","South":"_D","East":"_R","West":"_L","none":""}
    def getNumUnfainted(self):
        """Return how many team members still have HP left."""
        num=0
        for mon in self.team:
            if mon.tempStats["HP"]>0:
                num+=1
        return num
    def getNextMon(self): #returns new mon when one faints
        """Return (nextMon, messages); nextMon is False when the team is wiped."""
        messages=[]
        #this is intended only for normal and bad AIs.
        #Better (gym leader/rival) AI and players should overload this in their subclasses.
        for mon in self.team:
            if mon.tempStats["HP"]>0:
                return(mon,messages)
        return (False,messages)
    def getNextMove(self):
        """Pick the current mon's next move according to this character's AI:
        "normal" picks uniformly among moves with PP, "lousy" over-weights
        weak moves, "awesome" over-weights strong ones. Locked-in moves
        (charging/multiple) repeat, and Struggle is forced at zero PP."""
        if self.curMon.status["multiple"]:
            return self.curMon.curMove
        if self.curMon.status["charging"]:
            return self.curMon.curMove
        moveList=self.curMon.curMoves
        nextMove=False
        if all([PP == 0 for PP in self.curMon.curPP]):
            return struggle
        if self.AI=="normal":
            while not nextMove:
                randomIndex=randint(0,len(moveList)-1)
                if self.curMon.curPP[randomIndex]>0:
                    nextMove=moveList[randomIndex]
            return nextMove
        if self.AI=="lousy":
            #weight weak moves more heavily so this trainer plays badly
            stackedMoveList=list(moveList)
            for mv in moveList: #renamed from "move" to avoid shadowing the move class
                if mv.basePwr==0:
                    stackedMoveList.append(mv)
                    stackedMoveList.append(mv) #add really shitty moves twice
                elif mv.basePwr<40:
                    stackedMoveList.append(mv) #add kinda shitty moves once (BUG FIX: was the undefined name "moveP")
            while not nextMove:
                randomIndex=randint(0,len(stackedMoveList)-1)
                tryMove=stackedMoveList[randomIndex]
                moveIndex=moveList.index(tryMove)
                if self.curMon.curPP[moveIndex]>0:
                    nextMove=tryMove
            return nextMove
        if self.AI=="awesome":
            #weight strong moves more heavily so this trainer plays well
            stackedMoveList=list(moveList)
            for mv in moveList:
                if mv.basePwr>=60:
                    stackedMoveList.append(mv)
                    stackedMoveList.append(mv)
            while not nextMove:
                randomIndex=randint(0,len(stackedMoveList)-1)
                tryMove=stackedMoveList[randomIndex]
                moveIndex=moveList.index(tryMove)
                if self.curMon.curPP[moveIndex]>0:
                    nextMove=tryMove
            return nextMove
class PC(character):
    """The player character."""
    def __init__(self,name,gender,starterTeam,money):
        self.gender=gender
        if gender=="female":
            self.sprite="PCfemale"
        else:
            self.sprite="PCmale"
        #NOTE(review): "player" below fills character.__init__'s beforeDialog
        #slot (5th positional arg), so self.AI stays the default "normal" —
        #confirm whether AI="player" was intended.
        character.__init__(self,starterTeam,name,self.sprite,0,"player")
        self.money=money
        self.monsSeen=[]   #species names the player has encountered
        self.monsCaught=[] #species names the player has caught
        self.encounterModifier=0#positive=more encounters, negative=fewer encounters
        self.activeItem=False #last still-active long-duration item used
        self.lastItem=False #last instantaneous item used
        self.stepsToItemEnd=0 #steps remaining before activeItem wears off
        self.totalSteps=0
        self.facingDirection="South"
    def takeStep(self):
        """Count one step and tick down any active long-duration item (repel)."""
        self.totalSteps+=1
        if self.activeItem:
            self.stepsToItemEnd-=1
            if self.stepsToItemEnd==0:
                self.activeItem=False
                self.encounterModifier=0
                #add other lines here to neutralize other possible active items as they appear
    def teamAsString(self):
        """Return the team as a list of display strings (str of each mon)."""
        res=[]
        for i in self.team:
            res.append(str(i))
        return res
    def wikidexAsList(self):
        """Return the dex stats as a one-item list for a dialog menu."""
        statsString='Punkemon seen: '+str(len(self.monsSeen))+'\n'+'Punkemon caught: '+str(len(self.monsCaught))
        return [statsString]
    def hasUnfaintedMons(self):
        """True if any team member still has HP left."""
        for i in self.team:
            if i.tempStats["HP"]>0:
                return True
        return False
class wildPunkemon(character):
    """A wild punkemon encounter, modeled as a trainer with a one-mon team."""
    def __init__(self,name,team):
        self.name=name
        self.team=team
        #NOTE(review): "normal" fills character.__init__'s beforeDialog slot
        #(AI already defaults to "normal") — confirm the positional args.
        character.__init__(self,self.team,name,False,0,"normal")
##############Items ##############################
itemDict={} #maps item display name -> item instance; each item registers itself here in item.__init__
class item:
    """A usable inventory item. Each instance registers itself in the global
    itemDict under its display name.
    The use*/legal* methods below are passed unbound into __init__ and are
    always invoked with the item supplied explicitly as the first argument,
    e.g. chosenItem.use(chosenItem,player,target)."""
    def __init__(self,name,useFunction,legalFunction,battleUseable,targetsMon=True):
        #name as it appears in the inventory
        self.name=name
        #function that takes a player and performs the action of that player using that item
        #unique to each of potion, super potion, repel, water stone, etc.
        self.use=useFunction
        self.isLegal=legalFunction #predicate: may this item be used on this target right now?
        self.battleUseable=battleUseable
        #targetsMon is true if the item needs to be used on a specific punkemon (e.g. potions or stones), false otherwise (e.g. repel, escape rope)
        self.targetsMon=targetsMon
        itemDict[self.name]=self #register globally by display name
    def healMon(self,player,target,numHP):
        """Spend one of this item to heal target by up to numHP, capped at its
        max HP. Returns a message describing the result."""
        if player.inventory[self.name]>0:
            #make sure this can only be called on an item that has been in the inventory at least once
            #otherwise it will throw a keyError
            player.inventory[self.name]-=1
            oldHP=target.tempStats["HP"]
            newHP=min(target.tempStats["HP"]+numHP,target.permStats["HP"])
            target.tempStats["HP"]=newHP
            return target.name+" recovered "+str(newHP-oldHP)+" HP."
        else:
            return "You don't have any!"
    def usePotion(self,player,target):
        return self.healMon(player,target,20)
    def useSuperPotion(self,player,target):
        #BUG FIX: was a bare healMon(...) call — a NameError on every use;
        #call the method on self like usePotion does.
        return self.healMon(player,target,50)
    def potionLegal(self,target,inBattle):
        """Potions are legal on a mon that is hurt but not fainted."""
        return target.permStats["HP"]>target.tempStats["HP"]>0
    def repel(self,player,modifier,duration):
        """Spend one repel-type item: shift the encounter rate by modifier
        for the next `duration` steps."""
        if player.inventory[self.name]>0:
            player.inventory[self.name]-=1
            player.encounterModifier=modifier
            player.activeItem=self
            player.stepsToItemEnd=duration
            return "Repel was used successfully."
        else:
            return "You don't have any!"
    def useRepel(self,player,target):
        return self.repel(player,-10,500) #sanity-check these numbers!
    def repelLegal(self,target,inBattle):
        return not inBattle
    def useRevive(self,player,target):
        """Spend one revive: restore a fainted mon to half its max HP."""
        if player.inventory[self.name]>0:
            player.inventory[self.name]-=1
            target.tempStats["HP"]=0.5*target.permStats["HP"]
            return target.name+" was revived!"
        else:
            return "You don't have any!"
    def reviveLegal(self,target,inBattle):
        return target.tempStats["HP"]<=0
    def useBall(self,player,target,ballLevel):
        """Throw a ball at target. NOTE(review): unlike the other use*
        functions this returns a bool on success rather than a message."""
        if player.inventory[self.name]>0:
            player.inventory[self.name]-=1
            catchSuccess=True #replace this with some math using ballLevel and the target mon and the phase of the moon and stuff
            if catchSuccess:
                target.duplicate(player,"Steve")
            return catchSuccess
        else:
            return "You don't have any!"
    def ballLegal(self,target,inBattle):
        return isinstance(target.trainer,wildPunkemon)
    def usePunkeball(self,player,target):
        return self.useBall(player,target,1) #change 1 to the number associated with standard balls in the formula
    def useStone(self,player,target,stoneType):
        """Spend one evolution stone of stoneType on target, evolving it."""
        if player.inventory[self.name]>0:
            player.inventory[self.name]-=1
            if len(target.evolveStone)==1:
                nextEvolution=target.nextEvolution
                target.duplicate(False,nextEvolution,nextEvolution)
            else: #special case for eevee, because eevee is a special snowflake
                nextEvolution=target.evolveDict[stoneType]
                target.duplicate(False,nextEvolution,nextEvolution)
            return target.name+" evolved into "+nextEvolution+"!"
        else:
            return "You don't have any!"
    def stoneLegal(self,target,inBattle,stoneType):
        return (target.evolveStone and stoneType in target.evolveStone)
    def useWaterStone(self,player,target):
        return self.useStone(player,target,"water")
    def waterStoneLegal(self,target,inBattle):
        return self.stoneLegal(target,inBattle,"water")
    def useFireStone(self,player,target):
        return self.useStone(player,target,"fire")
    def fireStoneLegal(self,target,inBattle):
        return self.stoneLegal(target,inBattle,"fire")
    def useThunderStone(self,player,target):
        return self.useStone(player,target,"thunder")
    def thunderStoneLegal(self,target,inBattle):
        return self.stoneLegal(target,inBattle,"thunder")
    def useMoontone(self,player,target): #(sic) misspelled name kept: referenced as item.useMoontone below
        return self.useStone(player,target,"moon")
    def moonStoneLegal(self,target,inBattle):
        return self.stoneLegal(target,inBattle,"moon")
    def useLeafStone(self,player,target):
        return self.useStone(player,target,"leaf")
    def leafStoneLegal(self,target,inBattle):
        return self.stoneLegal(target,inBattle,"leaf")
#Instantiate the game's items; each registers itself in itemDict via __init__.
#The use/legal callables are passed unbound and always invoked with the item
#as an explicit first argument.
potion=item("Potion",item.usePotion,item.potionLegal,True,True)
superPotion=item("Super potion",item.useSuperPotion,item.potionLegal,True,True)
repel=item("Repel",item.useRepel,item.repelLegal,False,False)
revive=item("Revive",item.useRevive,item.reviveLegal,True,True)
punkeball=item("Punkeball",item.usePunkeball,item.ballLegal,True,False)
#more balls
waterStone=item("Water stone",item.useWaterStone,item.waterStoneLegal,True,True)
fireStone=item("Fire stone",item.useFireStone,item.fireStoneLegal,True,True)
thunderStone=item("Thunder stone",item.useThunderStone,item.thunderStoneLegal,True,True)
moonStone=item("Moon stone",item.useMoontone,item.moonStoneLegal,True,True)
leafStone=item("Leaf stone",item.useLeafStone,item.leafStoneLegal,True,True)
##############Sprites! ###########################
#terrain codes: 0=passable, 1=impassable, 2=screen change, 3=encounter, 4=water
terrainDict={"T":1,"x":1,"-":0,"G":3,"w":4,"O":2," ":0,"B1":1}
#human-readable terrain dict: T=tree, x=rock, G=tall grass, w=water
##class sprite:
## def __init__(self,longName,screenName,fileLocation,screen,pos,terrain=False,destination=False):
## self.longName=longName #(e.g. "tree", "mart", "rock")
## self.screenName=screenName#(e.g. T, X, @)
## self.fileLoc=fileLocation #file name and any necessary path
## self.screen=screen
## self.pos=pos #A two-element list with the coordinates of the top left corner (from top left of screen, +y is down)
## self.drawMap=self.screen.drawMap
## self.drawMap[pos[0]][pos[1]]=self.screenName #put the square in the draw map
## self.terrain=terrain
## #0=passable, 1=impassable, 2=screen change, 3=encounter, 4=water
## self.destination=destination
## #if not False, this will be a 2-element list with new world instance and initial player coordinates upon entering that world
## self.terrainMap=self.screen.terrainMap
## self.terrainMap[pos[0]][pos[1]]=self.terrain #put the square in the terrain map
######################### Building and its subclasses
##We need to work out what we were doing here and make sure it isn't stupid or half-finished.
class building:
    """A multi-tile structure stamped onto a world's terrain and sprite maps.
    inputMap is a grid of terrain codes; a screenChanger instance in the grid
    marks a door. shortName is the sprite file basename ("B1" -> B1.png)."""
    def __init__(self,inputMap,shortName): #defaults are for houses you can't go in
        self.inputMap=inputMap
        self.shortName=shortName
        #Example:
        ##B1=[
        ## [1,1,1]
        ## [1,"O1",1]
        ## ]
    def addToMap(self,terrainMap, spriteMap,xpos,ypos,backTerrain):
        """Stamp this building onto the maps with its top-left at (xpos,ypos);
        doors get terrain code 2 plus a spriteMap entry, everything else gets
        its raw terrain code. The building's own sprite is hung off the
        top-left corner last."""
        for rowOffset,row in enumerate(self.inputMap):
            curY=ypos+rowOffset
            for colOffset,cell in enumerate(row):
                curX=xpos+colOffset
                if isinstance(cell,screenChanger):
                    #a door: mark a screen-change tile and register its sprite
                    terrainMap[curY][curX]=2
                    spriteMap[curY][curX]=(cell,backTerrain)
                else:
                    #plain tile: just record the terrain code
                    terrainMap[curY][curX]=cell
        #finally, hang the building's own sprite off its top-left corner
        spriteMap[ypos][xpos]=(self,backTerrain)
    def addToSurface(self,screen,pos):
        """Blit the building's sprite onto screen at pixel position pos."""
        spritePath=os.path.join("sprites",self.shortName+".png")
        screen.blit(pygame.image.load(spritePath),pos)
class testBuilding(building):
    """A small 3x2 demo building whose only opening is its entrance door."""
    def __init__(self,entrance):
        """entrance: a screenChanger instance used as the building's door."""
        layout=[[1,1,1],
                [1,entrance,1]]
        building.__init__(self,layout,"B1") #B1.png will be the sprite
############################# screenChanger
class screenChanger:
    """A portal tile (door, ladder, cave mouth): stepping on it moves the
    player to another world."""
    def __init__(self,name,sprite,destination,startPos):
        self.name=name               #unique name of this particular portal
        self.sprite=sprite           #sprite file basename, e.g. "stairUp"
        self.destination=destination #the world this portal leads to
        self.startPos=startPos       #player's coordinates on arrival
    def addToSurface(self,screen,pos):
        """Blit this portal's sprite onto screen at pixel position pos."""
        spritePath=os.path.join("sprites",self.sprite+".png")
        screen.blit(pygame.image.load(spritePath),pos)
class monSeed:
    """Random-encounter generator for one area: a weighted lottery over the
    punkemon that can appear there."""
    def __init__(self,ticketDict,encounterRate):
        self.ticketDict=ticketDict#maps mon instances to number of lottery tickets that mon gets
        self.encounterRate=encounterRate #between 0 and 100
        self.allMons=self.ticketDict.keys()
    def getFight(self,encounterModifier):
        """Roll for an encounter (rate 100 -> essentially always, rate 0 ->
        never); return the drawn mon, or False when nothing shows up."""
        effectiveRate=self.encounterRate+encounterModifier
        if randint(0,100)>=effectiveRate:
            return False
        return self.pickMon()
    def pickMon(self):
        """Draw one mon with probability proportional to its ticket count."""
        totalTickets=sum(self.ticketDict[m] for m in self.allMons)
        winner=randint(1,totalTickets)
        running=0
        for candidate in self.allMons:
            running+=self.ticketDict[candidate]
            if running>=winner:
                return candidate
############# Actionables npcs, enemy trainers, cuttable trees, item balls, wandering punkemon, stuff you walk up to and press A basically
class actionable:
    """Anything in the world the player can walk up to and press A on:
    NPCs, enemy trainers, cuttable trees, item balls, wandering punkemon.
    NOTE: self.facingDirection is expected to be set by subclasses before
    getSightLine/addToSurface are called."""
    def __init__(self,pos,sprite,sightLine):
        self.permPos=pos                #home position [X,Y]
        self.tempPos=list(self.permPos) #current position (trainers can march)
        self.sprite=sprite
        self.sightLine=sightLine#the distance they can see
        self.sightLineSquares=[] #this will get populated when the world inits
        self.facingDict={"North":"_U","South":"_D","East":"_R","West":"_L","none":""}
    def getSightLine(self,terrainMap): #returns a list of 2-item locations that are their sightline
        """Walk outward in the facing direction collecting walkable [X,Y]
        squares until blocked, off-map, or past self.sightLine; caches and
        returns the list."""
        sightLineSquares=[]
        curPos=self.tempPos #[X,Y]
        blocked=False
        counter=0
        while not blocked and counter<=self.sightLine:
            counter+=1
            if self.facingDirection=="North":
                nextPos=[curPos[0],curPos[1]-1]
            elif self.facingDirection=="South":
                nextPos=[curPos[0],curPos[1]+1]
            elif self.facingDirection=="East":
                nextPos=[curPos[0]+1,curPos[1]]
            elif self.facingDirection=="West":
                nextPos=[curPos[0]-1,curPos[1]]
            elif self.facingDirection=="none":
                nextPos=[curPos[0],curPos[1]]
                blocked=True
            #BUG FIX: the map-bounds check used ">", letting an index equal
            #to the map size through (IndexError on edge tiles); use ">=".
            if nextPos[0]<0 or nextPos[1]<0 or nextPos[0]>=len(terrainMap[0]) or nextPos[1]>=len(terrainMap):
                blocked=True
            if not blocked:
                newTerr=terrainMap[nextPos[1]][nextPos[0]]
                if newTerr in (0,3,4,6): #see-through/walkable terrain codes
                    sightLineSquares.append(nextPos)
                    curPos=nextPos
        self.sightLineSquares=sightLineSquares
        return sightLineSquares
    def respond(self):
        return #This function gets called when you walk up to an actionable and press A. Remember to overwrite it for every subclass
    def addToSurface(self,screen,pos):
        #Draws the actionable's sprite to the provided screen at the given pos (x,y)
        screen.blit(pygame.image.load(os.path.join("sprites",self.sprite+self.facingDict[self.facingDirection]+".png")),pos)
class NPC(actionable): #randos who stand around and talk and don't fight
    """A non-combat NPC: stands in the world and shows dialog when activated."""
    def __init__(self,name,pos,dialog,sprite,facingDirection="South",item=False,sightLine=0):
        self.name=name
        self.dialog=dialog #a dialog menu that ends with switching to World
        #NOTE(review): this loaded Surface is immediately overwritten when
        #actionable.__init__ below re-assigns self.sprite to the sprite arg.
        self.sprite=pygame.image.load(os.path.join("sprites","npc.png")) #change this when you give npcs their own individual sprites
        self.permPos=pos#2-element list of their position in the world [X,Y]
        self.tempPos=list(self.permPos)
        self.facingDirection=facingDirection
        self.sightLine=sightLine
        #do something about telling the world about the NPCs and characters in it
        self.item=item #the item they give you, if they give you an item
        actionable.__init__(self,pos,sprite,sightLine)
    def respond(self):
        """Activated by the player: show this NPC's dialog menu."""
        return ("menu",self.dialog)
        #bring up a menu with self.dialog
class NPCTrainer(actionable): #randos who stand around and DO fight
	"""A trainer standing in the world: fights the player on sight or interaction.

	BUGFIX: activate() had its North and South cases swapped. Everywhere else
	in the code (actionable.getSightLine, screen.worldInput) facing North means
	looking toward smaller Y, so a North-facing trainer's spotted player is
	ABOVE them -- the old code incremented Y toward playerPos[1]-1, a value it
	could never reach, producing an infinite loop. North now walks up
	(decreasing Y) and South walks down (increasing Y), mirroring the already
	correct East/West cases.
	"""
	#NOTE(review): foughtDialog's mutable default list is shared across calls;
	#harmless as long as menu() doesn't mutate it -- confirm before relying on it.
	def __init__(self,pos,sprite,trainer,facingDirection="South",foughtDialog=["I did my best! I have no regrets!"]):
		self.trainer=trainer
		self.sprite=pygame.image.load(os.path.join("sprites","enemy.png")) #change this when you give trainers their own individual sprites
		self.permPos=pos#2-element list of their position in the world
		self.tempPos=list(self.permPos)
		self.facingDirection=facingDirection
		self.trainer.fought=False
		self.beforeDialog=self.trainer.beforeDialog
		self.afterDialog=self.trainer.afterDialog
		self.foughtDialog=menu(foughtDialog,"dialog","self.screen.switchTo('world')","self.screen.switchTo('world')")
		actionable.__init__(self,pos,sprite,5)
	def getSightLine(self,terrainMap): #returns a list of 2-item locations that are their sightline
		#A beaten trainer no longer watches for the player.
		if self.trainer.fought:
			return []
		else:
			return actionable.getSightLine(self,terrainMap)
	def respond(self):
		#Unbeaten: show the intro dialog if there is one, otherwise fight at once.
		#Beaten: just replay the post-battle line.
		if not self.trainer.fought:
			opponent=self.trainer
			if self.trainer.beforeDialog:
				return ("menu",self.beforeDialog)
			else:
				return ("battle",opponent)
		else:
			return ("menu",self.foughtDialog)
	def activate(self,playerPos):
		"""Walk along the facing axis until adjacent to the player at playerPos,
		then respond as if the player had pressed A."""
		if self.facingDirection=="North":
			#player is above us (smaller Y): step upward until just below them
			while self.tempPos[1]!=playerPos[1]+1:
				self.tempPos[1]-=1
		elif self.facingDirection=="South":
			#player is below us (larger Y): step downward until just above them
			while self.tempPos[1]!=playerPos[1]-1:
				self.tempPos[1]+=1
		elif self.facingDirection=="West":
			while self.tempPos[0]!=playerPos[0]+1:
				self.tempPos[0]-=1
		elif self.facingDirection=="East":
			while self.tempPos[0]!=playerPos[0]-1:
				self.tempPos[0]+=1
		return self.respond()
################ init central for worlds, buildings, entrances, and associate maps
#general policy: initialize all portals with false,
#then the buildings on the maps,
#then the mon seeds,
#then the maps themselves,
#then create the worlds containing the buildings,
#then init the portals again with their destinations set.
#Otherwise you get circular dependencies.
########## Entrances with FALSE
#O1's destination is filled in further down, after the worlds exist.
O1=screenChanger("O1","door",False,[1,1])
########## Buildings
B1=testBuilding(O1)
########## Maps
#Two-char map legend (see screen.drawWorld): char 0 is the foreground sprite,
#char 1 the background terrain; a tuple cell holds a (building, terrain) pair.
testMap=[ #obsolete one-char map design
["x","x","x","x","x","x"],
["x","G","G","G","G","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x","T","T","-","-","x"],
["x",B1," "," "," ","x"],
["x"," "," "," "," ","x"],
["x",O1," "," "," ","x"],
["x","x","x","x","x","x"]
]
blankMap=[ #obsolete one-char map design
["x","x","x","x","x","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x"," "," "," "," ","x"],
["x","x","x","x","x","x"]
]
doubleMap=[
[" G"," G"," G"," G"," G"," G"],
["xG"," G"," G"," G"," G","xG"],
["x-"," -"," -"," -"," -","x-"],
["x-"," -"," -"," -"," -","x-"],
["x-","T ","T "," -"," -","x-"],
["x "," "," "," "," ","x "],
["x "," "," "," "," ","x "],
["x "," "," "," "," ","x "],
["x ","x ","x ","x "," ","x "]
]
buildingMap=[
[" G"," G"," G"," G"," G"," G"],
["xG"," G"," G"," G"," G","xG"],
["x-"," -"," -"," -"," -","x-"],
["x-"," -"," -"," -"," -","x-"],
["x-","T ","T "," -"," -","x-"],
["x ",(B1," ")," "," "," ","x "],
["x "," "," "," "," ","x "],
["x "," "," "," "," ","x "],
["x ","x ","x ","x "," ","x "]
]
############Screen and high-level "running the game" stuff##############
class screen:
#runs at start of screen, conducts background setup before first loop
def __init__(self,xDim,yDim,curWorld):
pygame.init()
self.xDim=xDim #screen width in sprites, must be odd to keep player in center
self.yDim=yDim #screen height in sprites must be odd to keep player in center
if xDim%2==0:
print "xDim is even, make it odd."
if yDim%2==0:
print "yDim is even, make it odd."
self.screenSize=(pixel*xDim,pixel*yDim)
self.gameScreen=pygame.display.set_mode(self.screenSize,0,32)
self.backgroundColor=pygame.Color(255,255,255)
self.gameScreen.fill(self.backgroundColor)
self.gameSlice=pygame.Surface(self.screenSize)
self.clock=pygame.time.Clock()
self.fps=36
self.curMenu=Intro
self.activeMenus=[Intro]
self.playerIsSurfing=False
self.changeWorld=False
self.mode="newMenu"
self.curWorld=curWorld
self.playerPos=[1,1] #Initial position [x,y] of the player.
self.switchTo(self.mode)
self.player=Red
Red.facingDirection="South"
self.curBattle=False #battle(Red,Gary,self)
for menu in allMenus:
menu.screen=self
self.running=True
#The following needs to be the last line in __init__!
self.mainloop()
def switchTo(self,mode):
self.mode=mode
if mode=="menu":
print "vanilla menu mode"
self.processInput=self.menuInput
self.drawScreen=self.drawMenu
elif mode=="newMenu":
self.needNewSlice=True
self.processInput=self.newMenuInput
self.drawScreen=self.drawNewMenu
elif mode=="world":
self.activeMenus=[placeholderMenu]
self.processInput=self.worldInput
self.drawScreen=self.drawWorld
self.worldUpdate()#make sure there's a world slice for your world.
self.terrainDebugMode=False
self.drawWorld()
elif mode=="battle":
self.activeMenus=[placeholderMenu]
self.processInput=self.battleInput
self.drawScreen=self.drawBattle
else:
print "I don't know how to switch to "+mode+". You and your expectations."
def mainloop(self):
while self.running:
if self.mode=="menu":
#find out what the user has done
event=self.getInput()
#deal with it, updating gamestate accordingly
self.processInput(event)#this will be a different function depending on what's going on
#draw
self.drawScreen()#this will be a different function depending on what's going on
self.clock.tick(self.fps)
elif self.mode=="newMenu":
self.needNewSlice=False
event=self.getInput()
self.processInput(event)
self.drawScreen()
self.clock.tick(self.fps)
elif self.mode=="battle":
event=self.getInput() #if in battle mode, then self.getInput=self.getBattleInput
self.processInput(event)
self.drawScreen()
self.clock.tick(self.fps)
elif self.mode=="world":
event=self.getInput()
self.processInput(event)
self.worldUpdate()
if event:
self.drawScreen()
self.clock.tick(self.fps)
pygame.display.quit() #after you quit and running turns off, the while will exit and the display will quit
#find the first valid input and pass to input processor
#if no valid input, pass Null
def getInput(self):
goodKeys=[K_a, K_s, K_m, K_SPACE, K_UP, K_DOWN, K_RIGHT, K_LEFT]
#add > and < later for time warp and p for pause
events = pygame.event.get()
for event in events:
if event.type == QUIT:
self.running=False
#WE SHOT DOWN A WINDOW
#pygame.display.quit()#shh!
#wE shot down a WINdow
break
elif event.type==KEYDOWN:
if event.key in goodKeys:
return event
return False
#process the input
def menuInput(self,event):
if event is fakeAPress:
print "Processing a FAKE KEY!? That's unethical!"
if not event:
return #if the player has done nothing worth noting, do nothing.
else:
self.activeMenus[-1].processInput(event, self)
def newMenuInput(self,event):
if not event:
return #if the player has done nothing worth noting, do nothing.
elif event.key==K_SPACE and self.activeMenus[-1]==start:
self.activeMenus=[placeholderMenu]
self.switchTo("world")
else:
self.activeMenus[-1].processInput(event, self)
def battleInput(self,event):
if not event:
return
else:
self.curBattle.curAction(event)
def worldInput(self,event):
#You should probably move all the world functions to their own class like with Battle
if not event:
return #if the player has done nothing worth noting, do nothing.
elif event.key==K_a:
if self.player.facingDirection=="North":
actSquare=[self.playerPos[0],self.playerPos[1]-1]
if self.player.facingDirection=="South":
actSquare=[self.playerPos[0],self.playerPos[1]+1]
if self.player.facingDirection=="West":
actSquare=[self.playerPos[0]-1,self.playerPos[1]]
if self.player.facingDirection=="East":
actSquare=[self.playerPos[0]+1,self.playerPos[1]]
thingFound=self.curWorld.tempSpriteMap[actSquare[1]][actSquare[0]][0]
if isinstance(thingFound,actionable):
if thingFound.facingDirection!="none":
if self.player.facingDirection=="North":
thingFound.facingDirection=="South"
if self.player.facingDirection=="South":
thingFound.facingDirection=="North"
if self.player.facingDirection=="East":
thingFound.facingDirection=="West"
if self.player.facingDirection=="West":
thingFound.facingDirection=="East"
response=thingFound.respond()
self.processResponse(response)
elif event.key==K_SPACE:
self.activeMenus=[start]
self.switchTo("newMenu")
elif event.key==K_s:
return
elif event.key==K_m:#toggle terrain-debug mode
self.terrainDebugMode=not self.terrainDebugMode
else: #move the player around
if event.key==K_UP:
self.player.facingDirection="North"
tempPos=[self.playerPos[0],self.playerPos[1]-1]
elif event.key==K_DOWN:
self.player.facingDirection="South"
tempPos=[self.playerPos[0],self.playerPos[1]+1]
elif event.key==K_LEFT:
self.player.facingDirection="West"
tempPos=[self.playerPos[0]-1,self.playerPos[1]]
elif event.key==K_RIGHT:
self.player.facingDirection="East"
tempPos=[self.playerPos[0]+1,self.playerPos[1]]
self.playerPos=self.checkMove(tempPos) #checkMove should return tempPos if the attempted square is passable, playerPos otherwise
if self.newOpponent and self.player.hasUnfaintedMons():
self.newOpponent.heal()
wildTrainer=wildPunkemon("Wild "+self.newOpponent.name,[self.newOpponent])
self.newOpponent.trainer=wildTrainer
self.curBattle=battle(self.player,wildTrainer,self)
self.switchTo("battle")
def processResponse(self,response):
if response[0]=="menu":
self.activeMenus[-1]=response[1]
self.activeMenus[-1].curSlide=1
self.switchTo("newMenu")
elif response[0]=="battle":
if self.player.hasUnfaintedMons():
self.curBattle=battle(self.player,response[1],self)
self.switchTo("battle")
else:
self.switchTo("world")
def checkMove(self,tempPos):
#Returns the new position after an attempted move
#If player can move into attempted square, returns new position
#if player cannot move into attempted square, returns old position
#if player stepped on a screenChanger, return False to signal to worldUpdate() that it needs to handle a world change
tryX=tempPos[0]
tryY=tempPos[1]
self.changeWorld=False
self.newOpponent=False
if tryX>=len(self.curWorld.tempTerrainMap[0]) or tryX<0:
return self.playerPos
if tryY>=len(self.curWorld.tempTerrainMap) or tryY<0:
return self.playerPos
newTerrain=self.curWorld.tempTerrainMap[tryY][tryX] #gets terrain type of square you're trying to move to
#demons happen ##meta-comment: I have no idea to what the first comment on this line refers.
if newTerrain==0:
self.player.takeStep()
return tempPos #can always walk on walkable terrain
elif newTerrain==1:
return self.playerPos #can never walk on impassable terrain
elif newTerrain==2:
self.player.takeStep()
self.changeWorld=True
return tempPos
elif newTerrain==3:
self.player.takeStep()
self.newOpponent=self.curWorld.landMonSeed.getFight(self.player.encounterModifier) #returns mon instance or False
return tempPos #can move into square, encounters are possible
elif newTerrain==4: #new terrain is water
if self.playerIsSurfing:
self.player.takeStep()
self.newOpponent=self.curWorld.waterMonSeed.getFight(self.player.encounterModifier)
return tempPos #if surfing, they can step into water
else:
return self.playerPos #cannot into water and covered in fleas
elif newTerrain==5: #an actionable is here
return self.playerPos
elif newTerrain==6:
for item in self.curWorld.actionables:
if tempPos in item.sightLineSquares:
response=item.activate(tempPos)
self.processResponse(response)
return tempPos
def getWorldSlice(self,whichMap="draw"):#pads and crops the world map.
#[This world map has been modified from its original version. It has been formatted to fit your screen.]
#self.curWorld,self.xDim,self.yDim,self.playerPos
xDim=self.xDim
yDim=self.yDim
paddingChar=self.curWorld.paddingChar
if whichMap=="draw":
startDrawMap=safeCopy(self.curWorld.tempDrawMap)
elif whichMap=="sprite":
startDrawMap=safeCopy(self.curWorld.tempSpriteMap)
playerX=self.playerPos[0]#might need to change this
playerY=self.playerPos[1]#might need to change this
topRow=playerY-(self.yDim-1)/2
paddingRow=[paddingChar]*len(startDrawMap[0])
#pad the top
startDrawMap=-topRow*[paddingRow]+startDrawMap
#if we padded the top, adjust the topRow; otherwise leave it
if topRow<0:
topRow=0
bottomRow=topRow+yDim #NO -1 HERE
bottomPad=bottomRow-len(startDrawMap)
#pad the bottom
startDrawMap+=bottomPad*[paddingRow]
leftCol=playerX-(xDim-1)/2
for i in range(0,len(startDrawMap)):
startDrawMap[i]=-leftCol*[paddingChar]+startDrawMap[i]
#if we padded the left, adjust the leftCol; otherwise leave it
if leftCol<0:
leftCol=0
rightCol=xDim+leftCol
rightPad=rightCol-len(startDrawMap[0])
for i in range(0,len(startDrawMap)):
startDrawMap[i]+=rightPad*[paddingChar]
choppedMap=[]
for line in startDrawMap[topRow:bottomRow]:
choppedMap.append(line[leftCol:rightCol])
centerX=(xDim-1)/2
centerY=(yDim-1)/2
choppedMap[centerY][centerX]="@"+choppedMap[centerY][centerX][1]
return choppedMap
def worldUpdate(self):
if self.changeWorld:
#this means we need to change the curWorld
self.curWorld.resetWorld()
warpSquare=self.curWorld.tempSpriteMap[self.playerPos[1]][self.playerPos[0]][0] #warpSquare is the screenChanger instance you stepped on
self.playerPos=warpSquare.startPos
self.curWorld=warpSquare.destination
self.changeWorld=False
self.curWorld.updateMaps()#WRITE THIS FUNCTION
self.worldSlice=self.getWorldSlice()
self.spriteSlice=self.getWorldSlice("sprite")
def drawWorld(self,surface=False):
if not surface:
surface=self.gameScreen
surface.fill(self.backgroundColor)
drawPos=[0,0]
drawArray=self.spriteSlice
secondList=[]
for row in drawArray:
drawPos[0]=0
for cell in row:
if isinstance(cell,tuple):
secondList.append(tuple(drawPos)) #handle its sprite later, but do blit the background
surface.blit(worldBGSpriteDict[cell[1]],dest=[drawPos[0]*pixel, drawPos[1]*pixel])
elif not self.terrainDebugMode:
surface.blit(worldBGSpriteDict[cell[1]],dest=[drawPos[0]*pixel, drawPos[1]*pixel])
if cell[0]=="@":
surface.blit(worldFGSpriteDict[cell[0]+self.player.facingDict[self.player.facingDirection]],dest=[drawPos[0]*pixel, drawPos[1]*pixel])
else:
surface.blit(worldFGSpriteDict[cell[0]],dest=[drawPos[0]*pixel, drawPos[1]*pixel])
else:
player=(cell[0]=="@")
surface.blit(self.getTerrainTile(self.curWorld.getTerrain(cell),player),dest=[drawPos[0]*pixel, drawPos[1]*pixel])
drawPos[0]+=1
drawPos[1]+=1
for item in secondList:
sprite = self.spriteSlice[item[1]][item[0]][0]
drawPos=[item[0]*pixel, item[1]*pixel]
if isinstance(sprite,(building,screenChanger,actionable)):
sprite.addToSurface(surface,drawPos)
else:
print "not adding", sprite
if surface==self.gameScreen:
pygame.display.flip() #otherwise we're drawing to a stored image
else:
return surface
def getTerrainTile(self,terrNum,player=False):
canvas=pygame.Surface((pixel,pixel),0,32)
colorDict={0:(200,200,200),1:(0,0,0),2:(255,0,0),3:(0,255,0),4:(0,0,255)}
canvas.fill(colorDict[terrNum])
if player:
pygame.draw.circle(canvas, (255,165,0), (pixel/2,pixel/2), int(pixel*.4), 0)
return canvas
def drawMenu(self):
self.gameScreen.fill(self.backgroundColor)
for menu in self.activeMenus:
if menu.oplistConstructor:
menu.tempOplist=eval(menu.oplistConstructor)
drawPos=[0,0]
drawArray=menu.getArray()
for row in drawArray:
drawPos[0]=0
for cell in row:
self.gameScreen.blit(menuSpriteDict[cell],dest=[drawPos[0]*pixel, drawPos[1]*pixel])
drawPos[0]+=1
drawPos[1]+=1
pygame.display.flip()
def drawNewMenu(self):
#if the screen has changed, update the background image
if self.needNewSlice:
self.gameSlice=self.drawWorld(self.gameSlice)
#draw the background image
self.gameScreen.blit(self.gameSlice,dest=(0,0))
#DRAW ALL THE MENUS
for menu in self.activeMenus:
if menu==self.activeMenus[-1]:
top=True
else:
top=False
if menu.oplistConstructor:
menu.tempOplist=eval(menu.oplistConstructor)
drawPos=[0,0]
drawArray=menu.getArray(top)
for row in drawArray:
drawPos[0]=0
for cell in row:
self.gameScreen.blit(menuSpriteDict[cell],dest=[drawPos[0]*pixel, drawPos[1]*pixel])
drawPos[0]+=1
drawPos[1]+=1
pygame.display.flip()
def drawBattle(self):
self.drawBattleMenu()
self.drawBattleGraphics()
pygame.display.flip()
def drawBattleMenu(self): #modify this to deal with null messages?
self.gameScreen.fill(self.backgroundColor)
drawPos=[0,0]
drawArray=self.curBattle.getArray()
for row in drawArray:
drawPos[0]=0
for cell in row:
self.gameScreen.blit(menuSpriteDict[cell],dest=[drawPos[0]*pixel, drawPos[1]*pixel])
drawPos[0]+=1
drawPos[1]+=1
def drawBattleGraphics(self):
#health bars
if not self.player.curMon:
playerHealthFraction=0.0
else:
playerHealthFraction=(self.player.curMon.tempStats["HP"]+0.0)/self.player.curMon.permStats["HP"]
if not self.curBattle.enemy.curMon:
enemyHealthFraction=0.0
else:
enemyHealthFraction=(self.curBattle.enemy.curMon.tempStats["HP"]+0.0)/self.curBattle.enemy.curMon.permStats["HP"]
playerHealthBarPos=(15*pixel,15*pixel)
enemyHealthBarPos=(2*pixel,8*pixel)
healthBarWidth=9
self.gameScreen.blit(self.getHealthBar(healthBarWidth,1,playerHealthFraction),playerHealthBarPos)
self.gameScreen.blit(self.getHealthBar(healthBarWidth,1,enemyHealthFraction),enemyHealthBarPos)
#xp bar
mon=self.player.curMon
if mon:
XPSinceLast=mon.XP-mon.getXP()#XP earned since last levelup
XPLastToNext=mon.getXP(mon.level+1)-mon.getXP()#XP between last levelup and next levelup
XPFraction=(XPSinceLast+0.0)/(XPLastToNext)
XPBarBlue=pygame.Rect(playerHealthBarPos[0],playerHealthBarPos[1]+pixel,healthBarWidth*pixel*XPFraction,5)
XPBarGray=pygame.Rect(playerHealthBarPos[0]+healthBarWidth*pixel*XPFraction,playerHealthBarPos[1]+pixel,healthBarWidth*pixel*(1-XPFraction),5)
pygame.draw.rect(self.gameScreen,(0,50,200),XPBarBlue)
pygame.draw.rect(self.gameScreen,(150,150,150),XPBarGray)
#mon names
if self.player.curMon:
playerMonNameSurface=self.getTextSurface(self.player.curMon.name)
self.gameScreen.blit(playerMonNameSurface,((screenWidth-len(self.player.curMon.name)-1)*pixel,playerHealthBarPos[1]-2*pixel))
if self.curBattle.enemy.curMon:
enemyMonNameSurface=self.getTextSurface(self.curBattle.enemy.curMon.name)
self.gameScreen.blit(enemyMonNameSurface,(enemyHealthBarPos[0],enemyHealthBarPos[1]-2*pixel))
#total mons and mons remaining
playerMonsUnfainted=[]
enemyMonsUnfainted=[]
for mon in self.curBattle.player.team:
if mon.tempStats["HP"]==mon.permStats["HP"]:
playerMonsUnfainted.append(2)
elif mon.tempStats["HP"]>0:
playerMonsUnfainted.append(1)
else:
playerMonsUnfainted.append(0)
for mon in self.curBattle.enemy.team:
if mon.tempStats["HP"]==mon.permStats["HP"]:
enemyMonsUnfainted.append(2)
elif mon.tempStats["HP"]>0:
enemyMonsUnfainted.append(1)
else:
enemyMonsUnfainted.append(0)
for i in range(0,len(playerMonsUnfainted)):
if playerMonsUnfainted[i]==2: #undamaged mon, blit regular punkeball
ballSprite=pokeball_team
elif playerMonsUnfainted[i]: #damaged mon, blit gray punkeball
ballSprite=pokeball_injured
else: #fainted mon, blit darkened punkeball
ballSprite=pokeball_faint
self.gameScreen.blit(ballSprite,(playerHealthBarPos[0]+i*1.5*pixel,playerHealthBarPos[1]-pixel))
for i in range(0,len(enemyMonsUnfainted)):
if enemyMonsUnfainted[i]==2: #unfainted mon, blit regular punkeball
ballSprite=pokeball_team
elif enemyMonsUnfainted[i]:
ballSprite=pokeball_injured
else: #fainted mon, blit darkened punkeball
ballSprite=pokeball_faint
self.gameScreen.blit(ballSprite,(enemyHealthBarPos[0]+i*1.5*pixel,enemyHealthBarPos[1]-pixel))
#status markers
markerWidth=35
playerMonStatusColors=[]
enemyMonStatusColors=[]
playerMon=self.curBattle.player.curMon
enemyMon=self.curBattle.enemy.curMon
for status in statusFlags: #list of potentially displayed status ailments
if playerMon and playerMon.status[status]:
playerMonStatusColors.append(statusFlags[status])
if enemyMon and enemyMon.status[status]:
enemyMonStatusColors.append(statusFlags[status])
for i in range(0,len(playerMonStatusColors)):
flag=playerMonStatusColors[i]
position=(playerHealthBarPos[0]-markerWidth*(i+1),playerHealthBarPos[1])
self.gameScreen.blit(flag,position)
for i in range(0,len(enemyMonStatusColors)):
flag=enemyMonStatusColors[i]
position=(enemyHealthBarPos[0]+pixel*healthBarWidth+markerWidth*i,enemyHealthBarPos[1])
self.gameScreen.blit(flag,position)
def getHealthBar(self,cellsLong,cellsHigh,value):
#takes a length and height in cells and a floating point (0,1] fraction-of-health-left
#returns a surface for blitting
healthBarSurface=pygame.Surface((pixel*cellsLong,pixel*cellsHigh),0,32)
barLength=(cellsLong-2*cellsHigh)*pixel+2*cellsHigh*pixel*11/15
posBarLength=ceil(barLength*value) #length in (screen, not game) pixels that is full
negBarLength=barLength-posBarLength
if value>=0.5:
color=(78,171,24)
elif value>=0.2:
color=(244,232,61)
else:
color=(227,85,14)
white=(255,255,255)
offsetX=4
offsetY=4
#draw the positive bar
healthBarSurface.fill(color,Rect(offsetX*cellsHigh,offsetY*cellsHigh,posBarLength,6*cellsHigh))
#draw the negative bar
healthBarSurface.fill(white,Rect(offsetX*cellsHigh+posBarLength,offsetY*cellsHigh,negBarLength,6*cellsHigh))
#draw the left end
scaledBarLeft=pygame.transform.smoothscale(healthBarLeft,(pixel*cellsHigh,pixel*cellsHigh))
healthBarSurface.blit(scaledBarLeft,(0,0))
#draw the right end
scaledBarRight=pygame.transform.smoothscale(healthBarRight,(pixel*cellsHigh,pixel*cellsHigh))
healthBarSurface.blit(scaledBarRight,(pixel*(cellsLong-cellsHigh),0))
#draw the middle
scaledBarMiddle=pygame.transform.smoothscale(healthBarMiddle,(pixel*(cellsLong-2*cellsHigh),pixel*cellsHigh))
healthBarSurface.blit(scaledBarMiddle,(pixel*cellsHigh,0))
return healthBarSurface
def getTextSurface(self,string):
textSurface=pygame.Surface((pixel*len(string),pixel),0,32)
for i in range(0,len(string)):
charImage=menuSpriteDict[string[i]]
textSurface.blit(charImage,(pixel*i,0))
return textSurface
#################Generating individual things
###### Global variables (semi-permanent)
IDnum=0 #increment this when a new punkemon is generated
numMoves=4
pixel=15 #side length of sprite grid unit in pixels
screenWidth=25
screenHeight=21
encourageList=["It's not over!","Get 'em!","I choose you!","You can do it!"]
placeholderSquirtle=Squirtle(8,"Squirtle")
Red=PC("Red","female",[placeholderSquirtle],20) # Squirtle is a placeholder. You needn't start with Squirtle if you don't want to. *coughbutyoushouldcough*
#Shared battle-UI sprites: team-status punkeballs and status-ailment flags
#(used by screen.drawBattleGraphics)
pokeball_team=pygame.image.load(os.path.join("sprites","pokeball_team.png"))
pokeball_injured=pygame.image.load(os.path.join("sprites","pokeball_injured.png"))
pokeball_faint=pygame.image.load(os.path.join("sprites","pokeball_faint.png"))
poisonFlag=pygame.image.load(os.path.join("sprites","flagPsn.png"))
burnedFlag=pygame.image.load(os.path.join("sprites","flagBrn.png"))
frozenFlag=pygame.image.load(os.path.join("sprites","flagFrz.png"))
sleepFlag=pygame.image.load(os.path.join("sprites","flagSlp.png"))
paralyzedFlag=pygame.image.load(os.path.join("sprites","flagPar.png"))
confusedFlag=pygame.image.load(os.path.join("sprites","flagCon.png"))
statusFlags={"poisoned":poisonFlag,"burned":burnedFlag,"frozen":frozenFlag,
			 "sleep":sleepFlag,"paralyzed":paralyzedFlag,"confused":confusedFlag}
rivalName="Should Not Display"
###### Menu instances (self,oplist,mode,execOnA,execOnS,rollable=False,oplistConstructor=False,screen=False) sorted by world or speaker
#NOTE(review): the string arguments appear to be eval'd/exec'd by the menu class
#when the player presses A or S (screen.drawNewMenu evals oplistConstructor) --
#confirm against the menu class before editing them.
placeholderMenu=menu(["You should never see this."],"dialog","self.screen.switchTo('world')","self.screen.switchTo('world')")
########### Typha menus
falseChoice=menu(["Boy","Girl"],"choice","Red.gender=self.oplist[self.curPos-1]\nself.replaceMenu(boy)","pass")
nickChoice=menu(["Choose a nickname:","ASSHAT","ASSFACE","BUTTHAT","BUTTFACE","FACEHAT","ASSBUTT",'"GARY"'],"titledChoice","garyActionable.trainer.name=self.oplist[self.curPos-1]\ngaryBefore.oplist[0]=garyActionable.trainer.name+garyBefore.oplist[0]\nself.replaceMenu(menuDict[self.oplist[self.curPos-1]])","pass")
starterMonChoice=menu(["Bulbasaur","Charmander","Squirtle"],"choice","self.pickStarter(self.oplist[self.curPos-1])","pass")
noDice=menu(["Since it seems I can't talk either of you two out of it~","Your adventure in the world of PUNKEMON fighting starts NOW. Grab a mon and get going!"],"dialog","self.replaceMenu(starterMonChoice)","pass")
doItAnyway=menu(["You can't scare me.","I'll be the best!"],"choice","self.replaceMenu(noDice)","pass")
talkOut=menu(["I'll tell you what I told him:\nThe fighting circuit ain't no nursery school.","You've got a better chance of ending up in jail or a body bag than as a PUNKEMON CHAMPION."],"dialog","self.replaceMenu(doItAnyway)","pass")
Intro=menu(["Yo!\nWelcome to the world of Punkemon~","My name is TYPHA.\nPeople in this hood, they call me the PUNKEMON PROFESSA.",
	"There are creatures called PUNKEMON all up in dis world.","Some people think PUNKEMON are monsters.\nAin't totally wrong~","Some people keep 'em as pets.\nOthers use them in fights.",
	"Me, I used to do that.\nNow I'm goin' straight.","I'm gonna study PUNKEMON as a profession.\nLab coat and everything.","When you're hiding behind that computer, it's hard to tell who you are.",
	"Are you a boy, or a girl?"],"dialog","self.replaceMenu(falseChoice)","pass")
boy=menu(["You remember my little bro.\nYou've been at each other's throats ever since you were kids.","What was your charming nickname for him again?"],"dialog","self.replaceMenu(nickChoice)","pass")
girl=boy #code as political statement, or lazy programmer? #bothisgood
#The above line is dead code, but I haven't deleted it because I want to keep the joke.
asshat=menu(['Oh, yeah. "Asshat."Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
assface=menu(['Oh, yeah. "Assface."Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
butthat=menu(['Oh, yeah. "Butthat." Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
buttface=menu(['Oh, yeah. "Buttface." Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
facehat=menu(['Oh, yeah. "Facehat." Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
assbutt=menu(['Oh, yeah. "Assbutt." Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
GaryNickname=menu(['Oh, yeah. "Gary". Ha! You have such a way with words~'],"dialog","self.replaceMenu(talkOut)","pass")
########### Start menu and its descendents
start=menu(["Punkemon","Wikidex","Items","longtest","longtesttitled","Stats","Save"],"choice","self.addToMenuStack(menuDict[self.oplist[self.curPos-1]])","self.screen.switchTo('world')",True)
startPunkemon=menu(False,"choice","pass","self.backUpMenuStack()",True,"list(Red.teamAsString())")
startWikidex=menu(False,"dialog","pass","self.backUpMenuStack()",True,"Red.wikidexAsList()")
startItems=menu(False,"choice","self.selectItemOutsideBattle()","self.backUpMenuStack()",True,"start.displayItemsList()")
itemChooseMon=menu(False,"choice","self.itemOutsideBattle(self.curPos-1)","self.backUpMenuStack()",True,"Red.teamAsString()")
longtest=menu(["1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16","17","18","19","20","21","22","23","24","25","26","27","28","29"],"choice","self.backUpMenuStack()","self.backUpMenuStack()")
longtesttitled=menu(["Title","1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16","17","18","19","20","21","22","23","24","25","26","27","28","29"],"titledChoice","self.backUpMenuStack()","self.backUpMenuStack()")
startStats=menu(["Stats not implemented yet"],"dialog","self.backUpMenuStack()","self.backUpMenuStack()")
saveGame=menu(["all is vanity\nand the pursuit\n of the wind\nyou cannot save yet"],"dialog","self.backUpMenuStack()","self.backUpMenuStack()")
########### Menus from the inescapableHellscape test world
despairSign=menu(["There is no escape from the inescapable hellscape.","Not for you~\n ~not for him."],"dialog","self.screen.switchTo('world')","self.screen.switchTo('world')")
garyBefore=menu([": Hey! How did you get here?"],"dialog","self.screen.processResponse(('battle',Gary))","self.screen.processResponse(('battle',Gary))",False)
garyAfter=menu(["Gary: Aww, man!"],"dialog","self.screen.switchTo('world')","self.screen.switchTo('world')")
#Lookup table from option strings to the menu they open.
menuDict={"Boy": boy,"Girl":girl,"FalseChoice":falseChoice,
	"nickChoice":nickChoice,"ASSHAT":asshat,"ASSFACE":assface,"BUTTHAT":butthat,"BUTTFACE":buttface,"FACEHAT":facehat,"ASSBUTT":assbutt,'"GARY"':GaryNickname,
	"talkOut":talkOut,"doItAnyway":doItAnyway,"noDice":noDice, "You can't scare me.":noDice,"I'm gonna be the best!":noDice,
	"Punkemon":startPunkemon,"Wikidex":startWikidex,"Items":startItems,"longtest":longtest,"longtesttitled":longtesttitled,"Stats":startStats,"Save":saveGame}
###check double type later
######Pokemon instance creation
##Initialize all pokemon with: level, name (optional), trainer (optional)
starterBulbasaur=Bulbasaur(8,"Bulbasaur")
betaBulbasaur=Bulbasaur(20,"Ivysaur")
powerBulbasaur=Bulbasaur(50,"Venusaur")
starterCharmander=Charmander(8,"Charmander")
betaCharmander=Charmander(20,"Charmeleon")
powerCharmander=Charmander(50,"Charizard")
starterSquirtle=Squirtle(8,"Squirtle")
betaSquirtle=Squirtle(20,"Wortortle")
powerSquirtle=Squirtle(50,"Blastoise")
derpy=Derp(30,"derpy")
Rattata6=Rattata(6,"Rattata")
Pidgey6=Pidgey(6,"Pidgey")
hovis=Hovisquirrel(6,"Hovisquirrel")
hypnotoad=Hypnotoad(6,"Hypnotoad")
########## Mon seeds
#monSeed({mon: weight, ...}, encounterRate) -- supplies wild encounters per world
Rattata5=Rattata(5,"Rattata")
Pidgey5=Pidgey(5,"Pidgey")
basicRouteSeed=monSeed({Pidgey5:1,Rattata5:1},10)
allRattataSeed=monSeed({Rattata:1},10) #fuck pidgeys, I'm trying to debug here
starterSeed=monSeed({starterBulbasaur:1,starterCharmander:1,starterSquirtle:1},20)
########## Worlds
#inescapableHellscape=world(False,testMap,6,9,basicRouteSeed,False)
emptyHellscape=world(False,blankMap,6,9)
doubleHellscape=world(False,doubleMap,6,9,basicRouteSeed,False," w")
inescapableHellscape=world(False,buildingMap,6,9,starterSeed,False," w") #change back to basicRouteSeed later
########## Entrances with INSIDES
#Now that the worlds exist, point the portal created earlier at its destination.
O1.destination=inescapableHellscape
######Hard sets of things that should be dynamically generated (Yeah testing!)
Red.inventory["Potion"]=5
Red.inventory["Super potion"]=5
Red.inventory["Repel"]=1
Red.inventory["Revive"]=4
Red.inventory["Punkeball"]=5
Red.inventory["Water stone"]=1
Red.inventory["Fire stone"]=1
Gary=character([starterBulbasaur],"Gary","wait for it",100,garyBefore,garyAfter,"normal")
garyActionable=NPCTrainer([4,0],"red",Gary,"West")
signActionable=NPC("sign",[0,0],despairSign,"sign","none")
inescapableHellscape.actionables.append(garyActionable)
inescapableHellscape.actionables.append(signActionable)
#Constructing the screen enters the main loop immediately (last line of screen.__init__).
game=screen(screenWidth,screenHeight,inescapableHellscape) #START
#############List of Abominations Unto Nuggan
#Squirtles that think they're Charmanders
#Charmanders that know electric moves
#Everything is named Bulbasaur
#The number of times my computer crashed while I was coding this
#Rattatas attacking themselves
#bool("False")=True
#circular dependencies, because they involve circular dependence
#up and down arrows being interpreted as weird non-ASCII characters
#trying to navigate the battle menus based on the first letter of each of the first two options
#C h a r m a n d e r r a n a w a y !
#Charmander learning Splash...twice.
#eternal rival battle
#Two Garys. That's 100% more Garys than we had yesterday and 100% more Garys than we want. (And none of them is Garys Vakarian.)
#healh bar aspect ratios of 12:1 or 12:5
| 66,337 | 45 | 3,553 |
b1aa1068d9ac2711832dc5aa7aa89185bdf31d98 | 15,321 | py | Python | src/common_utils_data/regex_functions.py | Mi524/common_utils_data | 400baf3be1bc96a06bd1f3d70abaf8dd749cfb85 | [
"MIT"
] | null | null | null | src/common_utils_data/regex_functions.py | Mi524/common_utils_data | 400baf3be1bc96a06bd1f3d70abaf8dd749cfb85 | [
"MIT"
] | null | null | null | src/common_utils_data/regex_functions.py | Mi524/common_utils_data | 400baf3be1bc96a06bd1f3d70abaf8dd749cfb85 | [
"MIT"
] | null | null | null | import re
from collections import defaultdict
from string import punctuation
import string
from flashtext import KeywordProcessor
# keyword_processor = KeywordProcessor()
# for i in ['tests','testss','test','5G is(not','ok','100%','不可能吧','优势','在哪里','哪里']:
# keyword_processor.add_keyword(i)
# print(keyword_processor.get_all_keywords())
# text = 'tests 100% are do.ne testss/5G is(not ok'
# # text = '你觉得5G或者优势在哪里'
# kw_found = keyword_processor.extract_keywords(text)
# print(kw_found)
# exit()
#数字模糊匹配函数,数字连着命中才算符合条件,即128对应的必须是128才算命中一个,命中12不算
def strB2Q(ustring):
    """Convert half-width (ASCII) characters in *ustring* to full-width.

    The half-width space (32) maps to the ideographic space (12288); every
    other printable ASCII character (33-126) is shifted by 65248 into the
    full-width forms block. All other characters pass through unchanged.
    """
    def widen(ch):
        code = ord(ch)
        if code == 32:
            # half-width space -> ideographic (full-width) space
            return chr(12288)
        if 33 <= code <= 126:
            # printable ASCII -> its full-width counterpart
            return chr(code + 65248)
        return ch

    return ''.join(widen(ch) for ch in ustring)
def strQ2B(ustring):
    """Convert full-width characters in *ustring* to half-width.

    The ideographic space (12288) maps to the ASCII space (32); codepoints
    65281-65374 (full-width forms) are shifted back by 65248. Everything
    else passes through unchanged.

    NOTE(review): this definition is immediately shadowed by the
    translate-based redefinition that follows it in this file.
    """
    def narrow(ch):
        code = ord(ch)
        if code == 12288:
            # ideographic space -> ASCII space
            return chr(32)
        if 65281 <= code <= 65374:
            # full-width form -> ASCII
            return chr(code - 65248)
        return ch

    return ''.join(narrow(ch) for ch in ustring)
def strQ2B(ustring):
    """Map common full-width punctuation in *ustring* to its half-width form.

    Only the symbol pairs listed below are translated (including mapping
    the full-width period 。 to '.' and 《》 to '<>'); everything else is
    left untouched.
    """
    halfwidth_symbol = '!\"\"#$%&\'\'()*+,-./:;<=>?@[]_{|}~ '
    fullwidth_symbol = '!“”#$%&‘’()*+,-。/:;《=》?@【】_{|}~ '
    # one-to-one table: fullwidth_symbol[i] -> halfwidth_symbol[i]
    table = {ord(full): half for full, half in zip(fullwidth_symbol, halfwidth_symbol)}
    return ustring.translate(table)
def replace_multi_symbol(string, symbol):
    """Collapse consecutive runs of *symbol* in *string* to one occurrence.

    E.g. several '\n' in a row become a single '\n'; a one-shot
    replace('\n\n', '\n') does not reach the fixpoint, so this loops until
    no doubled symbol remains.
    """
    doubled = symbol * 2
    while doubled in string:
        string = string.replace(doubled, symbol)
    return string
def search_en(combine_string):
    """Extract the English part of a mixed Chinese/English string.

    Returns the first non-CJK fragment that contains at least one word
    character, or None when no such fragment exists.
    """
    non_cn_pat = "[^\u4e00-\u9fa5]+"
    en_pat = ".*(\w)+.*"
    # walk the non-Chinese fragments and return the first one holding \w content
    for fragment in re.findall(non_cn_pat, combine_string):
        match = re.search(en_pat, fragment)
        if match is not None:
            return match.group()
    return None
def partial_match_pct(short_str,long_str,special_syn=['pro','plus','max','youth']):
    """
    short_str : original string
    long_str : standard target string
    Match phone-model names; never pass two identical strings. Returns how
    much of the long (standard) string the short one covers, matching from
    the start, without stripping spaces and without a plain `in` shortcut.
    Models carrying a bracket suffix get top priority: of Y15S and
    Y15(4+64G), the latter is the one usually abbreviated to Y15."""
    # 2020-01-07: check whether the long string carries a special keyword.
    # NOTE(review): this copy of the function calls check_syn_str() below,
    # but no such helper is defined in this slice of the file - presumably
    # a nested helper was stripped; confirm it exists before relying on
    # this copy (otherwise a NameError is raised at runtime).
    # Open issue: making 'IQOO 3 4G' prefer 'IQOO 3' is still unsolved.
    default_result = (0,long_str)
    if type(short_str) != str:
        return default_result
    short_str = short_str.strip().lower().replace('\n','').replace(' ',' ')
    new_long_str = long_str.strip().lower().replace('\n','').replace(' ',' ')
    # equal after dropping newlines/extra spaces -> treat as a full match
    if short_str == new_long_str:
        return (99,long_str)
    # #防止放入同一字符串
    # if short_str == new_long_str:
    #     return default_result
    # both bracket styles need split handling
    if '(' in new_long_str or '(' in new_long_str :
        new_long_str = new_long_str.replace('(','(').split('(')[0]
    # elif ' ' in new_long_str :
    #     new_long_str = new_long_str.split()[0]
    """ 匹配可能包含错误拼写,包括漏写,多写,错写的机型名, 机型名一般有NEX, S1, X3S, V1Max, V15Pro,
    允许的错别字条件是:不允许数字写错,不允许前面的字母写错,当字母大于等于3个时,允许漏写或者错写,多写2个字母,
    比如pro写成pr ,prou, max写成ma, V15Pro 写成 V15P(如果有V15P应该在之前就可以关联上,所以排除他是V15P的可能,
    更大可能是想写V15Pro)"""
    # when the whole short_str is spelled correctly the prefix scan below
    # returns normally; misspellings fall through to the fuzzy branch
    match_short_str = ''
    # flags: first two/three letters equal, used by the elif condition below
    first2_letter_equal = short_str[:2] == new_long_str[:3].split(' ')[0][:2]
    first3_letter_equal = short_str[:3] == new_long_str[:3].split(' ')[0][:3]
    # when the first 3 chars hold 2+ digits, the first 3 chars must agree
    first3_letter_2num_short = re.search('[0-9]{2,}',short_str[:3]) != None
    first3_letter_2num_long = re.search('[0-9]{2,}',new_long_str[:3]) != None
    first3_letter_less2num_short = not first3_letter_2num_short
    first3_letter_less2num_long = not first3_letter_2num_long
    # 1 char: falls through to default; 2 chars: the first two characters
    # (spaces removed) must be identical AND the long string must carry no
    # special keyword such as 'pro'
    if len(short_str) == 2 :
        if not check_syn_str(new_long_str,special_syn):
            new_long_str = new_long_str.split(' ')[0].split('(')[0]
            if short_str == new_long_str:
                return (2/len(long_str),long_str)
            else:
                return default_result
        else: # long string contains pro/plus/... -> judged as no match
            return default_result
    # >=3 chars and first two letters equal; when the first 3 chars contain
    # 2+ digits the first 3 chars must match too (guards 'V 15' spacing)
    elif (len(short_str) >= 3 and first2_letter_equal) and \
        (
            (first3_letter_2num_short and first3_letter_2num_long and first3_letter_equal) \
            or (first3_letter_less2num_short and first3_letter_less2num_long and first2_letter_equal)
        ):
        for i in range(2,len(short_str)+1) :
            if short_str[:i].replace(' ','') in new_long_str.replace(' ',''):
                match_short_str = short_str[:i]
                # NOTE(review): 'continue' is a no-op here (last statement
                # of the loop body); the loop simply records the longest
                # matching prefix of short_str
                continue
        # prefer higher match ratios; the extra long-string bias lets V11i
        # pick V11 over V11Pro when the base ratios tie
        # -- ties like (0.75, 'V11Pro') vs (0.75, 'V11') are hard to order
        # by a later sort on long_str alone
        if ' ' in long_str or '(' in long_str:
            long_str_bias = len(match_short_str)/len(long_str.split(' ')[0].split('(')[0])/100 # keep the ratio small
            #如果去掉空格和( 符号之后的long_str_bias仍然相等,就将原来的标准机型全部去掉这些字符,对比整体全部的匹配度,做一个bias
            # long_str_bias += len(match_sequence_str(short_str,long_string)) / (len(short_str) + len(long_string)) / 1000 #比例更小
        else: # no space or '(': the variant without pro/... should win
            long_str_bias = len(match_short_str)/len(long_str)/100 + 0.00001
        # both strings must agree on having (or not having) a special keyword
        if check_syn_str(short_str,special_syn) == check_syn_str(new_long_str,special_syn):
            return (len(match_short_str)/len(short_str) + long_str_bias ,long_str)
    return default_result
def re_sub_replace(pattern, string, repace_symbols=('-', '_'), group_name=1):
    """Substitute inside a captured group only.

    For every match of *pattern* in *string*, replace repace_symbols[0]
    with repace_symbols[1] within group *group_name*, leaving the rest of
    the match untouched. Use this when re.sub's "\\g<1>" replacement
    templates cannot express the rewrite.

    :param pattern: regex with at least one capturing group
    :param string: original string
    :param repace_symbols: (old, new) pair; parameter name kept as-is
        (missing 'l') for backward compatibility
    :param group_name: index or name of the group to rewrite
    """
    def _rewrite(matched):
        # re.sub hands us the match object; rebuild the full match with the
        # target group's symbols swapped.
        whole = matched.group()
        target = matched.group(group_name)
        return whole.replace(target, target.replace(repace_symbols[0], repace_symbols[1]))

    # Bug fix: the original passed an invalid 'repace_symbols' keyword to
    # re.sub (TypeError) and referenced an undefined 'replace_match' callback.
    return re.sub(pattern, _rewrite, string)
def re_findall_replace(pattern, string, replace_symbols=('-', '_')):
    """Rewrite symbols inside every match of *pattern* found in *string*.

    For each matched substring, replace_symbols[0] is swapped for
    replace_symbols[1]; useful when re.sub's "\\g<1>" templates cannot
    express the rewrite and several match targets must be handled.
    """
    old, new = replace_symbols
    result = string
    for fragment in re.findall(pattern, string):
        result = result.replace(fragment, fragment.replace(old, new))
    return result
def re_findall_sub(pattern, string, replace_symbols=('-', '_')):
    """Identical in behaviour to re_findall_replace: inside every match of
    *pattern* in *string*, swap replace_symbols[0] for replace_symbols[1].
    """
    result = string
    matches = re.findall(pattern, string)
    for hit in matches:
        patched = hit.replace(replace_symbols[0], replace_symbols[1])
        result = result.replace(hit, patched)
    return result
def split_wrong_combine(pattern,sub_pattern,string):
    """
    Repairs scraped text whose newlines were stripped, gluing words
    together into wrong forms such as axxBx, AxxxCONs, usagePROgreat,
    slow3.Great sunglass.
    (Note: 3app / 3PlayStore count as mere typos and must not be split;
    LED3 cannot be split - it would recurse forever.)
    Each wrongly merged pair of words is split apart with a newline.
    :param pattern : the original pattern that we want to find out
    :param sub_pattern : sub pattern to extract from pattern,
                         will be replaced with original + '\n' or '\n' + original
    :param string : target string
    e.g
        pattern = '[A-Z]?[a-z]+[A-Z0-9]+[a-zA-Z]*'
        sub_pattern = '[A-Z0-9]+[a-z]*'
    """
    # collect the fragments that need rewriting: original -> repaired
    new_string = string
    new_string_dict = defaultdict(str)
    matched_list = re.findall(pattern,new_string)
    if matched_list :
        for mat in matched_list:
            match_sub = re.search(sub_pattern,mat)
            # sub_pattern must hit part of what pattern matched
            if match_sub != None:
                match_sub = match_sub.group()
                replace_match = mat.replace(match_sub,'\n' + match_sub)
                # special case: when the split lands at the very start of
                # the fragment, keep the first word's own leading "empty"
                # and put the newline AFTER the first word instead
                if [ x for x in replace_match].index('\n') == 0:
                    replace_match = replace_match.replace('\n','')
                    replace_match = mat.replace(match_sub, match_sub + '\n')
                # recurse: the repaired fragment may still contain merges
                replace_matched = split_wrong_combine(pattern,sub_pattern,replace_match)
                new_string_dict[mat] = replace_matched
    else:
        return new_string
    for k,v in new_string_dict.items():
        new_string = new_string.replace(k,v)
    return new_string
#++++++++++++++++++以下废弃函数++++++++++++++++++++++++
# def standardize_country_by_cn_similarty(short_str, standard_str_list):
# #处理中文国家缩写和完整国家名称无法匹配到的情况
# standard_str_list = list(set([str(x).strip() for x in standard_str_list]))
# standard_str_list = sorted(standard_str_list, key=len, reverse= False)
# #通过前面字符串匹配 马来 -- > 马来西亚
# standard_similarity_list = [ (s,1) if short_str in s else (s,0) for s in standard_str_list ]
# if standard_similarity_list[0][1] > 0 :
# return standard_similarity_list
# else:
# standard_similarity_list = [ ]
# for ss in standard_str_list:
# short_match_counter = 0
# for ss_each_letter in ss:
# for s in short_str:
# if s == ss_each_letter:
# short_match_counter += 1
# #至少能匹配上两个字
# str_similarity = short_match_counter / len(short_str) + short_match_counter / min([len(short_str),len(ss)])
# if short_match_counter >= 2 :
# standard_similarity_list.append([ss,str_similarity])
# else:
# standard_similarity_list.append([ss, 0 ])
# standard_similarity_list = sorted(standard_similarity_list, key=lambda x:x[1],reverse=True)
# return standard_similarity_list
| 37.186893 | 142 | 0.621826 | import re
from collections import defaultdict
from string import punctuation
import string
from flashtext import KeywordProcessor
# keyword_processor = KeywordProcessor()
# for i in ['tests','testss','test','5G is(not','ok','100%','不可能吧','优势','在哪里','哪里']:
# keyword_processor.add_keyword(i)
# print(keyword_processor.get_all_keywords())
# text = 'tests 100% are do.ne testss/5G is(not ok'
# # text = '你觉得5G或者优势在哪里'
# kw_found = keyword_processor.extract_keywords(text)
# print(kw_found)
# exit()
def get_keyword_pat(keyword_list):
    """Build an alternation regex from *keyword_list*.

    Keywords are deduplicated and ordered longest-first so that longer
    keywords win the alternation when a shorter one is their prefix.
    """
    unique_keywords = sorted(set(keyword_list), key=len, reverse=True)
    return '(' + '|'.join(unique_keywords) + ')'
#数字模糊匹配函数,数字连着命中才算符合条件,即128对应的必须是128才算命中一个,命中12不算
def number_similarity(a, b, common_pattern):
    """Fuzzy numeric comparison between two model strings.

    *common_pattern* is a regex both strings are expected to share (e.g. a
    memory-spec pattern); without it, '(y3 16G+128)' could wrongly match
    '(3+16G)'. Returns the numbers found in *b*'s match (b is the
    standardised reference) when every one of them also occurs in *a*'s
    match; otherwise an empty list. An empty *common_pattern* also yields
    an empty list (previously an implicit None).
    """
    if common_pattern != '':
        a_match = re.search(common_pattern, a, flags=re.I)
        if a_match is None:
            return []
        b_match = re.search(common_pattern, b, flags=re.I)
        # Bug fix: b may fail to match even when a does; the old code called
        # b_match.group() unguarded and raised AttributeError.
        if b_match is None:
            return []
        number_list_a = re.findall(r'\d+', a_match.group())
        number_list_b = re.findall(r'\d+', b_match.group())
        if number_list_a:
            intersection = set(number_list_a) & set(number_list_b)
            # every number of the reference b must appear in a's match
            if intersection and set(number_list_b) == intersection:
                return number_list_b
    return []
def check_syn_str_regex(string_a, string_b, special_syn_list):
    """Return True when every special-pattern hit found in *string_a* is
    also found in *string_b* (case-insensitive, compared as matched text).
    """
    def _hits(text):
        # collect the matched text of each pattern that fires on `text`
        found = []
        for pat in special_syn_list:
            hit = re.search(pat, text, flags=re.I)
            if hit is not None:
                found.append(hit.group())
        return found

    hits_a = _hits(string_a.lower())
    hits_b = _hits(string_b.lower())
    common = set(hits_a) & set(hits_b)
    return len(common) == len(hits_a)
def check_syn_str_regex_number(string_a, string_b, special_syn_list):
    """Return True when at least one pattern in *special_syn_list* matches
    BOTH strings (case-insensitive).

    Unlike check_syn_str_regex, no intersection of matched text is needed -
    mere co-existence of a match counts, e.g.
    check_syn_str_regex_number('(128G+8G)','8+28G',
        ['(\\d{1,4}[GB]?\\+\\d{1,4}[GB]?)|(\\d{1,4}G{1}B?)']) -> True.
    """
    string_a = string_a.lower()
    string_b = string_b.lower()
    # Dead-code removal: the old per-pattern flag lists always had equal
    # length (both iterate special_syn_list), so its length guard never
    # fired; the contract reduces to "some pattern hits both strings".
    return any(
        re.search(pat, string_a, flags=re.I) is not None
        and re.search(pat, string_b, flags=re.I) is not None
        for pat in special_syn_list
    )
def strB2Q(ustring):
"""把字符串半角转全角"""
ss = []
for s in ustring:
rstring = ""
for uchar in s:
inside_code = ord(uchar)
# 全角空格直接转换
if inside_code == 32:
inside_code = 12288
# 全角字符(除空格)根据关系转化
elif (inside_code >= 33 and inside_code <= 126):
inside_code += 65248
rstring += chr(inside_code)
ss.append(rstring)
#顿号要转成逗号
return ''.join(ss)
def strQ2B(ustring):
"""把字符串全角转半角"""
ss = []
for s in ustring:
rstring = ""
for uchar in s:
inside_code = ord(uchar)
if inside_code == 12288: # 全角空格直接转换
inside_code = 32
elif (inside_code >= 65281 and inside_code <= 65374): # 全角字符(除空格)根据关系转化
inside_code -= 65248
rstring += chr(inside_code)
ss.append(rstring)
return ''.join(ss)
def strQ2B(ustring):
"""把字符串全角转半角"""
halfwidth_symbol = '!\"\"#$%&\'\'()*+,-./:;<=>?@[]_{|}~ '
fullwidth_symbol = '!“”#$%&‘’()*+,-。/:;《=》?@【】_{|}~ '
translator = str.maketrans(fullwidth_symbol, halfwidth_symbol)
ustring = ustring.translate(translator)
return ustring
def strip_puntuations(input_string):
    """Strip ALL punctuation characters from both ends of *input_string*.

    Bug fix: the old implementation stripped one punctuation character at a
    time in a fixed order, so interleaved symbols such as '.!a!.' were only
    partially removed (leaving '!a!'); str.strip with the full character
    set keeps stripping until no leading/trailing punctuation remains,
    matching the documented intent.
    (Function name typo - 'puntuations' - kept for backward compatibility.)
    """
    return input_string.strip(punctuation)
def replace_punctuations(input_string, replace_to_symbol=' ', exclude=[]):
    """Replace every punctuation character in *input_string* with
    *replace_to_symbol* (default: a space), keeping characters listed in
    *exclude* untouched. Non-str inputs are returned unchanged.

    :param replace_to_symbol: replacement text; may be multi-character
        (the old str.maketrans(str, str) form required equal lengths and
        raised ValueError for anything but a single character)
    :param exclude: punctuation characters to keep (a single char or list)
    """
    # normalise a single excluded char into a list (default [] is never
    # mutated, so the mutable default is harmless here)
    if not isinstance(exclude, list):
        exclude = [exclude]
    if not isinstance(input_string, str):
        return input_string
    # dict-form translation table: ord(char) -> replacement string
    table = {
        ord(ch): (ch if ch in exclude else replace_to_symbol)
        for ch in string.punctuation
    }
    return input_string.translate(table)
def replace_multi_symbol(string, symbol):
"""把多个符号替换成单个,比如多个换行符 替换成 一个换行符,replace('\n\n','\n')并不能解决问题"""
symbol_double = symbol + symbol
while symbol_double in string:
string = string.replace(symbol_double,symbol)
return string
def symbol_to_spaces(string):
    """Normalise *string*: full-width symbols to half-width, punctuation to
    spaces, ends trimmed, then runs of spaces collapsed to one.

    (The parameter name shadows the `string` module within this function;
    kept for backward compatibility with keyword callers.)
    """
    halfwidth = strQ2B(string)
    spaced = replace_punctuations(halfwidth).strip()
    return replace_multi_symbol(spaced, ' ')
def normalize_punctuations(string):
    """Currently a no-op: returns *string* unchanged (logic disabled below)."""
    # Meant for splitting (matching turns symbols into spaces, but a split
    # must not alter the resulting symbols).
    # Intended behaviour: normalise every symbol in the string (full-width
    # to half-width, collapse repeated symbols to one, leave no spaces or
    # newlines at either end) - the implementation is commented out.
    # convert to half-width
    # string = strQ2B(string)
    # for s in punctuation:
    #     string = replace_multi_symbol(string, s)
    # # make sure no special symbols remain at the two ends
    # string = string.strip()
    return string
def replace_re_special(word):
    """Escape regex metacharacters in *word* so it can be embedded in a
    pattern literally (like re.escape, but over this project's fixed
    character set which also covers '~' and ',').

    The backslash must be escaped first, because the loop itself inserts
    backslashes. Bug fix: '^' appeared twice in the character list, so a
    caret was escaped twice ('a^' became 'a\\\\^', a pattern matching a
    literal backslash followed by '^').
    """
    for special_symbol in r'\-+()[]{}.*^$~|?,':
        word = word.replace(special_symbol, '\\' + special_symbol)
    return word
def search_en(combine_string):
"""提取出中英文混合字符串中的英文部分"""
non_cn_pat = "[^\u4e00-\u9fa5]+"
en_pat = ".*(\w)+.*"
found_all = re.findall(non_cn_pat,combine_string)
en_found_all = []
if found_all : #定位有英文的部分
en_found_all = [re.search(en_pat,x).group() for x in found_all if re.search(en_pat,x) != None]
if en_found_all :
return en_found_all[0]
return None
def partial_match_pct(short_str,long_str,special_syn=['pro','plus','max','youth']):
"""
short_str : original string
long_str : standard target string
匹配机型,不要放入两个一样的字符串,获取短字符串在长字符串中是否存在,并且占了多少个字符,
从开头开始匹配,不替换空格不用in的方式查找,带有括号的机型匹配优先级最高,
通常Y15S和Y15(4+64G)后者更容易被缩写成Y15"""
#20200107 :检查长字符串是否包含有特殊字符
def check_syn_str(new_str,special_syn):
new_str = new_str.lower()
match_syn_list = [x for x in special_syn if x in new_str]
if match_syn_list:
return match_syn_list[0]
else:
return 'NA'
#暂时不懂怎么把IQOO 3 4G 优先匹配到 IQOO 3
def match_sequence_str(short_string,long_string):
#获取一个字符串对应另一个字符串匹配的字母
# input 'iQOO 3 4G', IQOO 3 (5G) --> IQOO 3
# input 'iQOO 3 4G', IQOO 3 (4G) --> IQOO 3 4
#复制相同的一个
long_counter = 0
temp_record = ''
for s1 in short_string:
for s2 in long_string[long_counter:] :
if s1 == s2:
long_counter += 1
temp_record += s2
break
return temp_record
default_result = (0,long_str)
if type(short_str) != str:
return default_result
short_str = short_str.strip().lower().replace('\n','').replace(' ',' ')
new_long_str = long_str.strip().lower().replace('\n','').replace(' ',' ')
#去掉换行符和多空格之后相等的话 直接返回长字符串
if short_str == new_long_str:
return (99,long_str)
# #防止放入同一字符串
# if short_str == new_long_str:
# return default_result
#括号和空格都要分割处理
if '(' in new_long_str or '(' in new_long_str :
new_long_str = new_long_str.replace('(','(').split('(')[0]
# elif ' ' in new_long_str :
# new_long_str = new_long_str.split()[0]
""" 匹配可能包含错误拼写,包括漏写,多写,错写的机型名, 机型名一般有NEX, S1, X3S, V1Max, V15Pro,
允许的错别字条件是:不允许数字写错,不允许前面的字母写错,当字母大于等于3个时,允许漏写或者错写,多写2个字母,
比如pro写成pr ,prou, max写成ma, V15Pro 写成 V15P(如果有V15P应该在之前就可以关联上,所以排除他是V15P的可能,
更大可能是想写V15Pro)"""
#从头开始匹配的时候,如果完整的short_str是准确拼写的,正常返回,如果有错别字,采用else以下的匹配方式
match_short_str = ''
#前两/三位字母相同,方便后面elif写条件
first2_letter_equal = short_str[:2] == new_long_str[:3].split(' ')[0][:2]
first3_letter_equal = short_str[:3] == new_long_str[:3].split(' ')[0][:3]
#前3位字符串包含2个数字的情况 必须满足前3个字母相同,
first3_letter_2num_short = re.search('[0-9]{2,}',short_str[:3]) != None
first3_letter_2num_long = re.search('[0-9]{2,}',new_long_str[:3]) != None
first3_letter_less2num_short = not first3_letter_2num_short
first3_letter_less2num_long = not first3_letter_2num_long
#一个字符直接返回0,两个字符,去掉空格之后的前两个字符要完全相等,并且要确保长字符串里没有special_syn的字符
if len(short_str) == 2 :
if not check_syn_str(new_long_str,special_syn):
new_long_str = new_long_str.split(' ')[0].split('(')[0]
if short_str == new_long_str:
return (2/len(long_str),long_str)
else:
return default_result
else: #如果长字符串含有pro等字符 直接判断不匹配
return default_result
#至少出现3个字符,并且前两个字母(如果前两位是字母+数字,后面不再有数字),如果前3位包含2个数字,前3个字符要相同,规避掉V 15这种空格的情况
elif (len(short_str) >= 3 and first2_letter_equal) and \
(
(first3_letter_2num_short and first3_letter_2num_long and first3_letter_equal) \
or (first3_letter_less2num_short and first3_letter_less2num_long and first2_letter_equal)
):
for i in range(2,len(short_str)+1) :
if short_str[:i].replace(' ','') in new_long_str.replace(' ',''):
match_short_str = short_str[:i]
continue
#优先计算匹配率大的字符串,并且为了实现区分V11i 优先匹配到V11而不是V11Pro的情况,而外加一个长字符串的比率
#--计算结果相同(0.75, 'V11Pro') (0.75, 'V11'),后面的sort比较难实现long_str的顺序排列
if ' ' in long_str or '(' in long_str:
long_str_bias = len(match_short_str)/len(long_str.split(' ')[0].split('(')[0])/100 #比例需要调小
#如果去掉空格和( 符号之后的long_str_bias仍然相等,就将原来的标准机型全部去掉这些字符,对比整体全部的匹配度,做一个bias
# long_str_bias += len(match_sequence_str(short_str,long_string)) / (len(short_str) + len(long_string)) / 1000 #比例更小
else: #没有出现空格和( 的 不带pro的应该优先
long_str_bias = len(match_short_str)/len(long_str)/100 + 0.00001
#确保短字符和长字符同时有或者同时没有special_syn_str
if check_syn_str(short_str,special_syn) == check_syn_str(new_long_str,special_syn):
return (len(match_short_str)/len(short_str) + long_str_bias ,long_str)
return default_result
def re_sub_replace(pattern, string, repace_symbols=('-', '_'), group_name=1):
    """Substitute inside a captured group only.

    For every match of *pattern* in *string*, replace repace_symbols[0]
    with repace_symbols[1] within group *group_name*, leaving the rest of
    the match untouched. Use this when re.sub's "\\g<1>" replacement
    templates cannot express the rewrite.

    :param pattern: regex with at least one capturing group
    :param string: original string
    :param repace_symbols: (old, new) pair; parameter name kept as-is
        (missing 'l') for backward compatibility
    :param group_name: index or name of the group to rewrite
    """
    def replace_match(matched):
        # re.sub passes the match object automatically
        original_string = matched.group()
        # group_name may be a group index or a group name
        matched_string = matched.group(group_name)
        # Bug fix: the body referenced undefined 'replace_symbols'; the
        # parameter is (mis)spelled 'repace_symbols'.
        return original_string.replace(
            matched_string,
            matched_string.replace(repace_symbols[0], repace_symbols[1]))

    # Bug fix: re.sub() has no 'repace_symbols' keyword; passing it raised
    # TypeError on every call.
    return re.sub(pattern, replace_match, string)
def re_findall_replace(pattern,string,replace_symbols=('-','_')):
"""
当re.sub(pattern,repl,string)内置的repl = "g<1>" 不能满足替换需求的时候,
比如需要将group目标内的文字中的某个符号替换掉,并且需要match多个group目标
"""
matched_list = re.findall(pattern,string)
new_string = string
for mat in matched_list:
new_string = new_string.replace(mat,mat.replace(replace_symbols[0],replace_symbols[1]))
return new_string
def re_findall_sub(pattern,string,replace_symbols=('-','_')):
"""
当re.sub(pattern,repl,string)内置的repl = "g<1>" 不能满足替换需求的时候,
比如
"""
matched_list = re.findall(pattern,string)
new_string = string
for mat in matched_list:
new_string = new_string.replace(mat,mat.replace(replace_symbols[0],replace_symbols[1]))
return new_string
def split_wrong_combine(pattern,sub_pattern,string):
"""
需要处理爬虫换行符的问题, 抓取的时候把换行符去掉了,导致单词连接成错误的形式
比如 axxBx, AxxxCONs, usagePROgreat, slow3.Great sunglass
(注意 3app , 3PlayStore只能算是拼写错误,不需要拆分, LED3 拆不了,陷入无限循环)
需要将两个错误合并的单词用换行符替换拆分,
:param pattern : the original pattern that we want to find out
:param sub_pattern : sub pattern to extract from pattern,
will be replaced with original + '\n' or '\n' + original
:param string : target string
e.g
pattern = '[A-Z]?[a-z]+[A-Z0-9]+[a-zA-Z]*'
sub_pattern = '[A-Z0-9]+[a-z]*'
"""
#记录需要修改的部分
new_string = string
new_string_dict = defaultdict(str)
matched_list = re.findall(pattern,new_string)
if matched_list :
for mat in matched_list:
match_sub = re.search(sub_pattern,mat)
#需要确保sub_pattern 能匹配出 pattern 匹配到的内容的部分目标
if match_sub != None:
match_sub = match_sub.group()
replace_match = mat.replace(match_sub,'\n' + match_sub)
#如果替换的是第一个单词,需要特殊处理。不要替换前面的符号为换行符,而是需要保持原来单词自带的 “空”
#并且换行符 应该是加在第一个单词后面
if [ x for x in replace_match].index('\n') == 0:
replace_match = replace_match.replace('\n','')
replace_match = mat.replace(match_sub, match_sub + '\n')
replace_matched = split_wrong_combine(pattern,sub_pattern,replace_match)
new_string_dict[mat] = replace_matched
else:
return new_string
for k,v in new_string_dict.items():
new_string = new_string.replace(k,v)
return new_string
#++++++++++++++++++以下废弃函数++++++++++++++++++++++++
# def standardize_country_by_cn_similarty(short_str, standard_str_list):
# #处理中文国家缩写和完整国家名称无法匹配到的情况
# standard_str_list = list(set([str(x).strip() for x in standard_str_list]))
# standard_str_list = sorted(standard_str_list, key=len, reverse= False)
# #通过前面字符串匹配 马来 -- > 马来西亚
# standard_similarity_list = [ (s,1) if short_str in s else (s,0) for s in standard_str_list ]
# if standard_similarity_list[0][1] > 0 :
# return standard_similarity_list
# else:
# standard_similarity_list = [ ]
# for ss in standard_str_list:
# short_match_counter = 0
# for ss_each_letter in ss:
# for s in short_str:
# if s == ss_each_letter:
# short_match_counter += 1
# #至少能匹配上两个字
# str_similarity = short_match_counter / len(short_str) + short_match_counter / min([len(short_str),len(ss)])
# if short_match_counter >= 2 :
# standard_similarity_list.append([ss,str_similarity])
# else:
# standard_similarity_list.append([ss, 0 ])
# standard_similarity_list = sorted(standard_similarity_list, key=lambda x:x[1],reverse=True)
# return standard_similarity_list
| 4,955 | 0 | 314 |
67a933614fef20c96c426d9446f6a66d829171e4 | 15,150 | py | Python | clustering.py | kelicht/cet | 6e4a764e9a0c1a152582b744b277b1e4493daeeb | [
"MIT"
] | null | null | null | clustering.py | kelicht/cet | 6e4a764e9a0c1a152582b744b277b1e4493daeeb | [
"MIT"
] | null | null | null | clustering.py | kelicht/cet | 6e4a764e9a0c1a152582b744b277b1e4493daeeb | [
"MIT"
] | null | null | null | import numpy as np
import time
import pulp
from utils import flatten, Cost
from ce import ActionExtractor
from sklearn.cluster import KMeans
from sklearn.neighbors import KNeighborsClassifier
from sklearn.decomposition import PCA
from sklearn.manifold import TSNE
from matplotlib import pyplot as plt
if(__name__ == '__main__'):
_check(dataset='d', N=10)
| 51.883562 | 316 | 0.571023 | import numpy as np
import time
import pulp
from utils import flatten, Cost
from ce import ActionExtractor
from sklearn.cluster import KMeans
from sklearn.neighbors import KNeighborsClassifier
from sklearn.decomposition import PCA
from sklearn.manifold import TSNE
from matplotlib import pyplot as plt
class Clustering():
def __init__(self, mdl, X, Y=[],
clustering_object='instance', n_clusters=4, max_candidates=50, print_centers=True, tol=1e-6,
lime_approximation=False, n_samples=10000, alpha=1.0,
feature_names=[], feature_types=[], feature_categories=[], feature_constraints=[], target_name='Output', target_labels = ['Good','Bad']):
self.mdl_ = mdl
self.extractor_ = ActionExtractor(mdl, X, Y=Y, lime_approximation=lime_approximation, n_samples=n_samples, alpha=alpha,
feature_names=feature_names, feature_types=feature_types, feature_categories=feature_categories,
feature_constraints=feature_constraints, max_candidates=max_candidates, tol=tol, target_name=target_name, target_labels=target_labels)
self.cluster_ = KMeans(n_clusters=n_clusters, init='k-means++', n_init=10, max_iter=300, tol=0.0001,
precompute_distances='deprecated', verbose=0, random_state=None, copy_x=True, n_jobs='deprecated', algorithm='auto')
self.n_clusters_ = n_clusters
self.cost_ = Cost(X, Y, feature_types=feature_types, feature_categories=feature_categories, feature_constraints=feature_constraints, max_candidates=max_candidates, tol=tol)
self.print_centers_ = print_centers
self.clustering_object_ = clustering_object
if(clustering_object=='action'):
self.neighbors_ = KNeighborsClassifier(n_neighbors=1, weights='uniform', algorithm='auto', leaf_size=30, p=2, metric='minkowski', metric_params=None, n_jobs=None)
self.lime_approximation_ = lime_approximation
self.feature_names_ = feature_names if len(feature_names)==X.shape[1] else ['x_{}'.format(d) for d in range(X.shape[1])]
self.feature_types_ = feature_types if len(feature_types)==X.shape[1] else ['C' for d in range(X.shape[1])]
self.feature_categories_ = feature_categories
self.feature_categories_flatten_ = flatten(feature_categories)
self.feature_constraints_ = feature_constraints if len(feature_constraints)==X.shape[1] else ['' for d in range(X.shape[1])]
self.target_name_ = target_name
self.target_labels_ = target_labels
self.tol_ = tol
self.infeasible_ = False
self.feature_categories_inv_ = []
for d in range(X.shape[1]):
g = -1
if(self.feature_types_[d]=='B'):
for i, cat in enumerate(self.feature_categories_):
if(d in cat):
g = i
break
self.feature_categories_inv_.append(g)
def fit(self, X, max_change_num=4, cost_type='TLPS', gamma=1.0, dataset_name='',
solver='cplex', time_limit=180, log_stream=False, mdl_name='', log_name='', init_sols={}, verbose=False):
self.X_ = X
self.N_, self.D_ = X.shape
self.max_change_num_ = max_change_num
self.cost_type_ = cost_type
self.gamma_ = gamma
self.time_limit_ = time_limit
start = time.perf_counter()
if(self.clustering_object_=='instance'):
self.cluster_ = self.cluster_.fit(X)
self.centers_ = self.cluster_.cluster_centers_
K = self.cluster_.predict(X)
elif(self.clustering_object_=='action'):
A = np.zeros([self.N_, self.D_])
for n in range(self.N_):
action_dict = self.extractor_.extract(X[n].reshape(1,-1), max_change_num=self.max_change_num_, cost_type=self.cost_type_, tradeoff_parameter=self.gamma_, solver=solver, time_limit=self.time_limit_)
A[n] = action_dict['action']
self.cluster_ = self.cluster_.fit(A)
self.centers_ = self.cluster_.cluster_centers_
K = self.cluster_.predict(A)
self.neighbors_ = self.neighbors_.fit(X, K)
# if(len(dataset_name)!=0): self.scatter_decomposed(X, A, K, filename=dataset_name)
self.actions_ = []
for k in range(self.n_clusters_):
X_k = X[K==k]
action_dict = self.extractor_.extract(X_k, max_change_num=self.max_change_num_, cost_type=self.cost_type_, tradeoff_parameter=self.gamma_, solver=solver, time_limit=self.time_limit_)
action_dict['center'] = self.centers_[k] if self.clustering_object_=='instance' else X_k.mean(axis=0)
# action_dict['center'] = self.centers_[k]
self.actions_ += [ action_dict ]
self.time_ = time.perf_counter()-start;
return self
def feasify(self, a, x):
for d in [d for d in range(self.D_) if self.feature_types_[d]=='B']:
x_d = x[d] + a[d]
if(x_d not in [0,1]):
# print(self.feature_names_[d], x_d)
a[d]=0
for G in self.feature_categories_:
x_G = x[G] + a[G]
if(x_G.sum()!=1):
# for d in G: print(self.feature_names_[d], x[d]+a[d])
a[G]=0
return a
def predict(self, X):
K = self.cluster_.predict(X) if self.clustering_object_=='instance' else self.neighbors_.predict(X)
A = [self.actions_[k]['action'] for k in K]
return np.array([self.feasify(a, x) for a,x in zip(A, X)])
def predict_random(self, X):
K = self.cluster_.predict(X) if self.clustering_object_=='instance' else self.neighbors_.predict(X)
K_random = [np.random.choice([k_ for k_ in range(self.n_clusters_) if k_!=k]) for k in K]
A = [self.actions_[k]['action'] for k in K_random]
return np.array([self.feasify(a, x) for a,x in zip(A, X)])
def cost(self, X, cost_type='TLPS', random=False):
A = self.predict_random(X) if random else self.predict(X)
return np.array([self.cost_.compute(x, a, cost_type=cost_type) for x,a in zip(X, A)]).mean()
def loss(self, X, target=0, random=False):
A = self.predict_random(X) if random else self.predict(X)
return (self.mdl_.predict(X+A)!=target).mean()
def scatter_decomposed(self, X, A, K, filename=''):
plt.figure(figsize=(10,8))
if(len(filename)!=0): plt.suptitle(filename)
plt.subplot(2,2,1)
method = 'PCA'
decom = PCA(n_components=2, copy=True, whiten=False, svd_solver='auto', tol=0.0, iterated_power='auto', random_state=1)
x = decom.fit_transform(A)
plt.title('Actions in 2D ({})'.format(method))
plt.scatter(x[:,0], x[:,1], c=K)
plt.subplot(2,2,2)
method = 't-SNE'
decom = TSNE(n_components=2, perplexity=30.0, early_exaggeration=12.0, learning_rate=200.0, n_iter=1000, n_iter_without_progress=300, min_grad_norm=1e-07, metric='euclidean', init='random', verbose=0, random_state=1, method='barnes_hut', angle=0.5, n_jobs=None)
x = decom.fit_transform(A)
plt.title('Actions in 2D ({})'.format(method))
plt.scatter(x[:,0], x[:,1], c=K)
plt.subplot(2,2,3)
method = 'PCA'
decom = PCA(n_components=2, copy=True, whiten=False, svd_solver='auto', tol=0.0, iterated_power='auto', random_state=1)
x = decom.fit_transform(X)
plt.title('Instances in 2D ({})'.format(method))
plt.scatter(x[:,0], x[:,1], c=K)
plt.subplot(2,2,4)
method = 't-SNE'
decom = TSNE(n_components=2, perplexity=30.0, early_exaggeration=12.0, learning_rate=200.0, n_iter=1000, n_iter_without_progress=300, min_grad_norm=1e-07, metric='euclidean', init='random', verbose=0, random_state=1, method='barnes_hut', angle=0.5, n_jobs=None)
x = decom.fit_transform(X)
plt.title('Instances in 2D ({})'.format(method))
plt.scatter(x[:,0], x[:,1], c=K)
plt.tight_layout()
if(len(filename)==0):
plt.show()
else:
plt.savefig('res/plot_{}.png'.format(filename))
plt.clf()
return
def __str__(self):
s = ''
for k, action_dict in enumerate(self.actions_):
s += '- Cluster {}: \n'.format(k+1)
s += '\t* Action [{}: {} -> {}] (Acc. = {}/{} = {:.1%} / MeanCost = {:.3}):\n'.format(self.target_name_, self.target_labels_[1], self.target_labels_[0], action_dict['active'].sum(), action_dict['sample'], action_dict['active'].sum()/action_dict['sample'], action_dict['cost'].sum()/action_dict['sample'])
action = action_dict['action']
for i,d in enumerate(np.where(abs(action)>1e-8)[0]):
g = self.feature_categories_inv_[d]
if(g==-1):
if(self.feature_types_[d]=='C'):
s += '\t\t* {}: {:+.4f}\n'.format(self.feature_names_[d], action[d])
elif(self.feature_types_[d]=='B'):
if(action[d]==-1):
s += '\t\t* {}: True -> False\n'.format(self.feature_names_[d], action[d])
else:
s += '\t\t* {}: False -> True\n'.format(self.feature_names_[d], action[d])
else:
s += '\t\t* {}: {:+}\n'.format(self.feature_names_[d], action[d].astype(int))
else:
if(action[d]==-1): continue
cat_name, nxt = self.feature_names_[d].split(':')
cat = self.feature_categories_[g]
prv = self.feature_names_[cat[np.where(action[cat]==-1)[0][0]]].split(':')[1]
s += '\t\t* {}: \"{}\" -> \"{}\"\n'.format(cat_name, prv, nxt)
if(self.print_centers_):
s += '\t* Center:\n'
for d, x_d in enumerate(action_dict['center']):
# s += '\t\t* {}: {}\n'.format(self.feature_names_[d], x_d)
g = self.feature_categories_inv_[d]
if(g==-1):
if(self.feature_types_[d]=='B'):
s += '\t\t* {}: {:.1%}\n'.format(self.feature_names_[d], x_d)
else:
s += '\t\t* {}: {:.2f}\n'.format(self.feature_names_[d], x_d)
for G in self.feature_categories_:
group, _ = self.feature_names_[G[0]].split(':')
s += '\t\t* {}:\n'.format(group)
for d in G:
x_d = action_dict['center'][d]
if(x_d < 1e-8): continue
_, cat = self.feature_names_[d].split(':')
s += '\t\t\t* {}: {:.1%}\n'.format(cat, x_d)
return s
def to_markdown(self):
s = '| | HowToChange |\n'
s += '| --- | --- |\n'
for k, action_dict in enumerate(self.actions_):
a = action_dict['action']
acc = action_dict['active'].sum()/action_dict['sample']; cost = action_dict['cost'].sum()/action_dict['sample']
s += '| Action {} | '.format(k+1)
for d in np.where(abs(a)>1e-8)[0]:
g = self.feature_categories_inv_[d]
if(g==-1):
if(self.feature_types_[d]=='C'):
s += '{}: {:+.4f} <br>'.format(self.feature_names_[d], a[d])
elif(self.feature_types_[d]=='B'):
if(a[d]==-1):
s += '{}: True -> False <br> '.format(self.feature_names_[d], a[d])
else:
s += '{}: False -> True <br> '.format(self.feature_names_[d], a[d])
else:
s += '{}: {:+} <br>'.format(self.feature_names_[d], a[d].astype(int))
else:
if(a[d]==-1): continue
cat_name, nxt = self.feature_names_[d].split(':')
cat = self.feature_categories_[g]
prv = self.feature_names_[cat[np.where(a[cat]==-1)[0][0]]].split(':')[1]
s += '{}: \"{}\" -> \"{}\" <br> '.format(cat_name, prv, nxt)
s += '(Acc: {:.1%} / Cost: {:.3}) |\n'.format(acc, cost)
s += '\n| Feature '
for k in range(self.n_clusters_): s += '| Cluster {} '.format(k+1)
s += '|\n'
s += '| --- |' + ' ---: |'*self.n_clusters_ + '\n'
X = np.array([action_dict['center'] for action_dict in self.actions_])
for d, X_d in enumerate(X.T):
s += '| {} '.format(self.feature_names_[d]+':True' if self.feature_types_[d]=='B' and self.feature_categories_inv_[d]==-1 else self.feature_names_[d])
for x_d in X_d:
if(self.feature_types_[d]=='B'):
s += '| {:.1%} '.format(x_d)
else:
s += '| {:.2f} '.format(x_d)
s += '|\n'
return s
def _check(dataset='h', N=10):
    """Smoke-test: fit a logistic model on a benchmark dataset, learn a
    clusterwise AReS summary for the first N denied instances, and print
    the learned report together with its cost/loss/objective scores."""
    from sklearn.linear_model import LogisticRegression
    from utils import DatasetHelper
    np.random.seed(0)
    gamma = 0.7
    data = DatasetHelper(dataset=dataset, feature_prefix_index=False)
    X_train, X_test, y_train, y_test = data.train_test_split()
    model = LogisticRegression(penalty='l2', C=1.0, solver='liblinear').fit(X_train, y_train)
    # instances the model predicts as the unfavourable class (label 1)
    denied = X_test[model.predict(X_test)==1]
    print('# Clustering Actionable Recourse Summary')
    print('* Dataset:', data.dataset_fullname)
    for d in range(denied.shape[1]):
        print('\t* x_{:<2}: {} ({}:{})'.format(d+1, data.feature_names[d], data.feature_types[d], data.feature_constraints[d]))
    print()
    summarizer = Clustering(model, X_train, Y=y_train, clustering_object='instance', n_clusters=4, print_centers=False,
                            feature_names=data.feature_names, feature_types=data.feature_types, feature_categories=data.feature_categories,
                            feature_constraints=data.feature_constraints, target_name=data.target_name, target_labels=data.target_labels)
    print('## Learning Clusterwise AReS')
    summarizer = summarizer.fit(denied[:N], max_change_num=4, cost_type='MPS', gamma=gamma, time_limit=60)
    print('- Parameters:')
    print('\t- clustering object: {}'.format(summarizer.clustering_object_))
    print('\t- num. of clusters: {}'.format(summarizer.n_clusters_))
    print('\t- gamma: {}'.format(summarizer.gamma_))
    print()
    print('### Learned Clusterwise AReS')
    print(summarizer)
    print('### Score:')
    cost = summarizer.cost(denied[:N], cost_type='MPS')
    loss = summarizer.loss(denied[:N])
    print('- cost: {}'.format(cost))
    print('- loss: {}'.format(loss))
    print('- objective: {}'.format(cost + gamma * loss))
# Script entry point: run the self-check on dataset 'd' with 10 instances.
if(__name__ == '__main__'):
    _check(dataset='d', N=10)
| 14,460 | -2 | 327 |
ebfd484a01ac35b8d8eec6d58c78628014ddee6f | 3,663 | py | Python | my/location/gpslogger.py | ktaranov/HPI | 3aa21107465b19b8b09884fbda8326617d3324ae | [
"MIT"
] | 1 | 2021-08-04T18:54:53.000Z | 2021-08-04T18:54:53.000Z | my/location/gpslogger.py | ktaranov/HPI | 3aa21107465b19b8b09884fbda8326617d3324ae | [
"MIT"
] | null | null | null | my/location/gpslogger.py | ktaranov/HPI | 3aa21107465b19b8b09884fbda8326617d3324ae | [
"MIT"
] | null | null | null | """
Parse gpslogger https://github.com/mendhak/gpslogger .gpx (xml) files
"""
from datetime import datetime, timezone
from dataclasses import dataclass
from pathlib import Path
from typing import NamedTuple, Iterator, Set, Dict
from lxml import etree
from ..core import Stats, Paths, LazyLogger
from ..core.error import Res
from ..core.common import get_files, warn_if_empty, mcachew
from ..core.cachew import cache_dir
# For config, see: https://github.com/seanbreckenridge/dotfiles/blob/master/.config/my/my/config/__init__.py
from my.config import gpslogger as user_config
logger = LazyLogger(__name__, level="warning")
@dataclass
from ..core.cfg import make_config
config = make_config(gpslogger)
@mcachew(
cache_path=cache_dir(),
depends_on=lambda: list(
map(lambda p: p.lstat().st_mtime, get_files(config.export_path))
),
logger=logger,
)
@warn_if_empty
| 31.307692 | 108 | 0.565111 | """
Parse gpslogger https://github.com/mendhak/gpslogger .gpx (xml) files
"""
from datetime import datetime, timezone
from dataclasses import dataclass
from pathlib import Path
from typing import NamedTuple, Iterator, Set, Dict
from lxml import etree
from ..core import Stats, Paths, LazyLogger
from ..core.error import Res
from ..core.common import get_files, warn_if_empty, mcachew
from ..core.cachew import cache_dir
# For config, see: https://github.com/seanbreckenridge/dotfiles/blob/master/.config/my/my/config/__init__.py
from my.config import gpslogger as user_config
logger = LazyLogger(__name__, level="warning")
@dataclass
class gpslogger(user_config):
    """Config overlay over the user's `my.config.gpslogger` block."""
    # path[s]/glob to the synced gpx (XML) files
    export_path: Paths
from ..core.cfg import make_config
config = make_config(gpslogger)
class Location(NamedTuple):
    """A single GPS fix: timestamp plus latitude/longitude."""
    # dt: when the point was recorded; the extractors attach timezone.utc
    dt: datetime
    lat: float
    lng: float
@mcachew(
    # depends_on: the export files' mtimes -- presumably keys cache invalidation
    # (see cachew/mcachew docs)
    cache_path=cache_dir(),
    depends_on=lambda: list(
        map(lambda p: p.lstat().st_mtime, get_files(config.export_path))
    ),
    logger=logger,
)
def history() -> Iterator[Res[Location]]:
    """Yield every location from all synced .gpx files, deduplicated by timestamp."""
    files = get_files(config.export_path, glob="*.gpx")
    emitted: Set[datetime] = set()
    for p in files:
        for l in _extract_locations(p):
            # NOTE(review): assumes _extract_locations yields only Location values;
            # an Exception item (Res error) would crash on the .dt access -- confirm
            if l.dt in emitted:
                continue  # same timestamp already seen (e.g. overlapping exports)
            emitted.add(l.dt)
            yield l
def _extract_locations(path: Path) -> Iterator[Res[Location]]:
    """Yield Locations parsed from one .gpx file.

    Prefers the gpxpy library when it is installed; otherwise logs a warning
    and falls back to the hand-rolled XML parsing in _extract_xml_locations.
    """
    try:
        import gpxpy
        with path.open("r") as fp:
            parsed = gpxpy.parse(fp)
            for trk in parsed.tracks:
                for seg in trk.segments:
                    for pt in seg.points:
                        # TODO: use elevation?
                        yield Location(
                            lat=pt.latitude,
                            lng=pt.longitude,
                            dt=datetime.replace(pt.time, tzinfo=timezone.utc),
                        )
    except ImportError:
        logger.warning(
            "Should install 'gpxpy' to parse .gpx files, falling back to basic XML parsing"
        )
        yield from _extract_xml_locations(path)
@warn_if_empty
def _extract_xml_locations(path: Path) -> Iterator[Res[Location]]:
    """Fallback .gpx parser used when gpxpy is not installed.

    Walks the raw XML, yielding a Location for every <trkpt> that has a
    <time> child.  Problems are yielded as Res errors instead of being
    returned, so one bad element doesn't silently lose the rest of the file.
    """
    # the tags are sort of strange here, because they include the
    # input format (URL). cant use findall easily, have to loop through
    # and find substrings of the matching tags
    tr = etree.parse(str(path))
    found_trk = False
    for el in tr.getroot():  # children of the <gpx> element
        # gpx files may contain <metadata>, <wpt> etc. alongside <trk>; skip
        # them (previously the first non-trk element aborted the whole file
        # by 'return'ing an error, which a generator silently swallows)
        if not el.tag.endswith("trk"):
            continue
        found_trk = True
        for trkseg in el:
            for trkpt in trkseg:
                latlon_dict: Dict[str, str] = trkpt.attrib
                if "lat" not in latlon_dict or "lon" not in latlon_dict:
                    # explicit check instead of assert (asserts vanish under -O);
                    # yield the error so remaining points are still processed
                    yield RuntimeError(
                        "trkpt element missing lat/lon attributes: {}".format(dict(latlon_dict))
                    )
                    continue
                for child in trkpt:
                    # believe this is UTC, since gpx times start at 8AM and I'm in PST --
                    # therefore attach tzinfo directly (matching _extract_locations);
                    # astimezone() would wrongly reinterpret the naive value as local time
                    if child.tag.endswith("time"):
                        yield Location(
                            dt=datetime.fromisoformat(child.text.rstrip("Z")).replace(
                                tzinfo=timezone.utc,
                            ),
                            lat=float(latlon_dict["lat"]),
                            lng=float(latlon_dict["lon"]),
                        )
    if not found_trk:
        # no track data at all -- surface as a Res error, not a return value
        yield RuntimeError("Could not find 'trk' element in GPX XML")
def stats() -> Stats:
    """HPI stats entrypoint: summarize history() via core.stat."""
    from ..core import stat
    return {**stat(history)}
| 2,493 | 133 | 135 |
4549b5704caed463f710e1b6e23a2d53d4b41903 | 7,296 | py | Python | pytagmapper/inside_out_tracker.py | markisus/pytagmapper | c6f38da8f129c7bba930c1f88ea27f81f9b9da37 | [
"MIT"
] | 2 | 2021-10-30T23:22:12.000Z | 2022-01-21T01:33:05.000Z | pytagmapper/inside_out_tracker.py | markisus/pytagmapper | c6f38da8f129c7bba930c1f88ea27f81f9b9da37 | [
"MIT"
] | null | null | null | pytagmapper/inside_out_tracker.py | markisus/pytagmapper | c6f38da8f129c7bba930c1f88ea27f81f9b9da37 | [
"MIT"
] | 1 | 2021-11-20T16:09:22.000Z | 2021-11-20T16:09:22.000Z | import numpy as np
from pytagmapper.geometry import *
from pytagmapper.project import *
from pytagmapper.data import *
from pytagmapper.heuristics import *
# success of the tracker heavily depends on initialization
# initialization from one of these viewpoints generally will succeed
INIT_TXS_WORLD_VIEWPOINT = [
# topdown views
look_at_origin([0,0,1], [0,1,0]),
look_at_origin([0,0,1], [0,-1,0]),
look_at_origin([0,0,1], [1,0,0]),
look_at_origin([0,0,1], [-1,0,0]),
# view from left
look_at_origin([1,0,0.5], [0,0,1]),
look_at_origin([1,0,0.5], [0,0,-1]),
look_at_origin([1,0,0.5], [0,1,0]),
look_at_origin([1,0,0.5], [0,-1,0]),
# view from top
look_at_origin([0,1,0.5], [0,0,1]),
look_at_origin([0,1,0.5], [0,0,-1]),
look_at_origin([0,1,0.5], [1,0,0]),
look_at_origin([0,1,0.5], [-1,0,0]),
# view from right
look_at_origin([-1,0,0.5], [0,0,1]),
look_at_origin([-1,0,0.5], [0,0,-1]),
look_at_origin([-1,0,0.5], [0,1,0]),
look_at_origin([-1,0,0.5], [0,-1,0]),
# view from bottom
look_at_origin([0,-1,0.5], [0,0,1]),
look_at_origin([0,-1,0.5], [0,0,-1]),
look_at_origin([0,-1,0.5], [1,0,0]),
look_at_origin([0,-1,0.5], [-1,0,0]),
]
| 37.22449 | 128 | 0.61472 | import numpy as np
from pytagmapper.geometry import *
from pytagmapper.project import *
from pytagmapper.data import *
from pytagmapper.heuristics import *
def look_at_origin(from_xyz, up_dir):
    """Build a 4x4 world-from-camera pose located at `from_xyz`, with the
    camera's +z axis aimed at the world origin and `up_dir` fixing the roll.

    Columns 0..2 of the result are the camera's x/y/z axes expressed in
    world coordinates; column 3 is the camera position (homogeneous SE(3)).
    """
    eye = np.array(from_xyz, dtype=np.float64)
    up = np.array(up_dir, dtype=np.float64)
    # optical axis points from the eye toward the origin
    forward = -eye
    forward /= np.linalg.norm(forward)
    # camera x-axis: perpendicular to both the view direction and "up"
    right = np.cross(forward, up)
    # camera y-axis: completes the right-handed frame
    down = np.cross(forward, right)
    right /= np.linalg.norm(right)
    down /= np.linalg.norm(down)
    pose = np.empty((4, 4))
    pose[:3, 0] = right
    pose[:3, 1] = down
    pose[:3, 2] = forward
    pose[:3, 3] = eye
    pose[3, :] = [0.0, 0.0, 0.0, 1.0]
    return pose
# success of the tracker heavily depends on initialization
# initialization from one of these viewpoints generally will succeed
# (each entry is a 4x4 tx_world_viewpoint; InsideOutTracker copies them and
# scales their translations by 10x the default tag side length before use)
INIT_TXS_WORLD_VIEWPOINT = [
    # topdown views
    look_at_origin([0,0,1], [0,1,0]),
    look_at_origin([0,0,1], [0,-1,0]),
    look_at_origin([0,0,1], [1,0,0]),
    look_at_origin([0,0,1], [-1,0,0]),

    # view from left
    look_at_origin([1,0,0.5], [0,0,1]),
    look_at_origin([1,0,0.5], [0,0,-1]),
    look_at_origin([1,0,0.5], [0,1,0]),
    look_at_origin([1,0,0.5], [0,-1,0]),

    # view from top
    look_at_origin([0,1,0.5], [0,0,1]),
    look_at_origin([0,1,0.5], [0,0,-1]),
    look_at_origin([0,1,0.5], [1,0,0]),
    look_at_origin([0,1,0.5], [-1,0,0]),

    # view from right
    look_at_origin([-1,0,0.5], [0,0,1]),
    look_at_origin([-1,0,0.5], [0,0,-1]),
    look_at_origin([-1,0,0.5], [0,1,0]),
    look_at_origin([-1,0,0.5], [0,-1,0]),

    # view from bottom
    look_at_origin([0,-1,0.5], [0,0,1]),
    look_at_origin([0,-1,0.5], [0,0,-1]),
    look_at_origin([0,-1,0.5], [1,0,0]),
    look_at_origin([0,-1,0.5], [-1,0,0]),
]
class InsideOutTracker:
    """Tracks the camera ("viewpoint") pose relative to a fixed map of tags.

    Runs several pose hypotheses (seeded from INIT_TXS_WORLD_VIEWPOINT) in
    parallel, refining each with a damped least-squares (Levenberg-Marquardt
    style) step per update, until one hypothesis fits the observed tag
    corners well enough to be declared converged.
    """

    def __init__(self, camera_matrix, map_data,
                 tx_world_viewpoint = None, max_regularizer = 1e9):
        # map_data comes from pytagmapper: tag poses plus physical side lengths
        self.tag_locations = map_data['tag_locations']
        self.camera_matrix = np.array(camera_matrix)
        self.tag_side_lengths = map_data['tag_side_lengths']
        self.default_tag_side_length = self.tag_side_lengths['default']
        self.default_corners_mat = get_corners_mat(self.default_tag_side_length)

        # per-tag corner matrices (get_corners_mat falls back to the default)
        self.corners_mats = {}
        for tag_id, tag_side_length in self.tag_side_lengths.items():
            self.corners_mats[tag_id] = get_corners_mat(tag_side_length)

        # 4x4 world-from-tag transforms; map_lift_3d upgrades the map to 3D poses
        self.txs_world_tag = {}
        map_lift_3d(map_data)
        for tag_id, tx_world_tag in self.tag_locations.items():
            self.txs_world_tag[tag_id] = np.array(tx_world_tag)

        self.tx_world_viewpoint = tx_world_viewpoint
        if self.tx_world_viewpoint is None:
            # default: camera looking straight down at the origin from
            # 10 tag side lengths above it
            init_dist = 10 * self.default_tag_side_length
            self.tx_world_viewpoint = \
                np.array([
                    [1, 0, 0, 0],
                    [0, -1, 0, 0],
                    [0, 0, -1, init_dist],
                    [0, 0, 0, 1]
                ])

        self.error = float('inf')
        self.max_regularizer = max_regularizer
        self.regularizer = self.max_regularizer

        # independent pose hypotheses, pushed out to ~10 tag-lengths from origin
        self.txs_world_viewpoint = [tx.copy() for tx in INIT_TXS_WORLD_VIEWPOINT]
        for tx in self.txs_world_viewpoint:
            tx[:3,3] *= self.default_tag_side_length * 10
        self.num_hypotheses = len(self.txs_world_viewpoint)
        self.errors = [float('inf') for _ in range(self.num_hypotheses)]
        self.regularizers = [self.max_regularizer for _ in range(self.num_hypotheses)]
        self.converged_guess = None  # index of the winning hypothesis, once converged
        self.best_guess = 0

    def get_corners_mat(self, tag_id):
        """Return the corner matrix for tag_id, defaulting to the default side length."""
        return self.corners_mats.get(tag_id, self.default_corners_mat)

    def get_projections(self, guess_idx=-1):
        """Project every mapped tag's corners through the camera for one pose.

        guess_idx >= 0 selects a specific hypothesis; otherwise the current
        published pose (self.tx_world_viewpoint) is used.  Returns parallel
        lists (tag_ids, projected corner arrays).
        """
        tag_ids = []
        tag_corners =[]
        if guess_idx >= 0:
            tx_world_viewpoint = self.txs_world_viewpoint[guess_idx]
        else:
            tx_world_viewpoint = self.tx_world_viewpoint
        for tag_id, tx_world_tag in self.txs_world_tag.items():
            tx_viewpoint_tag = SE3_inv(tx_world_viewpoint) @ tx_world_tag
            projected_corners, _, _ = project(self.camera_matrix, tx_viewpoint_tag, self.get_corners_mat(tag_id))
            tag_ids.append(tag_id)
            tag_corners.append(projected_corners)
        return tag_ids, tag_corners

    def update_guess(self, guess_idx, tags, force_update = False):
        """One damped least-squares step for hypothesis guess_idx.

        tags is an iterable of (tag_id, detected corner pixels reshaped to 8x1);
        detections for tags absent from the map are ignored.  The pose step is
        applied only when the reprojection error improved (or force_update).
        """
        tx_world_viewpoint = self.txs_world_viewpoint[guess_idx]
        error = self.errors[guess_idx]
        regularizer = self.regularizers[guess_idx]
        JtJ = np.zeros((6,6))
        rtJ = np.zeros((1,6))
        curr_error = 0
        for tag_id, corners in tags:
            tx_world_tag = self.txs_world_tag.get(tag_id, None)
            if tx_world_tag is None:
                continue
            corners = np.array(corners).reshape((8,1))
            tx_viewpoint_tag = SE3_inv(tx_world_viewpoint) @ tx_world_tag
            projected_corners, dcorners_dcamera, _ = project(self.camera_matrix, tx_viewpoint_tag, self.get_corners_mat(tag_id))
            residual = projected_corners - corners
            # accumulate normal equations for the 6-dof camera perturbation
            JtJ += dcorners_dcamera.T @ dcorners_dcamera
            rtJ += residual.T @ dcorners_dcamera
            curr_error += (residual.T @ residual)[0,0]
        # LM-style damping schedule: back off hard when worse, relax when better
        if curr_error > error:
            regularizer *= 25
        else:
            regularizer *= 0.5
        improved = curr_error < error
        regularizer = min(regularizer, self.max_regularizer)
        regularizer = max(regularizer, 1e-3)
        if improved or force_update:
            # solve the damped normal equations and apply the SE(3) increment
            update = np.linalg.solve(JtJ + regularizer * np.eye(6), -rtJ.T)
            tx_world_viewpoint = tx_world_viewpoint @ se3_exp(update)
            # tx_world_viewpoint = heuristic_flip_tx_world_cam(tx_world_viewpoint @ se3_exp(update))
            error = curr_error
        self.txs_world_viewpoint[guess_idx] = tx_world_viewpoint
        self.regularizers[guess_idx] = regularizer
        self.errors[guess_idx] = error

    def update1(self, tags, force_update = False):
        """Update hypotheses from (tag_id, corners) detections and publish the best."""
        if self.converged_guess is not None:
            # already converged: only keep refining the winning hypothesis
            self.update_guess(self.converged_guess, tags, force_update)
            best_guess = self.converged_guess
        else:
            for i in range(self.num_hypotheses):
                self.update_guess(i, tags, force_update)
            # report the tx with the best error
            best_guess = 0
            best_error = float('inf')
            for i, error in enumerate(self.errors):
                if error < best_error:
                    best_guess = i
                    best_error = error
            # heuristic to check convergence
            num_tags = len([t for t, c in tags if t in self.txs_world_tag])
            if num_tags >= 2:
                # mean squared residual per corner (4 corners per tag)
                pt_error = best_error / (num_tags * 4)
                if pt_error <= 30: # px
                    self.converged_guess = best_guess
        # publish the winning hypothesis as the tracker's current state
        self.error = self.errors[best_guess]
        self.tx_world_viewpoint = self.txs_world_viewpoint[best_guess]
        self.regularizer = self.regularizers[best_guess]
        self.best_guess = best_guess

    def update(self, tag_ids, tag_corners, force_update = False):
        """Convenience wrapper: zip parallel id/corner lists and run update1."""
        return self.update1(list(zip(tag_ids, tag_corners)), force_update)
| 5,850 | 2 | 207 |
fed457364c30491a6e520c7bf8aee44813f0f046 | 2,276 | py | Python | SSD/SSD_FPN_GIoU/model/neck/ssd_neck.py | ForrestPi/ObjectDetection | 54e0821e73f67be5360c36f01229a123c34ab3b3 | [
"MIT"
] | 12 | 2020-03-25T01:24:22.000Z | 2021-09-18T06:40:16.000Z | SSD/SSD_FPN_GIoU/model/neck/ssd_neck.py | ForrestPi/ObjectDetection | 54e0821e73f67be5360c36f01229a123c34ab3b3 | [
"MIT"
] | 1 | 2020-04-22T07:52:36.000Z | 2020-04-22T07:52:36.000Z | SSD/SSD_FPN_GIoU/model/neck/ssd_neck.py | ForrestPi/ObjectDetection | 54e0821e73f67be5360c36f01229a123c34ab3b3 | [
"MIT"
] | 4 | 2020-03-25T01:24:26.000Z | 2020-09-20T11:29:09.000Z | import torch.nn as nn
import torch.nn.functional as F
from ..utils import ConvModule
| 29.558442 | 115 | 0.505712 | import torch.nn as nn
import torch.nn.functional as F
from ..utils import ConvModule
class SSDNeck(nn.Module):
    """Neck that extends backbone feature maps with extra downsampled levels,
    SSD-style: the deepest input is pushed through a chain of extra convs and
    every intermediate result is appended to the output pyramid.
    """

    def __init__(self, in_channels = [1024,2048],out_channels = 256,out_map=None,start_level = 0,end_level = None):
        super(SSDNeck,self).__init__()
        self.in_channels = in_channels
        # a single int means "same width for every extra level"
        if isinstance(out_channels,int):
            out_channels = [out_channels for i in range(len(self.in_channels))]
        self.out_channels = out_channels
        #select the out map
        self.out_map = out_map
        self.start_level = start_level
        self.end_level = end_level
        self.normalize = {'type':'BN'}  # norm config forwarded to ConvModule
        if self.end_level is None:
            self.end_level = len(self.out_channels)
        self.fpn_convs = nn.ModuleList()
        for i in range(self.start_level, self.end_level):
            if i == 0 :
                # first extra conv: 3x3 stride-2 downsample from the deepest input
                fpn_conv = ConvModule(
                    in_channels[-1],
                    out_channels[0],
                    3,
                    stride = 2,
                    padding=1,
                    normalize=self.normalize,
                    bias=True,
                    inplace=True)
            else:
                # later convs: 3x3, stride 1, no padding (each side shrinks by 2)
                fpn_conv = ConvModule(
                    out_channels[i-1],
                    out_channels[i],
                    3,
                    stride = 1,
                    padding=0,
                    normalize=self.normalize,
                    bias=True,
                    inplace=True)
            self.fpn_convs.append(fpn_conv)
        self.init_weights()

    def init_weights(self):
        """Xavier-initialize every Conv2d layer in this module."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.xavier_uniform_(m.weight)

    def forward(self,inputs):
        """Return the input feature maps plus the extra downsampled levels.

        inputs: sequence of backbone feature maps, one per entry of in_channels.
        """
        assert len(inputs) == len(self.in_channels)
        outs = []
        # build outputs
        # part 1: from original levels
        x = inputs[-1]
        outs += inputs
        # NOTE(review): self.fpn_convs[i] assumes start_level == 0; for a non-zero
        # start_level the ModuleList indices are offset -- confirm intended use.
        for i in range(self.start_level, self.end_level):
            x = self.fpn_convs[i](x)
            outs.append(x)
        if self.out_map is not None:
            # NOTE(review): plain list indexing -- out_map is presumably a
            # slice or int, not a list of indices; confirm against callers.
            outs = outs[self.out_map]
        '''
        for i in range(len(outs)):
            print(outs[i].shape)
        '''
        return tuple(outs)
| 2,081 | 4 | 102 |
15fc3dc4f202ef643bcc7c1a3d0324444dfba544 | 10,177 | py | Python | ui_handler.py | firetto/CSC111-Final-Project | d2cc1f9292686bb85c7da8c4312762c05a839df6 | [
"MIT"
] | 1 | 2021-05-06T01:49:46.000Z | 2021-05-06T01:49:46.000Z | ui_handler.py | firetto/CSC111-Final-Project | d2cc1f9292686bb85c7da8c4312762c05a839df6 | [
"MIT"
] | null | null | null | ui_handler.py | firetto/CSC111-Final-Project | d2cc1f9292686bb85c7da8c4312762c05a839df6 | [
"MIT"
] | null | null | null | """
ui_handler.py
Contains the UIHandler method, which contains methods for adding UI elements to the window
and manipulating them.
CSC111 Final Project by Anatoly Zavyalov, Baker Jackson, Elliot Schrider, Rachel Kim
Copyright 2021 Anatoly Zavyalov, Baker Jackson, Elliot Schrider, Rachel Kim
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import window
import pygame
from reversi import ReversiGame
from typing import List, Dict
from stats import plot_game_statistics
from ai_players import RandomPlayer, MinimaxABPlayer
def increment_player_score(player: str, w: window.Window) -> None:
"""Increments the player score of the corresponding player."""
if player == "black":
# text is a member
w.get_ui_element('text-score-black-amount').set_text(
str(int(w.get_ui_element('text-score-black-amount').get_text()) + 1))
elif player == "white":
# text is a member
w.get_ui_element('text-score-white-amount').set_text(
str(int(w.get_ui_element('text-score-white-amount').get_text()) + 1))
def reset_player_scores(w: window.Window) -> None:
"""Reset the player score of the corresponding player."""
w.get_ui_element('text-score-black-amount').set_text('0')
w.get_ui_element('text-score-white-amount').set_text('0')
def helper_dropdown_select_player(g: ReversiGame, text: str) -> None:
"""HELPER FUNCTION: Select the players given the dropdown option selected."""
if text == "Human vs. AI":
g.start_game(human_player=1)
elif text == "AI vs. Human":
g.start_game(human_player=-1)
else:
g.start_game(human_player=0)
def helper_dropdown_select_board_size(g: ReversiGame,
colour_to_player: Dict, text: str) -> None:
"""
Set the board size given the text.
Preconditions:
- text is of the form '<int>x<int>' where the two
integers are the same and greater than 0.
"""
global board_size_current
# Update the current board size (why?)
board_size_current = int(text.split('x')[0])
# Set new heuristics for players
colour_to_player[1].set_heuristic(board_size_current)
colour_to_player[-1].set_heuristic(board_size_current)
# Update game board size
g.set_board_size(board_size_current)
# Start new game.
g.start_game(human_player=g.get_human_player())
class UIHandler:
"""Contains methods for updating UI.
Instance Attributes:
- board_size_current: Parameter for the board size stored by the selection.
- game_paused: Whether or not the game is paused.
"""
board_size_current: int = 8
game_paused: bool = False
def dropdown_select_player(self, g: ReversiGame) -> any:
"""Return a function for setting the players given the selected dropdown option."""
return lambda text: helper_dropdown_select_player(g, text)
def helper_dropdown_select_ai(self, black: int, colour_to_player: Dict, text: str) -> None:
"""Set the AI given the text.
Preconditions:
- text in {'Minimax 2', 'Minimax 3', 'Minimax 4', 'Minimax 8', 'Random Moves'}
"""
if text.startswith('Minimax '):
colour_to_player.update({black: MinimaxABPlayer(int(text.split("Minimax ")[-1]),
self.board_size_current)})
else:
colour_to_player.update({black: RandomPlayer()})
def dropdown_select_ai(self, black: int, colour_to_player: Dict) -> any:
"""Return a function for setting the AI given the text."""
return lambda text: self.helper_dropdown_select_ai(black, colour_to_player, text)
def dropdown_select_board_size(self, g: ReversiGame, colour_to_player: Dict) -> any:
"""Return a function for setting the board size given the text.
Preconditions:
- text is of the form '<int>x<int>' where the two integers are the same.
"""
return lambda text: helper_dropdown_select_board_size(g, colour_to_player, text)
def update_games_stored_text(self, games: int, w: window.Window) -> None:
"""Update the 'Games Stored' label with to display 'Games Stored: <games>'."""
w.get_ui_element('text-games-stored').set_text(f'Games Stored: {games}')
def clear_results(self, results: List, w: window.Window) -> None:
"""Clear the results list by MUTATING it and update the Games Store text accordingly."""
results.clear()
self.update_games_stored_text(0, w)
reset_player_scores(w)
def button_pause_game(self, w: window.Window) -> None:
"""Function to call when the Pause/Resume game button is pressed.
Toggle the game_paused attribute, and change the text of the button accordingly."""
self.game_paused = not self.game_paused
if self.game_paused:
w.get_ui_element('button-pause-game').set_text('Resume Game')
else:
w.get_ui_element('button-pause-game').set_text('Pause Game')
def get_game_paused(self) -> bool:
"""Return game_paused."""
return self.game_paused
def add_ui(self, w: window.Window, g: ReversiGame,
results: List, colour_to_player: Dict) -> None:
"""
Add some UI to the window, such as buttons, and more.
"""
w.add_text(label="text-score-black-label", text="BLACK:", position=(20, 675))
w.add_text(label="text-score-white-label", text="WHITE:", position=(502, 675))
w.add_text(label="text-score-black-amount", text="0", position=(113, 675))
w.add_text(label="text-score-white-amount", text="0", position=(593, 675))
w.add_button(rect=pygame.Rect(725, 30, 150, 40),
label="button-pause-game", text="Pause Game",
function=lambda: self.button_pause_game(w))
w.add_text(label="text-choose-players", text="Choose Players", position=(720, 100))
w.add_dropdown(options_list=["Human vs. AI", "AI vs. Human", 'AI vs. AI'],
starting_option="Human vs. AI",
rect=pygame.Rect(725, 130, 150, 50),
label="dropdown-player",
function=self.dropdown_select_player(g))
w.add_text(label="text-choose-ai", text="Choose AI types", position=(720, 250))
w.add_text(label="text-choose-ai-black", text="Black AI", position=(705, 280),
large_font=False)
w.add_text(label="text-choose-ai-white", text="White AI", position=(840, 280),
large_font=False)
w.add_dropdown(options_list=["Random Moves", "Minimax 2", 'Minimax 3',
'Minimax 4', 'Minimax 6'],
starting_option="Minimax 2",
rect=pygame.Rect(675, 300, 125, 40),
label="dropdown-ai-black",
function=self.dropdown_select_ai(1, colour_to_player))
w.add_dropdown(options_list=["Random Moves", "Minimax 2", 'Minimax 3',
'Minimax 4', 'Minimax 6'],
starting_option="Minimax 2",
rect=pygame.Rect(810, 300, 125, 40),
label="dropdown-ai-white",
function=self.dropdown_select_ai(-1, colour_to_player))
w.add_text(label="text-choose-board-size", text="Choose Board Size", position=(700, 450))
w.add_dropdown(options_list=["8x8", '12x12', '16x16', '24x24'],
starting_option="8x8",
rect=pygame.Rect(725, 480, 150, 40),
label="dropdown-board-size",
function=self.dropdown_select_board_size(g, colour_to_player))
w.add_button(rect=pygame.Rect(675, 610, 125, 40),
label="button-show-stats", text="View Stats",
function=lambda: plot_game_statistics(g, results, 'black', colour_to_player[1],
colour_to_player[-1]))
w.add_button(rect=pygame.Rect(810, 610, 125, 40),
label="button-clear-stats", text="Clear Stats",
function=lambda: self.clear_results(results, w))
w.add_text(label="text-games-stored", text="Games Stored: 0", position=(715, 665))
w.add_text(label="text-credits",
text="Anatoly Zavyalov, Baker Jackson, Elliot Schrider, Rachel Kim",
position=(20, 2), large_font=False)
if __name__ == "__main__":
# Test doctests
import doctest
doctest.testmod(verbose=True)
import python_ta.contracts
python_ta.contracts.check_all_contracts()
import python_ta
python_ta.check_all(config={
# the names (strs) of imported modules
'extra-imports': ['window', 'pygame', 'reversi', 'stats', 'ai_players'],
'allowed-io': [], # the names (strs) of functions that call print/open/input
'max-line-length': 100,
# Disable too-many-nested-blocks, too-many-arguments
'disable': ['E1136', 'R1702', 'R0913']
})
| 41.369919 | 100 | 0.637909 | """
ui_handler.py
Contains the UIHandler method, which contains methods for adding UI elements to the window
and manipulating them.
CSC111 Final Project by Anatoly Zavyalov, Baker Jackson, Elliot Schrider, Rachel Kim
Copyright 2021 Anatoly Zavyalov, Baker Jackson, Elliot Schrider, Rachel Kim
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import window
import pygame
from reversi import ReversiGame
from typing import List, Dict
from stats import plot_game_statistics
from ai_players import RandomPlayer, MinimaxABPlayer
def increment_player_score(player: str, w: window.Window) -> None:
    """Bump the on-screen score counter of the given player ('black' or
    'white') by one.  Any other player string is ignored."""
    if player not in ('black', 'white'):
        return
    # text is a member of the counter element
    counter = w.get_ui_element(f'text-score-{player}-amount')
    counter.set_text(str(int(counter.get_text()) + 1))
def reset_player_scores(w: window.Window) -> None:
    """Reset both players' on-screen score counters back to zero."""
    for colour in ('black', 'white'):
        w.get_ui_element(f'text-score-{colour}-amount').set_text('0')
def helper_dropdown_select_player(g: ReversiGame, text: str) -> None:
    """HELPER FUNCTION: start a new game with sides assigned per the dropdown text.

    'Human vs. AI' puts the human on side 1, 'AI vs. Human' on side -1,
    and any other option (AI vs. AI) uses 0 (no human).
    """
    human_side = {'Human vs. AI': 1, 'AI vs. Human': -1}.get(text, 0)
    g.start_game(human_player=human_side)
def helper_dropdown_select_board_size(g: ReversiGame,
                                      colour_to_player: Dict, text: str) -> None:
    """
    Set the board size given the text.

    Preconditions:
        - text is of the form '<int>x<int>' where the two
          integers are the same and greater than 0.
    """
    global board_size_current
    # NOTE(review): this writes a *module-level* global, but
    # UIHandler.helper_dropdown_select_ai reads the UIHandler class attribute
    # of the same name; the two are never synchronized, so newly created AIs
    # may use a stale board size -- confirm.
    # Update the current board size (why?)
    board_size_current = int(text.split('x')[0])

    # Set new heuristics for players
    colour_to_player[1].set_heuristic(board_size_current)
    colour_to_player[-1].set_heuristic(board_size_current)

    # Update game board size
    g.set_board_size(board_size_current)

    # Start new game.
    g.start_game(human_player=g.get_human_player())
class UIHandler:
    """Contains methods for updating UI.

    Instance Attributes:
        - board_size_current: Parameter for the board size stored by the selection.
        - game_paused: Whether or not the game is paused.
    """
    board_size_current: int = 8
    game_paused: bool = False

    def dropdown_select_player(self, g: ReversiGame) -> any:
        """Return a function for setting the players given the selected dropdown option."""
        return lambda text: helper_dropdown_select_player(g, text)

    def helper_dropdown_select_ai(self, black: int, colour_to_player: Dict, text: str) -> None:
        """Set the AI given the text.

        Preconditions:
            - text in {'Minimax 2', 'Minimax 3', 'Minimax 4', 'Minimax 8', 'Random Moves'}
        """
        # NOTE(review): self.board_size_current is the class default (8); the
        # board-size dropdown writes a *module* global instead, so this value
        # may be stale when the board size has been changed -- confirm.
        if text.startswith('Minimax '):
            # 'Minimax <depth>': build an alpha-beta player with that search depth
            colour_to_player.update({black: MinimaxABPlayer(int(text.split("Minimax ")[-1]),
                                                            self.board_size_current)})
        else:
            colour_to_player.update({black: RandomPlayer()})

    def dropdown_select_ai(self, black: int, colour_to_player: Dict) -> any:
        """Return a function for setting the AI given the text."""
        return lambda text: self.helper_dropdown_select_ai(black, colour_to_player, text)

    def dropdown_select_board_size(self, g: ReversiGame, colour_to_player: Dict) -> any:
        """Return a function for setting the board size given the text.

        Preconditions:
            - text is of the form '<int>x<int>' where the two integers are the same.
        """
        return lambda text: helper_dropdown_select_board_size(g, colour_to_player, text)

    def update_games_stored_text(self, games: int, w: window.Window) -> None:
        """Update the 'Games Stored' label with to display 'Games Stored: <games>'."""
        w.get_ui_element('text-games-stored').set_text(f'Games Stored: {games}')

    def clear_results(self, results: List, w: window.Window) -> None:
        """Clear the results list by MUTATING it and update the Games Store text accordingly."""
        results.clear()
        self.update_games_stored_text(0, w)
        reset_player_scores(w)

    def button_pause_game(self, w: window.Window) -> None:
        """Function to call when the Pause/Resume game button is pressed.

        Toggle the game_paused attribute, and change the text of the button accordingly."""
        self.game_paused = not self.game_paused
        if self.game_paused:
            w.get_ui_element('button-pause-game').set_text('Resume Game')
        else:
            w.get_ui_element('button-pause-game').set_text('Pause Game')

    def get_game_paused(self) -> bool:
        """Return game_paused."""
        return self.game_paused

    def add_ui(self, w: window.Window, g: ReversiGame,
               results: List, colour_to_player: Dict) -> None:
        """
        Add some UI to the window, such as buttons, and more.
        """
        # Score readouts for both players (label + numeric counter)
        w.add_text(label="text-score-black-label", text="BLACK:", position=(20, 675))
        w.add_text(label="text-score-white-label", text="WHITE:", position=(502, 675))
        w.add_text(label="text-score-black-amount", text="0", position=(113, 675))
        w.add_text(label="text-score-white-amount", text="0", position=(593, 675))

        # Pause/resume toggle (wired to button_pause_game)
        w.add_button(rect=pygame.Rect(725, 30, 150, 40),
                     label="button-pause-game", text="Pause Game",
                     function=lambda: self.button_pause_game(w))

        # Who plays which colour
        w.add_text(label="text-choose-players", text="Choose Players", position=(720, 100))
        w.add_dropdown(options_list=["Human vs. AI", "AI vs. Human", 'AI vs. AI'],
                       starting_option="Human vs. AI",
                       rect=pygame.Rect(725, 130, 150, 50),
                       label="dropdown-player",
                       function=self.dropdown_select_player(g))

        # AI type selection, one dropdown per colour (1 = black, -1 = white)
        w.add_text(label="text-choose-ai", text="Choose AI types", position=(720, 250))
        w.add_text(label="text-choose-ai-black", text="Black AI", position=(705, 280),
                   large_font=False)
        w.add_text(label="text-choose-ai-white", text="White AI", position=(840, 280),
                   large_font=False)
        w.add_dropdown(options_list=["Random Moves", "Minimax 2", 'Minimax 3',
                                     'Minimax 4', 'Minimax 6'],
                       starting_option="Minimax 2",
                       rect=pygame.Rect(675, 300, 125, 40),
                       label="dropdown-ai-black",
                       function=self.dropdown_select_ai(1, colour_to_player))
        w.add_dropdown(options_list=["Random Moves", "Minimax 2", 'Minimax 3',
                                     'Minimax 4', 'Minimax 6'],
                       starting_option="Minimax 2",
                       rect=pygame.Rect(810, 300, 125, 40),
                       label="dropdown-ai-white",
                       function=self.dropdown_select_ai(-1, colour_to_player))

        # Board size selection
        w.add_text(label="text-choose-board-size", text="Choose Board Size", position=(700, 450))
        w.add_dropdown(options_list=["8x8", '12x12', '16x16', '24x24'],
                       starting_option="8x8",
                       rect=pygame.Rect(725, 480, 150, 40),
                       label="dropdown-board-size",
                       function=self.dropdown_select_board_size(g, colour_to_player))

        # Statistics: plot stored games, or clear them
        w.add_button(rect=pygame.Rect(675, 610, 125, 40),
                     label="button-show-stats", text="View Stats",
                     function=lambda: plot_game_statistics(g, results, 'black', colour_to_player[1],
                                                           colour_to_player[-1]))
        w.add_button(rect=pygame.Rect(810, 610, 125, 40),
                     label="button-clear-stats", text="Clear Stats",
                     function=lambda: self.clear_results(results, w))
        w.add_text(label="text-games-stored", text="Games Stored: 0", position=(715, 665))
        w.add_text(label="text-credits",
                   text="Anatoly Zavyalov, Baker Jackson, Elliot Schrider, Rachel Kim",
                   position=(20, 2), large_font=False)
# Self-check entry point: run doctests and the PythonTA checkers when this
# module is executed directly (not on import).
if __name__ == "__main__":
    # Test doctests
    import doctest
    doctest.testmod(verbose=True)
    # Runtime contract checking for annotated functions.
    import python_ta.contracts
    python_ta.contracts.check_all_contracts()
    import python_ta
    python_ta.check_all(config={
        # the names (strs) of imported modules
        'extra-imports': ['window', 'pygame', 'reversi', 'stats', 'ai_players'],
        'allowed-io': [],  # the names (strs) of functions that call print/open/input
        'max-line-length': 100,
        # Disable too-many-nested-blocks, too-many-arguments
        'disable': ['E1136', 'R1702', 'R0913']
    })
| 0 | 0 | 0 |
3c8de1e00e31b27ef74d1bc193acb954fb288de5 | 1,082 | py | Python | blag/__init__.py | ChickenNuggers/blag | 61b48106342e7e682d30e92f4cc74c502f12815b | [
"MIT"
] | null | null | null | blag/__init__.py | ChickenNuggers/blag | 61b48106342e7e682d30e92f4cc74c502f12815b | [
"MIT"
] | 1 | 2016-11-19T20:47:28.000Z | 2016-11-24T08:14:29.000Z | blag/__init__.py | ChickenNuggers/blag | 61b48106342e7e682d30e92f4cc74c502f12815b | [
"MIT"
] | null | null | null | from flask import Flask
import sqlite3
import config as config_module
app = Flask(__name__)
config = {}
for item in dir(config_module):
if item[0] != "_":
print("=== Config ===")
for key in (key for key in dir(config_module) if key[0] != "_"):
app.config[key] = getattr(config_module, key)
config[key] = getattr(config_module, key)
print(key, "=", getattr(config_module, key))
print("===+------+===")
break
app.config['config_module'] = config_module
@app.route("/")
db = sqlite3.connect('blog.db')
db_cursor = db.cursor()
db_cursor.execute("""CREATE TABLE IF NOT EXISTS Posts (
eid INTEGER PRIMARY KEY AUTOINCREMENT,
title TEXT NOT NULL,
post TEXT NOT NULL,
post_source TEXT NOT NULL
)""")
db.commit()
from . import rest # noqa E402
rest.add_routes(add_route, app)
| 23.521739 | 72 | 0.638632 | from flask import Flask
import sqlite3
import config as config_module
# Flask application object; configuration is copied in from the local
# ``config`` module below.
app = Flask(__name__)
config = {}
# Copy every public (non-underscore) attribute of the config module into both
# ``app.config`` and the module-level ``config`` dict, echoing each value.
# The outer loop only establishes that at least one public attribute exists;
# it always breaks after the first hit, so the inner loop runs at most once.
for item in dir(config_module):
    if item[0] != "_":
        print("=== Config ===")
        for key in (key for key in dir(config_module) if key[0] != "_"):
            app.config[key] = getattr(config_module, key)
            config[key] = getattr(config_module, key)
            print(key, "=", getattr(config_module, key))
        print("===+------+===")
        break
# Keep a handle on the raw config module for later lookups.
app.config['config_module'] = config_module
@app.route("/")
def get_index():
    """Serve the static landing page.

    Returns the raw contents of ``static/index.html``; Flask wraps the
    returned string in a 200 response.
    """
    # Explicit encoding: the default text encoding is platform-dependent,
    # so pin UTF-8 to render the page identically everywhere.
    with open("static/index.html", encoding="utf-8") as index_file:
        return index_file.read()
def add_route(route, methods=None):
    """Register *route* on the Flask app and return the route decorator.

    Thin wrapper around ``app.route`` that logs each registration.

    :param route: URL rule, e.g. ``"/posts"``.
    :param methods: allowed HTTP methods; defaults to ``['GET']``.
    """
    # None sentinel instead of a mutable default argument, so the default
    # list cannot be shared (and accidentally mutated) across calls.
    if methods is None:
        methods = ['GET']
    print(route, repr(methods))
    return app.route(route, methods=methods)
db = sqlite3.connect('blog.db')
db_cursor = db.cursor()
db_cursor.execute("""CREATE TABLE IF NOT EXISTS Posts (
eid INTEGER PRIMARY KEY AUTOINCREMENT,
title TEXT NOT NULL,
post TEXT NOT NULL,
post_source TEXT NOT NULL
)""")
db.commit()
from . import rest # noqa E402
rest.add_routes(add_route, app)
| 172 | 0 | 45 |
a1879c1476f051b846212b48098f6963ee2b116a | 716 | py | Python | Chapter2/exercise9.py | careynation/python_class | 61074c75d246010cebfa8610a35f5406c7180896 | [
"MIT"
] | null | null | null | Chapter2/exercise9.py | careynation/python_class | 61074c75d246010cebfa8610a35f5406c7180896 | [
"MIT"
] | null | null | null | Chapter2/exercise9.py | careynation/python_class | 61074c75d246010cebfa8610a35f5406c7180896 | [
"MIT"
] | null | null | null | # @author Carey Nation
# @title Chapter 2, Exercise 8
# @description Calculate tip and tax for a meal
celsius = read_it("Please enter your temperature, in celsius: ")
fahrenheit = convert(celsius)
print("\n%.2fC is %.2fF" % (celsius, fahrenheit))
| 23.096774 | 73 | 0.606145 | # @author Carey Nation
# @title Chapter 2, Exercise 9
# @description Convert a Celsius temperature to Fahrenheit
def read_it(prompt):
    """Prompt repeatedly until the user types a valid number; return it."""
    while True:
        text = input(prompt)
        try:
            # Success: hand the parsed value straight back.
            return float(text)
        except ValueError:
            print(text + " is not a valid number, please try again")
def convert(c):
    """Convert a Celsius temperature *c* to degrees Fahrenheit."""
    scale = 9.0 / 5
    return scale * c + 32
# Script body: read a Celsius temperature and report it in Fahrenheit.
celsius = read_it("Please enter your temperature, in celsius: ")
fahrenheit = convert(celsius)
print("\n%.2fC is %.2fF" % (celsius, fahrenheit))
| 418 | 0 | 46 |
68ea68bb4021b9a7d1bfe4d9d0fd29d5e2d741b1 | 3,297 | py | Python | re_calc/expression_parser.py | LilacRapture/re_calc | d8dc744e26ade3edc545dd6509bf2baf973537a2 | [
"MIT"
] | 1 | 2020-02-27T18:29:52.000Z | 2020-02-27T18:29:52.000Z | re_calc/expression_parser.py | LilacRapture/re_calc | d8dc744e26ade3edc545dd6509bf2baf973537a2 | [
"MIT"
] | 1 | 2020-02-27T18:46:46.000Z | 2020-02-27T18:46:46.000Z | re_calc/expression_parser.py | LilacRapture/ReCalc | d8dc744e26ade3edc545dd6509bf2baf973537a2 | [
"MIT"
] | null | null | null | from re_calc.config import control_tokens, operators, functions
from re_calc.util import is_number
from re_calc.exceptions import CalcException
import re
integer_regex = r"(\d+)"
tech_fractional_float = r"(\.\d+)"
float_regex = r"(\d+\.\d+)"
# regex for different num formats are joined by "regex OR" separator
NUMBER_REGEX = r"|".join([float_regex, tech_fractional_float, integer_regex])
def slice_by_pattern(pattern, input_expr):
''' Slices the matching part of the string using regex. Returns the matching
part and the remaining string, if pattern doesn't match returns None.
'''
re_pattern = re.compile(pattern)
match_object = re_pattern.match(input_expr)
if match_object:
start_idx, end_idx = match_object.span()
return input_expr[start_idx:end_idx], input_expr[end_idx:]
def slice_by_string(prefix, input_expr):
''' If string begins with some prefix (control tokens), returns prefix and
remaining string tuple.
'''
if input_expr.startswith(prefix):
chars_to_cut = len(prefix)
return prefix, input_expr[chars_to_cut:]
def combine_unary_sign(tokens):
''' Combines unary signs with adjacent value.
'''
output_queue = list()
while tokens[:-1]:
token = tokens[0]
next_token = tokens[1]
if not output_queue or output_queue[-1] in operators + ['(']:
if token == '-' and is_number(next_token):
output_queue.append(next_token * (-1))
tokens.pop(0)
elif token == '-' and next_token in functions:
output_queue.extend([-1, '*', next_token])
tokens.pop(0)
else:
output_queue.append(token)
else:
output_queue.append(token)
tokens.pop(0)
return output_queue + tokens
def tokenize(expression):
''' Returns tokens list with parsed floats and control tokens.
'''
parsing_expression = expression.strip()
output_queue = list()
while parsing_expression != '':
result = slice_by_pattern(NUMBER_REGEX, parsing_expression)
if result:
token, remaining_string = result
output_queue.append(float(token)) # add number to the output
parsing_expression = remaining_string.strip()
else:
found_control_token = False
for token in control_tokens:
result = slice_by_string(token, parsing_expression)
if result:
token, remaining_string = result
output_queue.append(token) # add control token to the output
parsing_expression = remaining_string.strip()
found_control_token = True
break
if not found_control_token:
combined_token_list = combine_unary_sign(output_queue)
combined_token_list.append(parsing_expression)
error_tokens = combined_token_list
token_position = len(combined_token_list) - 1
raise CalcException(
token_position,
error_tokens,
message='Unknown token',
loc_string="t_unknown_token")
return combine_unary_sign(output_queue)
| 37.044944 | 81 | 0.62663 | from re_calc.config import control_tokens, operators, functions
from re_calc.util import is_number
from re_calc.exceptions import CalcException
import re
integer_regex = r"(\d+)"
tech_fractional_float = r"(\.\d+)"
float_regex = r"(\d+\.\d+)"
# regex for different num formats are joined by "regex OR" separator
NUMBER_REGEX = r"|".join([float_regex, tech_fractional_float, integer_regex])
def slice_by_pattern(pattern, input_expr):
    ''' Split *input_expr* at the regex match anchored at its start.

    Returns a (matched_text, remainder) tuple, or None when *pattern*
    does not match at the beginning of the string.
    '''
    match = re.match(pattern, input_expr)
    if match is None:
        return None
    _, end = match.span()
    return input_expr[:end], input_expr[end:]
def slice_by_string(prefix, input_expr):
    ''' Split *input_expr* after a literal *prefix* (control token).

    Returns a (prefix, remainder) tuple when the string starts with the
    prefix; otherwise returns None.
    '''
    if not input_expr.startswith(prefix):
        return None
    return prefix, input_expr[len(prefix):]
def combine_unary_sign(tokens):
    ''' Combines unary signs with adjacent value.

    Folds a leading '-' into the following number (negating it) or, for a
    function token, rewrites it as ``-1 * func``.  A '-' counts as unary only
    at the start of the expression or right after an operator or '('.

    NOTE: mutates *tokens* in place (pops from the front) and relies on the
    project-level ``operators``/``functions`` lists and ``is_number`` helper.
    '''
    output_queue = list()
    # Examine pairs (token, next_token); stop when at most one token remains.
    while tokens[:-1]:
        token = tokens[0]
        next_token = tokens[1]
        # Unary position: expression start, or the previously emitted token
        # is an operator or an opening parenthesis.
        if not output_queue or output_queue[-1] in operators + ['(']:
            if token == '-' and is_number(next_token):
                # '-' directly before a number: emit the negated number and
                # drop the sign token.
                output_queue.append(next_token * (-1))
                tokens.pop(0)
            elif token == '-' and next_token in functions:
                # '-' before a function: rewrite as  -1 * func(...).
                output_queue.extend([-1, '*', next_token])
                tokens.pop(0)
            else:
                output_queue.append(token)
        else:
            output_queue.append(token)
        tokens.pop(0)
    # Append whatever single token (if any) was left unprocessed.
    return output_queue + tokens
def tokenize(expression):
    ''' Returns tokens list with parsed floats and control tokens.

    Repeatedly strips the next token off the front of *expression*: first a
    numeric literal (via NUMBER_REGEX), otherwise the first matching entry of
    the project-level ``control_tokens`` list.  Raises CalcException carrying
    the tokens parsed so far when input is unrecognised.
    '''
    parsing_expression = expression.strip()
    output_queue = list()
    while parsing_expression != '':
        # Try to read a numeric literal first.
        result = slice_by_pattern(NUMBER_REGEX, parsing_expression)
        if result:
            token, remaining_string = result
            output_queue.append(float(token))  # add number to the output
            parsing_expression = remaining_string.strip()
        else:
            found_control_token = False
            # Fall back to the known operators/functions/parentheses.
            for token in control_tokens:
                result = slice_by_string(token, parsing_expression)
                if result:
                    token, remaining_string = result
                    output_queue.append(token)  # add control token to the output
                    parsing_expression = remaining_string.strip()
                    found_control_token = True
                    break
            if not found_control_token:
                # Nothing matched: report the offending position together
                # with the partially tokenized expression.
                combined_token_list = combine_unary_sign(output_queue)
                combined_token_list.append(parsing_expression)
                error_tokens = combined_token_list
                token_position = len(combined_token_list) - 1
                raise CalcException(
                    token_position,
                    error_tokens,
                    message='Unknown token',
                    loc_string="t_unknown_token")
    return combine_unary_sign(output_queue)
| 0 | 0 | 0 |
1c31a37db490934407f98f1095e746172a04365d | 26 | py | Python | newfeature.py | rdswanson/rbtesteasybug | 44f0ed52ec7e86d496741c49053b133a01666016 | [
"MIT"
] | null | null | null | newfeature.py | rdswanson/rbtesteasybug | 44f0ed52ec7e86d496741c49053b133a01666016 | [
"MIT"
] | null | null | null | newfeature.py | rdswanson/rbtesteasybug | 44f0ed52ec7e86d496741c49053b133a01666016 | [
"MIT"
] | null | null | null | print "Some new feature!"
# Python 2 syntax (print statement); prints a placeholder message.
print "Some new feature!"
| 0 | 0 | 0 |
526d9ef03007016f4056d280aee69862a95a0419 | 110 | py | Python | t.py | rpicard92/raspberrypi-arduino-security-system | 7791557a610d2c1dda55cd844f060488cfa1e136 | [
"MIT"
] | null | null | null | t.py | rpicard92/raspberrypi-arduino-security-system | 7791557a610d2c1dda55cd844f060488cfa1e136 | [
"MIT"
] | null | null | null | t.py | rpicard92/raspberrypi-arduino-security-system | 7791557a610d2c1dda55cd844f060488cfa1e136 | [
"MIT"
] | null | null | null | import pytz
from datetime import datetime
tz = pytz.timezone('US/Eastern')
now = datetime.now(tz)
print now | 13.75 | 32 | 0.754545 | import pytz
from datetime import datetime
tz = pytz.timezone('US/Eastern')
now = datetime.now(tz)
print now | 0 | 0 | 0 |
09c34e0d54070d1869750655fa88089db5d84ed6 | 4,581 | py | Python | python/django/03-apis/apis/models.py | Pearcee/mac-m1-ash | 8b731661735a5f068cfc14d5ad4d055135f352c3 | [
"Apache-2.0"
] | null | null | null | python/django/03-apis/apis/models.py | Pearcee/mac-m1-ash | 8b731661735a5f068cfc14d5ad4d055135f352c3 | [
"Apache-2.0"
] | null | null | null | python/django/03-apis/apis/models.py | Pearcee/mac-m1-ash | 8b731661735a5f068cfc14d5ad4d055135f352c3 | [
"Apache-2.0"
] | null | null | null | from django.conf import settings
from django.db import models
from django.utils import timezone
# Entry some data into model
# Create a string representation
# api/models.py
"""
Server ID,Sponsor,Server Name,Timestamp,Distance,Ping,Download,Upload,Share,IP Address
4058 Wildcard Networks Newcastle upon Tyne 2021-06-23T09:15:13.173107Z 105.4279453 39.982 69624980.03 18494139.82 86.177.254.153
10602 Aspire Technology Solutions Gateshead 2021-06-23T09:45:09.957434Z 106.1429491 29.023 43132640.01 17716995.05 86.177.254.153
10602 Aspire Technology Solutions Gateshead 2021-06-23T10:15:12.453044Z 106.1429491 28.932 70095792.76 18465945.29 86.177.254.153
4058 Wildcard Networks Newcastle upon Tyne 2021-06-23T10:45:13.237917Z 105.4279453 30.772 69977558.18 18583131.91 86.177.254.153
01/06/2021 00:00 192.168.1.110 p110
tarif
value_exc_vat,value_inc_vat,valid_from,valid_to
15.96,16.758,2021-06-25T21:30:00Z,2021-06-25T22:00:00Z
meter
consumption,interval_start,interval_end
0.073,2021-06-24T00:00:00+01:00,2021-06-24T00:30:00+01:00
""" | 32.956835 | 130 | 0.726261 | from django.conf import settings
from django.db import models
from django.utils import timezone
# Entry some data into model
class books(models.Model):
    """Inventory record for a single book title."""
    book_name = models.CharField(max_length=10)
    author_name = models.CharField(max_length=10)
    book_price = models.IntegerField()
    book_quantity = models.IntegerField()
    # Create a string representation
    def __str__(self):
        return self.book_name
# api/models.py
class Note(models.Model):
    """Simple note with a title, free-form body, and creation timestamp."""
    title = models.CharField(max_length=200)
    body = models.TextField()
    # auto_now_add: set once when the row is first created.
    created_at = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        return self.title
class Hero(models.Model):
    """Hero record with a real name and an alias."""
    name = models.CharField(max_length=60)
    alias = models.CharField(max_length=60)
    def __str__(self):
        return self.name
class GeeksModel(models.Model):
    """Minimal title/description record (tutorial-style model)."""
    title = models.CharField(max_length = 200)
    description = models.TextField()
    def __str__(self):
        return self.title
class Post(models.Model):
    """Blog post authored by a site user; unpublished until publish()."""
    author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    title = models.CharField(max_length=200)
    text = models.TextField()
    created_date = models.DateTimeField(default=timezone.now)
    published_date = models.DateTimeField(blank=True, null=True)
    def publish(self):
        """Stamp the post as published now and persist it."""
        self.published_date = timezone.now()
        self.save()
    def approved_comments(self):
        """Return this post's approved comments.

        ``comments`` is the related_name declared on Comment.post.
        """
        return self.comments.filter(approved_comment=True)
    def __str__(self):
        return self.title
class Comment(models.Model):
    """Reader comment attached to a Post; hidden until approved."""
    post = models.ForeignKey('apis.Post', on_delete=models.CASCADE, related_name='comments')
    author = models.CharField(max_length=200)
    text = models.TextField()
    created_date = models.DateTimeField(default=timezone.now)
    approved_comment = models.BooleanField(default=False)
    def approve(self):
        """Flag this comment as approved and persist it."""
        self.approved_comment = True
        self.save()
    def __str__(self):
        return self.text
    def approved_comments(self):
        # NOTE(review): Comment has no ``comments`` relation (the
        # related_name lives on Post), so this raises AttributeError if
        # called — looks copy-pasted from Post; confirm and remove or
        # redirect to self.post.comments.
        return self.comments.filter(approved_comment=True)
class Staff(models.Model):
    """Staff-authored post; structurally identical to Post."""
    author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    title = models.CharField(max_length=200)
    text = models.TextField()
    created_date = models.DateTimeField(default=timezone.now)
    published_date = models.DateTimeField(blank=True, null=True)
    def publish(self):
        """Stamp as published now and persist."""
        self.published_date = timezone.now()
        self.save()
    def __str__(self):
        return self.title
class tarif(models.Model):
    """Half-hourly electricity tariff sample (price including VAT)."""
    # Widened from max_digits=3: with decimal_places=3 that left zero integer
    # digits, so the sample tariffs in this file (e.g. 16.758) would overflow.
    value = models.DecimalField( max_digits=8, decimal_places=3) # value_inc_vat
    Timestamp = models.DateTimeField(blank=False, null=True)
    def __str__(self):
        # __str__ must return a str; Timestamp is a datetime (and may be None).
        return str(self.Timestamp)
class meter(models.Model):
    """Half-hourly electricity consumption sample (kWh)."""
    # Widened from max_digits=3: 3 digits with 3 decimal places caps readings
    # below 1.0, which a half-hour consumption value can exceed.
    consumption = models.DecimalField( max_digits=8, decimal_places=3)
    Timestamp = models.DateTimeField(blank=False, null=True)
    def __str__(self):
        # __str__ must return a str, not a datetime.
        return str(self.Timestamp)
class online(models.Model):
    """Record of a device seen online on the local network."""
    Timestamp = models.DateTimeField(blank=False, null=True)
    ip = models.CharField(max_length=62)
    name = models.CharField(max_length=64)
    def __str__(self):
        # Django requires __str__ to return a str; Timestamp is a datetime.
        return str(self.Timestamp)
class speedtest(models.Model):
    """One speedtest result row (see the sample CSV in this module)."""
    ServerID = models.IntegerField( )
    Timestamp = models.DateTimeField(blank=False, null=True)
    Distance = models.DecimalField( max_digits=12, decimal_places=8)
    # Widened from max_digits=3: sample pings (e.g. 39.982) need integer
    # digits, but 3 digits/3 decimals could only store values below 1.
    Ping = models.DecimalField( max_digits=8, decimal_places=3)
    Download = models.DecimalField( max_digits=10, decimal_places=2)
    Upload = models.DecimalField( max_digits=10, decimal_places=2)
    ip = models.CharField(max_length=62)
    def __str__(self):
        # __str__ must return a str, not a datetime.
        return str(self.Timestamp)
"""
Server ID,Sponsor,Server Name,Timestamp,Distance,Ping,Download,Upload,Share,IP Address
4058 Wildcard Networks Newcastle upon Tyne 2021-06-23T09:15:13.173107Z 105.4279453 39.982 69624980.03 18494139.82 86.177.254.153
10602 Aspire Technology Solutions Gateshead 2021-06-23T09:45:09.957434Z 106.1429491 29.023 43132640.01 17716995.05 86.177.254.153
10602 Aspire Technology Solutions Gateshead 2021-06-23T10:15:12.453044Z 106.1429491 28.932 70095792.76 18465945.29 86.177.254.153
4058 Wildcard Networks Newcastle upon Tyne 2021-06-23T10:45:13.237917Z 105.4279453 30.772 69977558.18 18583131.91 86.177.254.153
01/06/2021 00:00 192.168.1.110 p110
tarif
value_exc_vat,value_inc_vat,valid_from,valid_to
15.96,16.758,2021-06-25T21:30:00Z,2021-06-25T22:00:00Z
meter
consumption,interval_start,interval_end
0.073,2021-06-24T00:00:00+01:00,2021-06-24T00:30:00+01:00
""" | 585 | 2,665 | 277 |
ed749cfdd8782d97c57f02aac6d2e606b35e06bb | 5,067 | py | Python | topicblob_st.py | banjtheman/TopicBlob | 4c48f09aba9526357bcb548e4dbecba06a738a8f | [
"Apache-2.0"
] | 3 | 2020-10-09T18:40:53.000Z | 2021-10-30T20:24:04.000Z | topicblob_st.py | banjtheman/TopicBlob | 4c48f09aba9526357bcb548e4dbecba06a738a8f | [
"Apache-2.0"
] | 18 | 2020-10-01T16:48:29.000Z | 2020-10-20T03:58:33.000Z | topicblob_st.py | banjtheman/TopicBlob | 4c48f09aba9526357bcb548e4dbecba06a738a8f | [
"Apache-2.0"
] | 9 | 2020-10-01T18:11:37.000Z | 2020-10-19T00:27:14.000Z | # Wikipeida topic blob example
import random
import wikipedia
import streamlit as st
import pandas as pd
from topicblob import TopicBlob
from streamlit import caching
wiki_pages_default = [
"Facebook(Company)",
"Barack Obama",
"Wikipedia",
"Topic Modeling",
"Python (programming language)",
"Snapchat",
]
@st.cache(allow_output_mutation=True)
@st.cache(allow_output_mutation=True)
# Default pages
@st.cache(allow_output_mutation=True, suppress_st_warning=True)
@st.cache(allow_output_mutation=True)
if __name__ == "__main__":
main()
| 27.688525 | 176 | 0.662522 | # Wikipeida topic blob example
import random
import wikipedia
import streamlit as st
import pandas as pd
from topicblob import TopicBlob
from streamlit import caching
wiki_pages_default = [
"Facebook(Company)",
"Barack Obama",
"Wikipedia",
"Topic Modeling",
"Python (programming language)",
"Snapchat",
]
@st.cache(allow_output_mutation=True)
def cache_wiki_pages():
    """Return the cached, mutable working list of wikipedia page titles.

    ``allow_output_mutation`` lets callers append to / reset this list in
    place and have the change persist across streamlit reruns.
    """
    wiki_pages = [
        "Facebook(Company)",
        "Barack Obama",
        "Wikipedia",
        "Topic Modeling",
        "Python (programming language)",
        "Snapchat",
    ]
    return wiki_pages
@st.cache(allow_output_mutation=True)
def update_wiki_pages(new_pages):
    """Cache and return the user's current page selection across reruns."""
    return new_pages
# Default pages
def select_wiki_pages():
    """Render the page-selection UI and return the chosen wikipedia pages."""
    wiki_pages = cache_wiki_pages()
    # TODO let user add to wiki pages
    st.subheader("Add a wikipedia page for example 'United States' ")
    wiki_page = st.text_input("Type wikipeida page")
    if st.button("Add to page list"):
        # Mutates the cached list in place (allow_output_mutation).
        wiki_pages.append(wiki_page)
    st.subheader("Reset list to defaults")
    if st.button("Reset list"):
        # Clearing the streamlit cache discards all cached page lists.
        caching.clear_cache()
        wiki_pages = wiki_pages_default
    st.subheader("Selected wikipedia pages")
    all_pages = st.multiselect("Select pages", wiki_pages, default=wiki_pages)
    wiki_pages = update_wiki_pages(all_pages)
    return all_pages
@st.cache(allow_output_mutation=True, suppress_st_warning=True)
def cahce_wiki_pages(wiki_pages):
    """Fetch and cache the wikipedia summary text for each page title.

    Disambiguation errors fall back to a random candidate page; pages that
    still fail are reported in the UI and skipped.  (Function name keeps the
    original "cahce" typo — callers depend on it.)
    """
    texts = []
    for page in wiki_pages:
        try:
            text = wikipedia.summary(page)
            texts.append(text)
        except wikipedia.DisambiguationError as e:
            # Ambiguous title: pick one of the suggested pages at random.
            alt_page = random.choice(e.options)
            st.warning(f"Picking random page from {e.options}")
            st.warning(f"Using {alt_page}")
            try:
                text = wikipedia.summary(alt_page)
                texts.append(text)
            except wikipedia.DisambiguationError as e2:
                st.error("Be more specific with your Page name")
                st.error(e2.options)
        except wikipedia.PageError as e:
            st.error(e)
            st.error("Be more specific with your Page name")
    return texts
@st.cache(allow_output_mutation=True)
def cahce_topic_blob(texts, num_topics, num_words):
    """Build (and cache) a TopicBlob model over *texts*.

    (Function name keeps the original "cahce" typo — callers depend on it.)
    """
    tb = TopicBlob(texts, num_topics, num_words)
    return tb
def show_ranked_search(row, wiki_pages):
    """Write one ranked-search result line: page title and BM25 score."""
    page_title = wiki_pages[row["Document_No"]]
    st.write(f"{page_title}: {row['ranked_score']}")
def show_sim_search(row, wiki_pages, expander):
    """Write one similarity result (title: score) into *expander*."""
    page_title = wiki_pages[row["Document_No"]]
    expander.write(f"{page_title}: {row['sim_score']}")
def show_topic_search(row, wiki_pages):
    """Write one topic-search result line: page title and its topic words."""
    page_title = wiki_pages[row["Document_No"]]
    st.write(f"{page_title}: {row['Topics']}")
def main():
    """Render the full TopicBlob demo app: page selection, topic model,
    per-page expanders, and the ranked/topic search sections."""
    st.title("TopicBlob")
    st.header("Get topics from text")
    st.write(
        "This example will allow you to run TopicBlob on wikipedia pages to show off what it can do"
    )
    # --- Gather inputs and build the (cached) topic model ---
    wiki_pages = select_wiki_pages()
    # st.write(wiki_pages)
    texts = cahce_wiki_pages(wiki_pages)
    num_topics = st.number_input("Number of topics", min_value=1, value=20, step=1)
    num_words = st.number_input(
        "Number of words per topic", min_value=1, value=20, step=1
    )
    tb = cahce_topic_blob(texts, num_topics, num_words)
    st.write("Here is the topic blob df")
    st.write(tb.df)
    # --- One expander per page: original text, topics, similar pages ---
    counter = 0
    for page in wiki_pages:
        expander = st.expander(page)
        curr_text = tb.df.iloc[counter]["Original Text"]
        topics = tb.df.iloc[counter]["Topics"]
        expander.header("Original Text")
        expander.text(curr_text)
        expander.header("Topic List")
        expander.write(topics)
        expander.header("These are the most similar pages")
        sims = tb.get_sim(counter)
        # st.write(sims)
        sims.apply(lambda row: show_sim_search(row, wiki_pages, expander), axis=1)
        counter += 1
    # --- BM25 ranked word search ---
    st.header("Ranked Search")
    st.subheader(
        "With the docs you can do a ranked word search which will find the documents that mention your input words based on the BM25 algorithm (Note: search IS Case Sensitive)"
    )
    ranked_search_word = st.text_input("Do a ranked search")
    if st.button("Ranked Search"):
        search_results = tb.ranked_search_docs_by_words(ranked_search_word)
        st.write(search_results)
        # for the df show the doc
        search_results.apply(lambda row: show_ranked_search(row, wiki_pages), axis=1)
    # --- Topic word search ---
    st.header("Topic Search")
    st.subheader(
        "With the docs you can do a topic word search which will find the documents that have your topic words (Note: search IS Case Sensitive)"
    )
    topic_search_word = st.text_input("Do a topic search")
    if st.button("Topic Search"):
        search_results = tb.search_docs_by_topics(topic_search_word)
        # st.write(search_results)
        # for the df show the doc
        search_results.apply(lambda row: show_topic_search(row, wiki_pages), axis=1)
# Standard script entry point.
if __name__ == "__main__":
    main()
| 4,282 | 0 | 203 |
a976864d9f8e5bb46c513d46ee9bd846736e94f2 | 20,397 | py | Python | netforce/tests/unit/napalm/test_eos.py | eBay/pynetforce | 599fbfd4d2dc23c0d70a730c80a0e63a4f461b2f | [
"Apache-2.0"
] | 16 | 2018-01-26T19:29:50.000Z | 2020-07-31T04:50:37.000Z | netforce/tests/unit/napalm/test_eos.py | eBay/pynetforce | 599fbfd4d2dc23c0d70a730c80a0e63a4f461b2f | [
"Apache-2.0"
] | 3 | 2018-02-02T21:45:00.000Z | 2019-09-13T15:31:50.000Z | netforce/tests/unit/napalm/test_eos.py | eBay/pynetforce | 599fbfd4d2dc23c0d70a730c80a0e63a4f461b2f | [
"Apache-2.0"
] | 7 | 2018-01-27T01:08:49.000Z | 2021-01-15T11:03:59.000Z | # Copyright 2018 eBay Inc.
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from napalm_baseebay import ebay_exceptions
from napalm_base import get_network_driver
from netforce.tests.unit.napalm import base
from pyeapi.eapilib import CommandError
class EosTestSuite(base.DietTestCase):
"""Arista EOS Test Suite
This test suite performs setup and teardown functions for this file's
unit tests. Each unit test class should inherit from this class, and
implement a single "runTest" function.
"""
def setUp(self):
"""Perform setup activities
"""
super(EosTestSuite, self).setUp()
driver = get_network_driver('ebayeos')
self.driver = driver(
hostname='127.0.0.1',
username='arista',
password='arista'
)
self.interface_names = ["Ethernet1", "Ethernet2"]
mock_mgr = mock.Mock()
self.driver.manager = mock_mgr
self.stdout = None
def tearDown(self):
"""Perform teardown activities
"""
super(EosTestSuite, self).tearDown()
# Test cases for vlan creation
| 44.927313 | 79 | 0.417218 | # Copyright 2018 eBay Inc.
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from napalm_baseebay import ebay_exceptions
from napalm_base import get_network_driver
from netforce.tests.unit.napalm import base
from pyeapi.eapilib import CommandError
class EosTestSuite(base.DietTestCase):
    """Arista EOS Test Suite

    This test suite performs setup and teardown functions for this file's
    unit tests. Each unit test class should inherit from this class, and
    implement a single "runTest" function.
    """
    def setUp(self):
        """Perform setup activities
        """
        super(EosTestSuite, self).setUp()
        # Build the ebayeos napalm driver against a dummy endpoint; no
        # connection is opened here.
        driver = get_network_driver('ebayeos')
        self.driver = driver(
            hostname='127.0.0.1',
            username='arista',
            password='arista'
        )
        self.interface_names = ["Ethernet1", "Ethernet2"]
        # Replace the eAPI manager so no test ever talks to real hardware.
        mock_mgr = mock.Mock()
        self.driver.manager = mock_mgr
        self.stdout = None
    def tearDown(self):
        """Perform teardown activities
        """
        super(EosTestSuite, self).tearDown()
# Test cases for vlan creation
class test_interface_label_validation_success(EosTestSuite):
    """update_interface_label succeeds when the re-read description
    matches the requested label."""
    def validate_commands(self, config):
        # The label update should consist of exactly two config lines.
        self.assertIs(2, len(config))
    def mock_interfaces(self, interfaces):
        # Pretend every interface already carries the new label so the
        # driver's post-change validation passes.
        ifdict = {}
        for ifname in self.interface_names:
            ifdict[ifname] = {
                "name": ifname,
                "description": 'test-label',
            }
        return ifdict
    def runTest(self):
        with mock.patch.object(self.driver, 'load_merge_candidate') \
                as load_merge_candidate_mock:
            load_merge_candidate_mock.side_effect = self.validate_commands
            with mock.patch.object(self.driver, 'commit_config') \
                    as commit_config_mock:
                commit_config_mock.return_value = None
                with mock.patch.object(self.driver, 'get_interfaces_by_name') \
                        as \
                        get_interfaces_mock:
                    with mock.patch.object(self.driver, '_exec_command') \
                            as push_changes:
                        with mock.patch.object(
                                self.driver, '_check_if_connected') \
                                as check_connected:
                            check_connected.return_value = None
                            push_changes.return_value = None
                            get_interfaces_mock.side_effect = \
                                self.mock_interfaces
                            self.driver.update_interface_label('Ethernet1',
                                                               'test-label')
class test_interface_label_post_change_validation_failure(EosTestSuite):
    """update_interface_label raises PostChangeValidationException when the
    re-read description does not match the requested label."""
    def validate_commands(self, config):
        self.assertIs(2, len(config))
    def mock_interfaces(self, interfaces):
        # Return a *different* label so post-change validation fails.
        ifdict = {}
        for ifname in self.interface_names:
            ifdict[ifname] = {
                "name": ifname,
                "description": 'test-label-1',
            }
        return ifdict
    def runTest(self):
        with mock.patch.object(self.driver, 'get_interfaces_by_name') \
                as \
                get_interfaces_mock:
            with mock.patch.object(self.driver, '_exec_command') \
                    as push_changes:
                with mock.patch.object(
                        self.driver, '_check_if_connected') \
                        as check_connected:
                    check_connected.return_value = None
                    push_changes.return_value = None
                    get_interfaces_mock.side_effect = self.mock_interfaces
                    self.assertRaises(ebay_exceptions.
                                      PostChangeValidationException,
                                      self.driver.update_interface_label,
                                      'Ethernet1', 'test-label')
class test_get_routes_with_vrf(EosTestSuite):
    """get_routes returns plain route prefixes for a VRF, excluding
    aggregates (mocked empty here)."""
    def runTest(self):
        with mock.patch.object(self.driver, '_exec_command_json') \
                as push_changes:
            with mock.patch.object(
                    self.driver, '_check_if_connected') \
                    as check_connected:
                with mock.patch.object(self.driver,
                                       'get_routes_aggregate') \
                        as routes_aggregate:
                    routes_aggregate.return_value = []
                    with mock.patch.object(self.driver, '_get_vrfs') as\
                            vrf_mock:
                        vrf_mock.return_value = ['fake-native']
                        check_connected.return_value = None
                        # Canned 'show ip route' eAPI payload with a single
                        # eBGP host route in the fake VRF.
                        push_changes.return_value = \
                            {
                                "vrfs": {
                                    "fake-native": {
                                        "routes": {
                                            "10.215.112.131/32": {
                                                "kernelProgrammed": True,
                                                "directlyConnected": False,
                                                "preference": 200,
                                                "routeAction": "forward",
                                                "vias": [{
                                                    "interface":
                                                        "Ethernet4/28/1.5",
                                                    "interfaceDescription":
                                                        "L3Q-fake-lc04:5:17/1",
                                                    "nexthopAddr":
                                                        "10.215.100.87"
                                                }],
                                                "metric": 0,
                                                "hardwareProgrammed": True,
                                                "routeType": "eBGP"
                                            },
                                        },
                                        "allRoutesProgrammedKernel": True,
                                        "routingDisabled": False,
                                        "allRoutesProgrammedHardware": True,
                                        "defaultRouteState": "reachable"
                                    }
                                }
                            }
                        ret = self.driver.get_routes("fake-native")
                        expected = ["10.215.112.131/32"]
                        self.assertEqual(expected, ret)
class test_get_routes_aggregate_with_vrf(EosTestSuite):
    """get_routes_aggregate returns only bgpAggregate prefixes for a VRF."""
    def runTest(self):
        with mock.patch.object(self.driver, '_exec_command_json') \
                as push_changes:
            with mock.patch.object(
                    self.driver, '_check_if_connected') \
                    as check_connected:
                with mock.patch.object(self.driver, '_get_vrfs') as vrf_mock:
                    vrf_mock.return_value = ['fake-native']
                    check_connected.return_value = None
                    # Canned eAPI response containing a single aggregate.
                    push_changes.return_value = \
                        {
                            "vrfs": {
                                "fake-native": {
                                    "routes": {
                                        "10.215.0.0/16": {
                                            "kernelProgrammed": True,
                                            "directlyConnected": True,
                                            "routeAction": "drop",
                                            "vias": [],
                                            "hardwareProgrammed": True,
                                            "routeType": "bgpAggregate"
                                        }
                                    },
                                    "allRoutesProgrammedKernel": True,
                                    "routingDisabled": False,
                                    "allRoutesProgrammedHardware": True,
                                    "defaultRouteState": "reachable"
                                }
                            }
                        }
                    ret = self.driver.get_routes_aggregate("fake-native")
                    expected = ["10.215.0.0/16"]
                    self.assertEqual(expected, ret)
class test_get_ip_addrs_on_interface(EosTestSuite):
    """get_ip_addrs_on_interface returns primary plus secondary addresses
    as CIDR strings."""
    def runTest(self):
        with mock.patch.object(self.driver, '_exec_command_json') \
                as push_changes:
            with mock.patch.object(
                    self.driver, '_check_if_connected') \
                    as check_connected:
                check_connected.return_value = None
                # Canned 'show interfaces Vlan1' eAPI payload with one
                # primary (192.168.2.1/24) and one secondary (192.168.1.1/24).
                push_changes.return_value = \
                    {
                        "interfaces": {
                            "Vlan1": {
                                "lastStatusChangeTimestamp": 1501719470.7075827,
                                "name": "Vlan1",
                                "interfaceStatus": "connected",
                                "burnedInAddress": "44:4c:a8:e4:18:84",
                                "mtu": 1500,
                                "hardware": "vlan",
                                "bandwidth": 0,
                                "forwardingModel": "routed",
                                "lineProtocolStatus": "up",
                                "interfaceAddress": [{
                                    "secondaryIpsOrderedList": [{
                                        "maskLen": 24,
                                        "address": "192.168.1.1"}],
                                    "broadcastAddress":
                                        "255.255.255.255",
                                    "virtualSecondaryIps": {},
                                    "dhcp": False,
                                    "secondaryIps": {
                                        "192.168.3.1": {
                                            "maskLen": 24,
                                            "address": "192.168.1.1"
                                        }
                                    },
                                    "primaryIp": {
                                        "maskLen": 24,
                                        "address": "192.168.2.1"
                                    },
                                    "virtualSecondaryIpsOrderedList": [],
                                    "virtualIp": {
                                        "maskLen": 0,
                                        "address": "0.0.0.0"
                                    }}],
                                "physicalAddress": "44:4c:a8:e4:18:84",
                                "description": ""
                            }
                        }
                    }
                data = self.driver.get_ip_addrs_on_interface('Vlan1')
                expected = [u'192.168.1.1/24', u'192.168.2.1/24']
                self.assertEqual(sorted(expected), sorted(data))
class test_delete_subnet_success(EosTestSuite):
    """delete_subnet_on_device emits config commands when the vlan is
    active."""
    def runTest(self):
        with mock.patch.object(self.driver, 'get_vlan') as vlan_mock:
            vlan_mock.return_value = {
                'name': 'test-vlan',
                'status': 'active'
            }
            with mock.patch.object(
                    self.driver, '_check_if_connected') \
                    as check_connected:
                with mock.patch.object(self.driver, '_exec_command') \
                        as push_changes:
                    push_changes.return_value = None
                    check_connected.return_value = None
                    commands = self.driver.delete_subnet_on_device(
                        '1.1.1.1/24', 2)
                    self.assertIsNotNone(commands)
class test_create_subnet_success(EosTestSuite):
    """create_subnet succeeds when the subnet shows up on the post-change
    re-read of the interface addresses."""
    def runTest(self):
        with mock.patch.object(self.driver, 'get_vlan') as vlan_mock:
            vlan_mock.return_value = {
                'name': 'test-vlan',
                'status': 'active'
            }
            with mock.patch.object(self.driver,
                                   'get_ip_addrs_on_interface') \
                    as get_subnets_mock:
                with mock.patch.object(self.driver, '_exec_command') \
                        as push_changes:
                    with mock.patch.object(
                            self.driver, '_check_if_connected') \
                            as check_connected:
                        with mock.patch.object(self.driver, 'open') \
                                as open:
                            with mock.patch.object(self.driver, 'close') \
                                    as close:
                                open.return_value = None
                                close.return_value = None
                                check_connected.return_value = None
                                push_changes.return_value = None
                                # Empty before the change, present after.
                                get_subnets_mock.side_effect = \
                                    [[], ['1.1.1.1/24']]
                                commands = self.driver.create_subnet(
                                    '1.1.1.1/24', 2)
                                self.assertIsNotNone(commands)
class test_get_mac_addresses_on_interface(EosTestSuite):
    """get_mac_addresses_on_interface should return the MAC-table entries
    for the requested port (the STATIC Router entry is not in the expected
    result), with each MAC normalised to colon-separated upper case."""
    def runTest(self):
        with mock.patch.object(self.driver, 'get_interfaces') as \
                interface_mock:
            # Canned 'show interfaces' output so the driver accepts
            # Ethernet38 as a known, connected interface.
            interface_mock.return_value = \
                ['Ethernet38 is up, line protocol is up (connected)',
                 ' Hardware is Ethernet, address is 001c.7312.692f'
                 ' (bia 001c.7312.692f)',
                 ' Ethernet MTU 9214 bytes , BW 10000000 kbit']
            with mock.patch.object(self.driver, '_exec_command') \
                    as exec_command:
                with mock.patch.object(
                        self.driver, '_check_if_connected') \
                        as check_connected:
                    with mock.patch.object(self.driver, 'open') \
                            as open:
                        with mock.patch.object(self.driver, 'close') \
                                as close:
                            # Neutralise all device I/O.
                            open.return_value = None
                            close.return_value = None
                            check_connected.return_value = None
                            # Canned MAC table: one STATIC entry on the
                            # Router port and two DYNAMIC entries on Et38;
                            # only the latter appear in `expected` below.
                            exec_command.return_value = """
    Mac Address Table
------------------------------------------------------------------
Vlan    Mac Address       Type        Ports      Moves   Last Move
----    -----------       ----        -----      -----   ---------
   1    001c.7315.b96c    STATIC      Router
   1    1cc1.de18.9a42    DYNAMIC     Et38       1       410 days, 10:10:18 ag
   1    1cc1.de18.9a44    DYNAMIC     Et38       1       410 days, 9:43:05 ag
Total Mac Addresses for this criterion: 2
"""
                            data = \
                                self.driver.get_mac_addresses_on_interface(
                                    'Ethernet38')
                            expected = \
                                [{'vlan': 1,
                                  'mac_address': u'1C:C1:DE:18:9A:42'},
                                 {'vlan': 1,
                                  'mac_address': u'1C:C1:DE:18:9A:44'}]
                            self.assertEqual(expected, data)
class test_get_traffic_on_interface(EosTestSuite):
    """get_traffic_on_interface should parse the 5-minute input/output
    rates from the counters and return them in bits per second."""
    def runTest(self):
        # Patch every device touchpoint so no real switch is contacted.
        with mock.patch.object(self.driver, 'get_interfaces') \
                as mock_interfaces, \
                mock.patch.object(self.driver, '_exec_command') \
                as mock_exec, \
                mock.patch.object(self.driver, '_check_if_connected') \
                as mock_connected, \
                mock.patch.object(self.driver, 'open') as mock_open, \
                mock.patch.object(self.driver, 'close') as mock_close:
            # Canned 'show interfaces' output: Ethernet38 is connected.
            mock_interfaces.return_value = [
                'Ethernet38 is up, line protocol is up (connected)',
                ' Hardware is Ethernet, address is 001c.7312.692f'
                ' (bia 001c.7312.692f)',
                ' Ethernet MTU 9214 bytes , BW 10000000 kbit']
            mock_open.return_value = None
            mock_close.return_value = None
            mock_connected.return_value = None
            # One reply per _exec_command call: input rate, output rate.
            mock_exec.side_effect = [
                " 5 minutes input rate 830 Mbps (8.4% with framing"
                " overhead), 69640 packets/sec\n",
                " 5 minutes output rate 411 Mbps (4.2% with framing"
                " overhead), 42739 packets/sec\n"]
            data = self.driver.get_traffic_on_interface('Ethernet38')
            self.assertEqual((830000000, 411000000), data)
class test_get_routes_aggregate_flat_network(EosTestSuite):
    """get_routes_aggregate should return the prefixes of the static,
    directly-connected drop routes reported in the second JSON reply."""
    def runTest(self):
        with mock.patch.object(self.driver, '_exec_command_json') \
                as push_changes:
            with mock.patch.object(
                    self.driver, '_check_if_connected') \
                    as check_connected:
                check_connected.return_value = None
                # First reply: empty routing table. Second reply: two
                # static drop routes whose prefixes make up `expected`.
                push_changes.side_effect = \
                    [{u'vrfs': {u'default': {u'routes': {},
                                             u'defaultRouteState': u'notSet',
                                             u'allRoutesProgrammedKernel':
                                                 True, u'routingDisabled':
                                                 False,
                                             u'allRoutesProgrammedHardware':
                                                 True}}}, {
                        "vrfs": {
                            "default": {
                                "routes": {
                                    "10.174.128.0/18": {
                                        "kernelProgrammed": True,
                                        "directlyConnected": True,
                                        "routeAction": "drop",
                                        "vias": [],
                                        "hardwareProgrammed": True,
                                        "routeType": "static"
                                    },
                                    "10.20.125.0/25": {
                                        "kernelProgrammed": True,
                                        "directlyConnected": True,
                                        "routeAction": "drop",
                                        "vias": [],
                                        "hardwareProgrammed": True,
                                        "routeType": "static"
                                    }
                                },
                                "allRoutesProgrammedKernel": True,
                                "routingDisabled": True,
                                "allRoutesProgrammedHardware": True,
                                "defaultRouteState": "notSet"
                            }
                        }
                    }]
                ret = self.driver.get_routes_aggregate()
                expected = [u'10.174.128.0/18', u'10.20.125.0/25']
                self.assertEqual(sorted(expected), sorted(ret))
| 17,738 | 332 | 603 |
950f2753e021969b86d5511f420b5f157420f4b5 | 1,063 | py | Python | net.py | JIZZ-in-my-pants-NTNU/NTNU-Automatic-Enrollment-2021 | 9aae920885d347057d3e3f875ea4df6e601971c8 | [
"Apache-2.0"
] | null | null | null | net.py | JIZZ-in-my-pants-NTNU/NTNU-Automatic-Enrollment-2021 | 9aae920885d347057d3e3f875ea4df6e601971c8 | [
"Apache-2.0"
] | null | null | null | net.py | JIZZ-in-my-pants-NTNU/NTNU-Automatic-Enrollment-2021 | 9aae920885d347057d3e3f875ea4df6e601971c8 | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
from torchsummary import summary
if __name__ == '__main__':
device = 'cuda' if torch.cuda.is_available() else 'cpu'
model = Net().to(device)
summary(model, (1, 20, 20)) | 25.309524 | 56 | 0.675447 | import torch
import torch.nn as nn
from torchsummary import summary
class Net(nn.Module):
    """Small CNN classifier for 1-channel 20x20 inputs.

    Two conv/pool stages (1 -> 4 -> 16 channels, spatial 20 -> 10 -> 5)
    feed a two-layer fully connected head emitting `num_classes` scores.
    """
    def __init__(self, in_channels=1, num_classes=39):
        super().__init__()
        # Stage 1: conv + batch-norm + LeakyReLU, then 2x2 max-pool.
        self._block1 = nn.Sequential(
            nn.Conv2d(in_channels, 4, 3, stride=1, padding=1),
            nn.BatchNorm2d(4),
            nn.LeakyReLU(0.01),
            nn.MaxPool2d(2),
        )
        # Stage 2: widen to 16 channels, pool down to 5x5.
        self._block2 = nn.Sequential(
            nn.Conv2d(4, 16, 3, stride=1, padding=1),
            nn.BatchNorm2d(16),
            nn.LeakyReLU(0.01),
            nn.MaxPool2d(2),
        )
        # Classifier head over the flattened 16*5*5 feature vector.
        self._block3 = nn.Sequential(
            nn.Linear(16 * 5 * 5, 64),
            nn.ReLU(),
            nn.BatchNorm1d(64),
            nn.Linear(64, num_classes),
        )
        self._init_weights()
    def _init_weights(self):
        """Kaiming-initialise every convolution weight; zero its bias."""
        for module in self.modules():
            if not isinstance(module, nn.Conv2d):
                continue
            nn.init.kaiming_normal_(module.weight.data)
            module.bias.data.zero_()
    def forward(self, x):
        """Map a (N, C, 20, 20) batch to (N, num_classes) scores."""
        features = self._block2(self._block1(x))
        flat = features.view(features.size(0), -1)
        return self._block3(flat)
if __name__ == '__main__':
    # Smoke check: print a layer-by-layer summary of the model for a
    # dummy single-channel 20x20 input, on GPU when one is available.
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    model = Net().to(device)
    summary(model, (1, 20, 20)) | 760 | 0 | 94 |
fa5363a87e7a2e3c77223aa8042cdf96ea35649a | 455 | py | Python | environment/sideshop.py | EdwardYGLi/the_sideshop | a8259bb99a50b187f8a187473b36b1e168fc53ab | [
"MIT"
] | null | null | null | environment/sideshop.py | EdwardYGLi/the_sideshop | a8259bb99a50b187f8a187473b36b1e168fc53ab | [
"MIT"
] | null | null | null | environment/sideshop.py | EdwardYGLi/the_sideshop | a8259bb99a50b187f8a187473b36b1e168fc53ab | [
"MIT"
] | null | null | null | """
Created by Edward Li at 10/6/20
"""
import numpy as np
| 21.666667 | 80 | 0.637363 | """
Created by Edward Li at 10/6/20
"""
import numpy as np
class SideShop:
    """State container for the side shop: bench slots, counter slots and
    the starting money."""
    def __init__(self, bench_size=5, counter_size=9, starting_money=1000):
        """Create an empty shop; every slot starts out as None."""
        self.bench_size = bench_size
        self.bench = [None] * bench_size
        self.counter_size = counter_size
        self.counter = [None] * counter_size
        self.starting_money = starting_money
    def action(self, action):
        """Handle an action on the shop (stub, not implemented yet)."""
        pass
    def refresh_bench(self):
        """Re-roll the bench contents (stub, not implemented yet)."""
        pass
| 298 | -6 | 103 |
98567d4bd5f863556f2309a22f861641bf9c9de6 | 1,381 | py | Python | babyfood/components/ABC.py | meawoppl/babyfood | 0014ac73b2f9bf5717d805e8525fafd79e6a57dd | [
"BSD-2-Clause"
] | 1 | 2019-06-03T03:57:51.000Z | 2019-06-03T03:57:51.000Z | babyfood/components/ABC.py | meawoppl/babyfood | 0014ac73b2f9bf5717d805e8525fafd79e6a57dd | [
"BSD-2-Clause"
] | null | null | null | babyfood/components/ABC.py | meawoppl/babyfood | 0014ac73b2f9bf5717d805e8525fafd79e6a57dd | [
"BSD-2-Clause"
] | null | null | null | from babyfood.homogenous import HomogenousTransform
| 28.770833 | 58 | 0.646633 | from babyfood.homogenous import HomogenousTransform
class AbstractComponent:
    """Base class for components.

    Keeps three parallel lists (IDs, names, XY positions) describing the
    component's connectors, plus the placement transform used to project
    the local connector coordinates.
    """
    def __init__(self):
        self._connectorIDs = []
        self._connectorNames = []
        self._connectorXYs = []
        self._xform = HomogenousTransform()
    def _addConnector(self, idee, xy, name=None):
        """Record one connector; *name* defaults to "Pin <id>"."""
        name = "Pin " + str(idee) if name is None else name
        # Validate eagerly so malformed data fails right here.
        assert int(idee) == idee, "ID must be an integer"
        assert str(name) == name, "Name must be a string"
        assert len(xy) == 2
        assert float(xy[0]) == xy[0]
        assert float(xy[1]) == xy[1]
        self._connectorIDs.append(idee)
        self._connectorNames.append(name)
        self._connectorXYs.append(xy)
    def getConnectorIDs(self):
        """Return the list of connector IDs."""
        return self._connectorIDs
    def getConnectorNames(self):
        """Return the list of connector names."""
        return self._connectorNames
    def getConnectorCenters(self):
        """Return the connector positions projected through the current
        placement transform."""
        return self._xform.project(self._connectorXYs)
    def place(self, ctx):
        """Capture the active side and transform from the context *ctx*."""
        self._side = ctx.getActiveSide()
        self._xform = ctx.transform.copyCurrentTransform()
class AbstractSMAComponent(AbstractComponent):
    """Surface-mount component: its connectors are pads."""
    def addPad(self, idee, xy, name=None):
        """Register a pad (thin alias for _addConnector)."""
        self._addConnector(idee, xy, name=name)
class AbstractTHComponent(AbstractComponent):
    """Through-hole component: its connectors are pins."""
    def addPin(self, idee, xy, name=None):
        """Register a pin (thin alias for _addConnector)."""
        self._addConnector(idee, xy, name=name)
| 992 | 52 | 282 |
2400c273eb1f7ada707ef76c3d8e8a89990aaf3e | 486 | py | Python | src/data_preparation/AoA/combine_repetitions_wordbank.py | rgalhama/public_ICCM2021 | 6a528a26c649da0843b7acbc785aa99b80d29a74 | [
"MIT"
] | null | null | null | src/data_preparation/AoA/combine_repetitions_wordbank.py | rgalhama/public_ICCM2021 | 6a528a26c649da0843b7acbc785aa99b80d29a74 | [
"MIT"
] | null | null | null | src/data_preparation/AoA/combine_repetitions_wordbank.py | rgalhama/public_ICCM2021 | 6a528a26c649da0843b7acbc785aa99b80d29a74 | [
"MIT"
] | null | null | null | """
Author : Raquel G. Alhama
Desc:
"""
import sys
import pandas as pd
if __name__=="__main__":
args=sys.argv[1:]
if len(args) != 1:
print("Usage: combine_repetitions_wordbank.py <file>")
exit(-1)
main(*args)
| 21.130435 | 62 | 0.617284 | """
Author : Raquel G. Alhama
Desc:
"""
import sys
import pandas as pd
def main(fname):
    """Average 'aoa' per (uni_lemma, lexical_class) pair in the CSV at
    *fname* (';'-separated) and write the result next to the input as
    <name>_means.csv."""
    frame = pd.read_csv(fname, sep=";")
    subset = frame[["uni_lemma", "lexical_class", "aoa"]]
    means = subset.groupby(["uni_lemma", "lexical_class"]).mean()
    out_fname = fname.replace(".csv", "_means.csv")
    means.to_csv(out_fname, sep=";")
if __name__=="__main__":
    # Command-line entry point: expects exactly one argument, the path
    # of the wordbank CSV to process.
    args=sys.argv[1:]
    if len(args) != 1:
        print("Usage: combine_repetitions_wordbank.py <file>")
        exit(-1)
    main(*args)
| 223 | 0 | 23 |
0a1827c34b373bbf5c43c1c317679fde999a079f | 3,598 | py | Python | bebop/editor/models.py | nanvel/bebop | c54ceb0151e26cf90b2df3f20908f8a01c09c896 | [
"MIT"
] | 1 | 2019-01-12T06:34:32.000Z | 2019-01-12T06:34:32.000Z | bebop/editor/models.py | nanvel/bebop | c54ceb0151e26cf90b2df3f20908f8a01c09c896 | [
"MIT"
] | null | null | null | bebop/editor/models.py | nanvel/bebop | c54ceb0151e26cf90b2df3f20908f8a01c09c896 | [
"MIT"
] | null | null | null | import logging
from tornado import gen
from ..common.dynamo import DDBBase
logger = logging.getLogger(__name__)
| 29.491803 | 104 | 0.573096 | import logging
from tornado import gen
from ..common.dynamo import DDBBase
logger = logging.getLogger(__name__)
class DDBEpisode(DDBBase):
    """Asynchronous CRUD access to episode items in the 'bebop' DynamoDB
    table.

    Items are keyed by the numeric hash key 'number'; the global
    secondary index 'by_title' allows equality lookups on 'title'.
    All methods are Tornado gen coroutines built on the client returned
    by self.dynamodb().
    """
    TABLE_NAME = 'bebop'
    # The data type for the attribute. You can specify S for string data,
    # N for numeric data, or B for binary data.
    ATTRIBUTE_DEFINITIONS = [{
        'AttributeName': 'number',
        'AttributeType': 'N'
    }, {
        'AttributeName': 'title',
        'AttributeType': 'S'
    }]
    # http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelPrimaryKey
    KEY_SCHEMA = [{
        'AttributeName': 'number',
        'KeyType': 'HASH'
    }]
    # http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/SecondaryIndexes.html
    LOCAL_SECONDARY_INDEXES = []
    GLOBAL_SECONDARY_INDEXES = [{
        'IndexName': 'by_title',
        'KeySchema': [
            {
                'AttributeName': 'title',
                'KeyType': 'HASH'
            }
        ],
        # http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LSI.html#LSI.Projections
        'Projection': {
            'ProjectionType': 'ALL',
        },
        'ProvisionedThroughput': {
            'ReadCapacityUnits': 2,
            'WriteCapacityUnits': 2,
        }
    }]
    # http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ProvisionedThroughputIntro.html
    PROVISIONED_THROUGHPUT = {
        'ReadCapacityUnits': 5,
        'WriteCapacityUnits': 2
    }
    ATTRIBUTES = {
        'number': 'N',
        'title': 'S',
        'airdate': 'N', # timestamp
        'content': 'S',
    }
    @gen.coroutine
    def get(self, number):
        """Fetch a single episode item by its number."""
        # http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_GetItem.html
        ddb_get_item = self.dynamodb(operation='GetItem')
        res = yield gen.Task(ddb_get_item.call,
                             table_name=self.TABLE_NAME,
                             key=self.with_types({'number': number}))
        raise gen.Return(res)
    @gen.coroutine
    def create(self, **kwargs):
        """Store a new episode item; kwargs are the item attributes."""
        # (Fixed: a dead local `item = {}` was assigned here and never
        # used; the item sent to DynamoDB is built from kwargs.)
        ddb_put_item = self.dynamodb(operation='PutItem')
        res = yield gen.Task(ddb_put_item.call,
                             table_name=self.TABLE_NAME,
                             item=self.with_types(kwargs))
        raise gen.Return(res)
    @gen.coroutine
    def delete(self, number):
        """Delete the episode item with the given number."""
        ddb_delete_item = self.dynamodb(operation='DeleteItem')
        res = yield gen.Task(ddb_delete_item.call,
                             table_name=self.TABLE_NAME,
                             key=self.with_types({'number': number}))
        raise gen.Return(res)
    @gen.coroutine
    def items(self, limit=10, last=None):
        """List episode items via a paginated Scan.

        *last* is the previous page's final episode number, used as the
        exclusive start key for the next page.
        """
        ddb_scan = self.dynamodb(operation='Scan')
        kwargs = {
            'table_name': self.TABLE_NAME,
            'limit': int(limit),
        }
        if last:
            kwargs['exclusive_start_key'] = self.with_types({'number': last})
        res = yield gen.Task(ddb_scan.call, **kwargs)
        raise gen.Return(res)
    @gen.coroutine
    def search(self, q, limit=10, last=None):
        """Find episodes whose title equals *q*, via the by_title index.

        Supports the same *limit*/*last* pagination as items().
        """
        ddb_query = self.dynamodb(operation='Query')
        kwargs = {
            'table_name': self.TABLE_NAME,
            'limit': int(limit),
            'index_name': 'by_title',
            'key_conditions': {
                'title': {
                    'AttributeValueList': [{'S': q}],
                    'ComparisonOperator': 'EQ'
                }
            }
        }
        if last:
            kwargs['exclusive_start_key'] = self.with_types({'number': last})
        res = yield gen.Task(ddb_query.call, **kwargs)
        raise gen.Return(res)
| 1,742 | 1,716 | 23 |
693aaf32ed68d1950e3ebddf062ec477ba905a32 | 872 | py | Python | bubble sort.py | Aditya-Kashyap/dsa_python | 9725a10cfbcbe3debaff50cb99b4f22e93d99762 | [
"MIT"
] | null | null | null | bubble sort.py | Aditya-Kashyap/dsa_python | 9725a10cfbcbe3debaff50cb99b4f22e93d99762 | [
"MIT"
] | null | null | null | bubble sort.py | Aditya-Kashyap/dsa_python | 9725a10cfbcbe3debaff50cb99b4f22e93d99762 | [
"MIT"
] | null | null | null | def bubble_sort(array_to_sort):
    """
    Sort the list in place with bubble sort and return it.

    Worst case O(n^2); best case O(n) when the input is already sorted,
    thanks to the early exit below.
    :param array_to_sort: The list to be sorted (mutated in place)
    :return: The same list, in ascending order
    """
    n = len(array_to_sort)
    for i in range(n-1):
        swap = False
        # After pass i the last i elements are already in final position.
        for j in range(n-i-1):
            if array_to_sort[j] > array_to_sort[j + 1]:
                array_to_sort[j], array_to_sort[j + 1] = array_to_sort[j + 1], array_to_sort[j]
                swap = True
        # A full pass without swaps means the list is sorted: stop early.
        if swap is False:
            return array_to_sort
    return array_to_sort
if __name__ == '__main__':
    # Manual smoke test: sort a few sample lists, covering the general
    # case, a two-element list, reverse order, an empty list and an
    # already-sorted list.
    sample = [[1, 2, 34, 45, 12, 42, 44, 53, 5],
              [1, 0],
              [0, 9, 8, 6, 5, 4, 3, 2, 1],
              [],
              [1, 2, 3, 4, 5, 6, 7]]
    for arr in range(len(sample)):
        print(bubble_sort(sample[arr]))
def bubble_sort(array_to_sort):
    """
    Sort *array_to_sort* in place using bubble sort and return it.

    Worst case O(n^2); best case O(n) for already-sorted input thanks
    to the early exit when a full pass performs no swaps.
    :param array_to_sort: The list to be sorted (mutated in place)
    :return: The same list, in ascending order
    """
    length = len(array_to_sort)
    for pass_num in range(length - 1):
        swapped = False
        # The last `pass_num` slots already hold their final values.
        for idx in range(length - pass_num - 1):
            left, right = array_to_sort[idx], array_to_sort[idx + 1]
            if left > right:
                array_to_sort[idx], array_to_sort[idx + 1] = right, left
                swapped = True
        if not swapped:
            break
    return array_to_sort
if __name__ == '__main__':
    # Manual smoke test: sort a few sample lists, covering the general
    # case, a two-element list, reverse order, an empty list and an
    # already-sorted list.
    sample = [[1, 2, 34, 45, 12, 42, 44, 53, 5],
              [1, 0],
              [0, 9, 8, 6, 5, 4, 3, 2, 1],
              [],
              [1, 2, 3, 4, 5, 6, 7]]
    for arr in range(len(sample)):
        print(bubble_sort(sample[arr]))
| 0 | 0 | 0 |
3e275af8678d6e2dc7611df332b5091af46124fd | 4,495 | py | Python | scripts/scil_detect_streamlines_loops.py | fullbat/scilpy | 8f5b95a0b298ac95268c94d04a162b14fe2773ad | [
"MIT"
] | null | null | null | scripts/scil_detect_streamlines_loops.py | fullbat/scilpy | 8f5b95a0b298ac95268c94d04a162b14fe2773ad | [
"MIT"
] | null | null | null | scripts/scil_detect_streamlines_loops.py | fullbat/scilpy | 8f5b95a0b298ac95268c94d04a162b14fe2773ad | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import logging
import nibabel as nib
import numpy as np
from scilpy.io.utils import (add_overwrite_arg,
assert_inputs_exist,
assert_outputs_exist,
check_tracts_same_format)
from scilpy.tractanalysis.features import remove_loops_and_sharp_turns
DESCRIPTION = """
This script can be used to remove loops in two types of streamline datasets:
- Whole brain: For this type, the script removes streamlines if they
make a loop with an angle of more than 360 degrees. It's possible to change
this angle with the -a option. Warning: Don't use --qb option for a
whole brain tractography.
- Bundle dataset: For this type, it is possible to remove loops and
streamlines outside of the bundle. For the sharp angle turn, use --qb option.
----------------------------------------------------------------------------
Reference:
QuickBundles based on [Garyfallidis12] Frontiers in Neuroscience, 2012.
----------------------------------------------------------------------------
"""
if __name__ == "__main__":
main()
| 39.429825 | 81 | 0.56218 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import logging
import nibabel as nib
import numpy as np
from scilpy.io.utils import (add_overwrite_arg,
assert_inputs_exist,
assert_outputs_exist,
check_tracts_same_format)
from scilpy.tractanalysis.features import remove_loops_and_sharp_turns
DESCRIPTION = """
This script can be used to remove loops in two types of streamline datasets:
- Whole brain: For this type, the script removes streamlines if they
make a loop with an angle of more than 360 degrees. It's possible to change
this angle with the -a option. Warning: Don't use --qb option for a
whole brain tractography.
- Bundle dataset: For this type, it is possible to remove loops and
streamlines outside of the bundle. For the sharp angle turn, use --qb option.
----------------------------------------------------------------------------
Reference:
QuickBundles based on [Garyfallidis12] Frontiers in Neuroscience, 2012.
----------------------------------------------------------------------------
"""
def _build_arg_parser():
p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
description=DESCRIPTION)
p.add_argument('in_tractogram',
help='Tractogram input file name.')
p.add_argument('out_tractogram',
help='Output tractogram without loops.')
p.add_argument('--remaining_tractogram',
help='If set, saves detected looping streamlines.')
p.add_argument('--qb', action='store_true',
help='If set, uses QuickBundles to detect\n' +
'outliers (loops, sharp angle turns).\n' +
'Should mainly be used with bundles. '
'[%(default)s]')
p.add_argument('--threshold', default=8., type=float,
help='Maximal streamline to bundle distance\n' +
'for a streamline to be considered as\n' +
'a tracking error. [%(default)s]')
p.add_argument('-a', dest='angle', default=360, type=float,
help='Maximum looping (or turning) angle of\n' +
'a streamline in degrees. [%(default)s]')
add_overwrite_arg(p)
return p
def main():
parser = _build_arg_parser()
args = parser.parse_args()
assert_inputs_exist(parser, args.in_tractogram)
assert_outputs_exist(parser, args, args.out_tractogram,
optional=args.remaining_tractogram)
check_tracts_same_format(parser, [args.in_tractogram, args.out_tractogram,
args.remaining_tractogram])
if args.threshold <= 0:
parser.error('Threshold "{}" '.format(args.threshold) +
'must be greater than 0')
if args.angle <= 0:
parser.error('Angle "{}" '.format(args.angle) +
'must be greater than 0')
tractogram = nib.streamlines.load(args.in_tractogram)
streamlines = tractogram.streamlines
streamlines_c = []
loops = []
if len(streamlines) > 1:
streamlines_c, loops = remove_loops_and_sharp_turns(streamlines,
args.angle,
args.qb,
args.threshold)
else:
parser.error('Zero or one streamline in {}'.format(args.in_tractogram) +
'. The file must have more than one streamline.')
if len(streamlines_c) > 0:
tractogram_c = nib.streamlines.Tractogram(streamlines_c,
affine_to_rasmm=np.eye(4))
nib.streamlines.save(tractogram_c, args.out_tractogram,
header=tractogram.header)
else:
logging.warning(
'No clean streamlines in {}'.format(args.in_tractogram))
if len(loops) == 0:
logging.warning('No loops in {}'.format(args.in_tractogram))
elif args.remaining_tractogram:
tractogram_l = nib.streamlines.Tractogram(loops,
affine_to_rasmm=np.eye(4))
nib.streamlines.save(tractogram_l, args.remaining_tractogram,
header=tractogram.header)
if __name__ == "__main__":
main()
| 3,265 | 0 | 46 |
6a9aeebc5c617372c8868cb459b6cbb1f0fcbbe3 | 265 | py | Python | quiz 2 (1).py | Ummed1/Project | 3be222d88a81c3303df510986c08c74200b947de | [
"BSD-3-Clause"
] | null | null | null | quiz 2 (1).py | Ummed1/Project | 3be222d88a81c3303df510986c08c74200b947de | [
"BSD-3-Clause"
] | null | null | null | quiz 2 (1).py | Ummed1/Project | 3be222d88a81c3303df510986c08c74200b947de | [
"BSD-3-Clause"
] | null | null | null | Python 3.6.2 (v3.6.2:5fd33b5, Jul 8 2017, 04:14:34) [MSC v.1900 32 bit (Intel)] on win32
Type "copyright", "credits" or "license()" for more information.
>>> def mystery(l):
l = l[2:5]
return()
>>> list1 = [7,82,44,23,11]
>>> mystery(list1)
()
>>>
| 24.090909 | 90 | 0.577358 | Python 3.6.2 (v3.6.2:5fd33b5, Jul 8 2017, 04:14:34) [MSC v.1900 32 bit (Intel)] on win32
Type "copyright", "credits" or "license()" for more information.
>>> def mystery(l):
l = l[2:5]
return()
>>> list1 = [7,82,44,23,11]
>>> mystery(list1)
()
>>>
| 0 | 0 | 0 |
73756f794590a3d616b42fa27649ffced19c16d3 | 458 | py | Python | tests/unit/test_client_initialisation.py | querius-dev/client | 91018b78fd199e5653adc022ef628347a4e6c588 | [
"Apache-2.0"
] | null | null | null | tests/unit/test_client_initialisation.py | querius-dev/client | 91018b78fd199e5653adc022ef628347a4e6c588 | [
"Apache-2.0"
] | null | null | null | tests/unit/test_client_initialisation.py | querius-dev/client | 91018b78fd199e5653adc022ef628347a4e6c588 | [
"Apache-2.0"
] | null | null | null | import json
from pathlib import Path
from querius import QueriusClient
| 28.625 | 84 | 0.727074 | import json
from pathlib import Path
from querius import QueriusClient
def test_initialising_client_from_json():
    """Smoke-test building a QueriusClient from on-disk service-account
    JSON; only construction is exercised, no API call is made."""
    creds = json.loads(Path('fake-service-account.json').read_text())
    # Placeholder values for everything except the parsed credentials.
    QueriusClient.from_service_account_info(
        customer_id="test_customer_id",
        service_account_info=creds,
        api_url="test_api_url",
        timeout_seconds="test_timeout_seconds",
    )
| 362 | 0 | 23 |
194479747ae092634db8f1a11755259ce6781f70 | 137 | py | Python | aiocloudflare/api/zones/amp/sxg/sxg.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 2 | 2021-09-14T13:20:55.000Z | 2022-02-24T14:18:24.000Z | aiocloudflare/api/zones/amp/sxg/sxg.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 46 | 2021-09-08T08:39:45.000Z | 2022-03-29T12:31:05.000Z | aiocloudflare/api/zones/amp/sxg/sxg.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 1 | 2021-12-30T23:02:23.000Z | 2021-12-30T23:02:23.000Z | from aiocloudflare.commons.auth import Auth
| 17.125 | 43 | 0.693431 | from aiocloudflare.commons.auth import Auth
class Sxg(Auth):
_endpoint1 = "zones"
_endpoint2 = "amp/sxg"
_endpoint3 = None
| 0 | 69 | 23 |
00bfc7231cdfb91e3edb0620539ba099756c173b | 2,995 | py | Python | examples/ocean_2D_paper/vvp.py | wedeling/FabUQCampaign | f89ee1a7b72ec1c41d6bf662f1b42acd8065cb32 | [
"BSD-3-Clause"
] | 1 | 2020-06-26T10:37:56.000Z | 2020-06-26T10:37:56.000Z | examples/ocean_2D_paper/vvp.py | wedeling/FabUQCampaign | f89ee1a7b72ec1c41d6bf662f1b42acd8065cb32 | [
"BSD-3-Clause"
] | null | null | null | examples/ocean_2D_paper/vvp.py | wedeling/FabUQCampaign | f89ee1a7b72ec1c41d6bf662f1b42acd8065cb32 | [
"BSD-3-Clause"
] | 2 | 2020-04-20T12:50:11.000Z | 2020-04-24T10:35:13.000Z | # This file contains prototype VVP implementations.
#
# These patterns should be purposed for specific settings.
# As such, they do not contain a @task descriptor.
import os
"""
validate_ensemble_output Validation Pattern.
Purpose:
1. given an ensemble of validation output directories, it will:
2. operate a validation_function on each direction, and
3. print the outputs to screen, and
4. use an aggregation function to combine all outputs into a compound metric,
and
5. print the compound metric.
SPECS of the required validation_function:
- Should operate with a single argument (the simulation output directory).
- ASSUMPTION: There is no explicit argument to indicate where the validation
data resides (currently assumed to be packaged with the simulation output
and known by the function). -> we could choose to make this explicit.
- The validation function should return a set of metrics.
SPECS of the aggregation_function:
- Receives a Python list with the output of validation_function in each
element.
- Returns a data type that represents the compound validation outcome
(.e.g, one or more error scores).
"""
"""
***********
SUGGESTIONS
***********
1) 'validate_ensemble_output' may not be a good name, this function will be
used for verification as well, changed it to 'ensemble_vvp'
2) 'validation_function': same comment, changed it to 'sample_testing_function',
being the opposite of 'aggregation_function', something that acts on a single sample only
3) print("AVERAGED VALIDATION SCORE ...) line is removed
4) added **kwargs in case the sample_testing/aggragation function takes more than the result_dir as argument
5) added the possibility of multiple results_dirs
6) added the possibility of hand-selecting selecting (a subset of) the sample directories via 'items' in kwargs
!! This is also required if the order in which the scores are appended is important
since os.listdirs returns an illogical order
"""
def ensemble_vvp(results_dirs, sample_testing_function, aggregation_function, **kwargs):
"""
Goes through all the output directories and calculates the scores.
"""
#if a single result_dir is specified, still add it to a list
if type(results_dirs) == str:
tmp = []; tmp.append(results_dirs); results_dirs = tmp
for results_dir in results_dirs:
scores = []
#use user-specified sample directories if specified,
#otherwise look for uq results in all directories in results_dir
if 'items' in kwargs:
items = kwargs['items']
else:
items = os.listdir("{}".format(results_dir))
for item in items:
if os.path.isdir(os.path.join(results_dir, item)):
print(os.path.join(results_dir, item))
scores.append(sample_testing_function(os.path.join(results_dir, item), **kwargs))
#print("scores:", scores)
aggregation_function(scores, **kwargs) | 39.407895 | 112 | 0.718531 | # This file contains prototype VVP implementations.
#
# These patterns should be purposed for specific settings.
# As such, they do not contain a @task descriptor.
import os
"""
validate_ensemble_output Validation Pattern.
Purpose:
1. given an ensemble of validation output directories, it will:
2. operate a validation_function on each direction, and
3. print the outputs to screen, and
4. use an aggregation function to combine all outputs into a compound metric,
and
5. print the compound metric.
SPECS of the required validation_function:
- Should operate with a single argument (the simulation output directory).
- ASSUMPTION: There is no explicit argument to indicate where the validation
data resides (currently assumed to be packaged with the simulation output
and known by the function). -> we could choose to make this explicit.
- The validation function should return a set of metrics.
SPECS of the aggregation_function:
- Receives a Python list with the output of validation_function in each
element.
- Returns a data type that represents the compound validation outcome
(.e.g, one or more error scores).
"""
"""
***********
SUGGESTIONS
***********
1) 'validate_ensemble_output' may not be a good name, this function will be
used for verification as well, changed it to 'ensemble_vvp'
2) 'validation_function': same comment, changed it to 'sample_testing_function',
being the opposite of 'aggregation_function', something that acts on a single sample only
3) print("AVERAGED VALIDATION SCORE ...) line is removed
4) added **kwargs in case the sample_testing/aggregation function takes more than the result_dir as argument
5) added the possibility of multiple results_dirs
6) added the possibility of hand-selecting selecting (a subset of) the sample directories via 'items' in kwargs
!! This is also required if the order in which the scores are appended is important
since os.listdirs returns an illogical order
"""
def ensemble_vvp(results_dirs, sample_testing_function, aggregation_function, **kwargs):
    """
    Go through all the output directories and calculate the scores.

    For every directory in *results_dirs*, each sample subdirectory is
    passed to sample_testing_function(sample_dir, **kwargs); the list of
    collected scores is then handed to
    aggregation_function(scores, **kwargs).

    *results_dirs* may be a single path string or a list of paths.
    The optional kwargs key 'items' selects an explicit, ordered subset
    of sample directory names (os.listdir returns an arbitrary order,
    so pass 'items' when score order matters). kwargs are forwarded to
    both callables.
    """
    # Accept a single results directory as a convenience.
    if isinstance(results_dirs, str):
        results_dirs = [results_dirs]
    for results_dir in results_dirs:
        scores = []
        # Use caller-specified sample directories if given, otherwise
        # scan every entry of the results directory.
        items = kwargs['items'] if 'items' in kwargs else os.listdir(results_dir)
        for item in items:
            sample_path = os.path.join(results_dir, item)
            if os.path.isdir(sample_path):
                print(sample_path)
                scores.append(sample_testing_function(sample_path, **kwargs))
        aggregation_function(scores, **kwargs)
44d001dd8db974eaa563c80c08c73ce830935e16 | 1,858 | py | Python | curris/helper.py | a1trl9/curris | f9b55a7a2a8864882f297a4a324ccff182176417 | [
"MIT"
] | null | null | null | curris/helper.py | a1trl9/curris | f9b55a7a2a8864882f297a4a324ccff182176417 | [
"MIT"
] | null | null | null | curris/helper.py | a1trl9/curris | f9b55a7a2a8864882f297a4a324ccff182176417 | [
"MIT"
] | null | null | null | """ common helper module
"""
def elimate_whitespace_around(source):
""" return contents surrounded by whitespaces.
whitespace :: space | tab
"""
if not source:
return source
start, length = 0, len(source)
end, whitespace = length - 1, ' \t'
while start < length:
if source[start] not in whitespace:
break
start += 1
while end >= 0:
if source[end] not in whitespace:
break
end -= 1
return source[start:end + 1]
def elimate_leading_whitespace(source, target=None):
""" return the count of whitespaces before the first target
if it is not the mode: <whitespace>*_target_, return 0
"""
if not source:
return 0
i, length = 0, len(source)
while i < length:
if source[i] not in ' \t':
if (target and source[i] == target) or target is None:
return i
return 0
i += 1
return 0
def split_first_whitespace(source):
""" split source by first <whitespace>*
"""
if not source:
return [source]
i, length = 0, len(source)
while i < length:
if source[i] in ' \t':
temp = i
i += 1
while i < length and source[i] in ' \t':
i += 1
return [source[:temp], source[i:]]
i += 1
return [source]
def forward_until(source, start, symbol):
""" forward util a symbol
"""
count = len(symbol)
length = len(source) + 1 - count
result = []
while start < length and source[start:start + count] != symbol:
if source[start] == '\\':
if start + 1 < length:
result.append(source[start + 1])
start += 2
continue
result.append(source[start])
start += 1
return (start, ''.join(result))
| 26.169014 | 67 | 0.535522 | """ common helper module
"""
def elimate_whitespace_around(source):
""" return contents surrounded by whitespaces.
whitespace :: space | tab
"""
if not source:
return source
start, length = 0, len(source)
end, whitespace = length - 1, ' \t'
while start < length:
if source[start] not in whitespace:
break
start += 1
while end >= 0:
if source[end] not in whitespace:
break
end -= 1
return source[start:end + 1]
def elimate_leading_whitespace(source, target=None):
""" return the count of whitespaces before the first target
if it is not the mode: <whitespace>*_target_, return 0
"""
if not source:
return 0
i, length = 0, len(source)
while i < length:
if source[i] not in ' \t':
if (target and source[i] == target) or target is None:
return i
return 0
i += 1
return 0
def split_first_whitespace(source):
""" split source by first <whitespace>*
"""
if not source:
return [source]
i, length = 0, len(source)
while i < length:
if source[i] in ' \t':
temp = i
i += 1
while i < length and source[i] in ' \t':
i += 1
return [source[:temp], source[i:]]
i += 1
return [source]
def forward_until(source, start, symbol):
""" forward util a symbol
"""
count = len(symbol)
length = len(source) + 1 - count
result = []
while start < length and source[start:start + count] != symbol:
if source[start] == '\\':
if start + 1 < length:
result.append(source[start + 1])
start += 2
continue
result.append(source[start])
start += 1
return (start, ''.join(result))
| 0 | 0 | 0 |
3a798996665bff8011af44fac1c04f3067f62a8e | 2,173 | py | Python | charts-external/israelproxy/differ/main.py | omertalmi5/hasadna-k8s | cbc3236e4a9b9a88e227a97a7f5f98d15b62dc83 | [
"MIT"
] | 9 | 2018-10-28T00:28:09.000Z | 2021-11-26T23:02:05.000Z | charts-external/israelproxy/differ/main.py | omertalmi5/hasadna-k8s | cbc3236e4a9b9a88e227a97a7f5f98d15b62dc83 | [
"MIT"
] | 7 | 2019-01-07T18:12:04.000Z | 2021-11-09T18:04:17.000Z | charts-external/israelproxy/differ/main.py | omertalmi5/hasadna-k8s | cbc3236e4a9b9a88e227a97a7f5f98d15b62dc83 | [
"MIT"
] | 15 | 2018-08-13T13:38:17.000Z | 2020-10-08T17:45:32.000Z | #!/usr/bin/env python3
import time, requests, hashlib, os, json, sys, subprocess, traceback
from datetime import datetime
from ruamel import yaml
# Load the list of URL targets to watch from the mounted config file.
with open('/etc/differ/config.yaml') as f:
    config = yaml.safe_load(f)
INTERVAL_SECONDS = 60*20 # check all diffs every 20 minutes
DIFF_OBJS = config['diff-objs']
DATA_DIR = os.environ.get('DATA_DIR', '/tmp/differ-data')
# Main poll loop: fetch each URL, hash the body, and whenever the hash
# changes persist the new content plus a diff against the previous copy.
while True:
    for obj in DIFF_OBJS:
        date = datetime.now().strftime('%Y-%m-%d-%H%M')
        data_dir = DATA_DIR + '/' + obj['id'] + '/'
        os.makedirs(data_dir + 'history', exist_ok=True)
        os.makedirs(data_dir + 'hashes', exist_ok=True)
        try:
            content = requests.get(obj['url']).content
            hash = hashlib.sha512(content).hexdigest()
            last_hash = None
            # last_update.json remembers the hash from the previous poll.
            fn = data_dir + 'last_update.json'
            if os.path.exists(fn):
                with open(fn) as f:
                    last_hash = json.load(f)['hash']
            # First fetch, or the content changed: persist the update.
            if not last_hash or last_hash != hash:
                print(date + ' updated: ' + obj['id'])
                data = {'hash': hash, 'date': date}
                with open(fn, 'w') as f:
                    json.dump(data, f)
                with open(data_dir + 'history/' + date + '.hash', 'w') as f:
                    f.write(hash)
                # Bodies are stored once each, keyed by their hash.
                fn = data_dir + 'hashes/' + hash
                if not os.path.exists(fn):
                    with open(fn, 'wb') as f:
                        f.write(content)
                diff_fn = data_dir + 'last_update.diff'
                if os.path.exists(diff_fn):
                    os.unlink(diff_fn)
                if last_hash:
                    last_fn = data_dir + 'hashes/' + last_hash
                    if os.path.exists(last_fn):
                        subprocess.call('diff %s %s > %s' % (last_fn, fn, diff_fn), shell=True)
        except Exception:
            # Never let one failing target kill the loop; record the
            # traceback next to that target's data instead.
            exc = traceback.format_exc()
            print(exc)
            error_fn = data_dir + 'last_error.json'
            with open(error_fn, 'w') as f:
                json.dump({'exc': exc, 'date': date}, f)
    print('.')
    sys.stdout.flush()
    time.sleep(INTERVAL_SECONDS)
| 40.240741 | 95 | 0.517717 | #!/usr/bin/env python3
import time, requests, hashlib, os, json, sys, subprocess, traceback
from datetime import datetime
from ruamel import yaml
with open('/etc/differ/config.yaml') as f:
config = yaml.safe_load(f)
INTERVAL_SECONDS = 60*20 # check all diffs every 20 minutes
DIFF_OBJS = config['diff-objs']
DATA_DIR = os.environ.get('DATA_DIR', '/tmp/differ-data')
while True:
for obj in DIFF_OBJS:
date = datetime.now().strftime('%Y-%m-%d-%H%M')
data_dir = DATA_DIR + '/' + obj['id'] + '/'
os.makedirs(data_dir + 'history', exist_ok=True)
os.makedirs(data_dir + 'hashes', exist_ok=True)
try:
content = requests.get(obj['url']).content
hash = hashlib.sha512(content).hexdigest()
last_hash = None
fn = data_dir + 'last_update.json'
if os.path.exists(fn):
with open(fn) as f:
last_hash = json.load(f)['hash']
if not last_hash or last_hash != hash:
print(date + ' updated: ' + obj['id'])
data = {'hash': hash, 'date': date}
with open(fn, 'w') as f:
json.dump(data, f)
with open(data_dir + 'history/' + date + '.hash', 'w') as f:
f.write(hash)
fn = data_dir + 'hashes/' + hash
if not os.path.exists(fn):
with open(fn, 'wb') as f:
f.write(content)
diff_fn = data_dir + 'last_update.diff'
if os.path.exists(diff_fn):
os.unlink(diff_fn)
if last_hash:
last_fn = data_dir + 'hashes/' + last_hash
if os.path.exists(last_fn):
subprocess.call('diff %s %s > %s' % (last_fn, fn, diff_fn), shell=True)
except Exception:
exc = traceback.format_exc()
print(exc)
error_fn = data_dir + 'last_error.json'
with open(error_fn, 'w') as f:
json.dump({'exc': exc, 'date': date}, f)
print('.')
sys.stdout.flush()
time.sleep(INTERVAL_SECONDS)
| 0 | 0 | 0 |
1c944ddc7f210e2bb69a5e1b3f739d4102061b1f | 1,381 | py | Python | doc/integrations/pytorch/parlai/crowdsourcing/tasks/turn_annotations_static/util.py | novium258/cortx-1 | ce5b939b33b8d24d89b31807ac3bcaa8f24096bc | [
"Apache-2.0"
] | 1 | 2020-09-27T05:00:06.000Z | 2020-09-27T05:00:06.000Z | doc/integrations/pytorch/parlai/crowdsourcing/tasks/turn_annotations_static/util.py | novium258/cortx-1 | ce5b939b33b8d24d89b31807ac3bcaa8f24096bc | [
"Apache-2.0"
] | 1 | 2021-08-04T11:17:39.000Z | 2021-08-04T11:17:39.000Z | doc/integrations/pytorch/parlai/crowdsourcing/tasks/turn_annotations_static/util.py | novium258/cortx-1 | ce5b939b33b8d24d89b31807ac3bcaa8f24096bc | [
"Apache-2.0"
] | 1 | 2021-05-03T13:27:14.000Z | 2021-05-03T13:27:14.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import random
from mephisto.operations.operator import Operator
from mephisto.tools.scripts import load_db_and_process_config
from omegaconf import DictConfig, OmegaConf
from parlai.crowdsourcing.utils.frontend import build_task
from parlai.crowdsourcing.utils.mturk import soft_block_mturk_workers
def run_static_task(cfg: DictConfig, task_directory: str):
    """
    Run static task, given configuration.

    Loads the Mephisto DB from the Hydra config, soft-blocks previously
    seen MTurk workers under a task-specific qualification, builds the
    frontend bundle from *task_directory*, then launches the task via a
    Mephisto Operator and blocks until all runs have finished.
    """
    db, cfg = load_db_and_process_config(cfg)
    print(f'\nHydra config:\n{OmegaConf.to_yaml(cfg)}')
    # Seed the RNG deterministically for reproducible task composition.
    random.seed(42)
    task_name = cfg.mephisto.task.get('task_name', 'turn_annotations_static')
    soft_block_qual_name = cfg.mephisto.blueprint.get(
        'block_qualification', f'{task_name}_block'
    )
    # Default to a task-specific name to avoid soft-block collisions
    soft_block_mturk_workers(cfg=cfg, db=db, soft_block_qual_name=soft_block_qual_name)
    build_task(task_directory)
    operator = Operator(db)
    operator.validate_and_run_config(run_config=cfg.mephisto, shared_state=None)
    # Block until workers are done; skip_input avoids waiting on stdin.
    operator.wait_for_runs_then_shutdown(
        skip_input=True, log_rate=cfg.monitoring_log_rate
    )
| 33.682927 | 88 | 0.745836 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import random
from mephisto.operations.operator import Operator
from mephisto.tools.scripts import load_db_and_process_config
from omegaconf import DictConfig, OmegaConf
from parlai.crowdsourcing.utils.frontend import build_task
from parlai.crowdsourcing.utils.mturk import soft_block_mturk_workers
def run_static_task(cfg: DictConfig, task_directory: str):
"""
Run static task, given configuration.
"""
db, cfg = load_db_and_process_config(cfg)
print(f'\nHydra config:\n{OmegaConf.to_yaml(cfg)}')
random.seed(42)
task_name = cfg.mephisto.task.get('task_name', 'turn_annotations_static')
soft_block_qual_name = cfg.mephisto.blueprint.get(
'block_qualification', f'{task_name}_block'
)
# Default to a task-specific name to avoid soft-block collisions
soft_block_mturk_workers(cfg=cfg, db=db, soft_block_qual_name=soft_block_qual_name)
build_task(task_directory)
operator = Operator(db)
operator.validate_and_run_config(run_config=cfg.mephisto, shared_state=None)
operator.wait_for_runs_then_shutdown(
skip_input=True, log_rate=cfg.monitoring_log_rate
)
| 0 | 0 | 0 |
20a3608e091e6f1855a85a202b4b7cd654459ff8 | 361 | py | Python | app/app/test.py | xrgarcia/python-api | 580ba3d9fdd3f8d5486dbcb0aa907036e3286748 | [
"MIT"
] | null | null | null | app/app/test.py | xrgarcia/python-api | 580ba3d9fdd3f8d5486dbcb0aa907036e3286748 | [
"MIT"
] | null | null | null | app/app/test.py | xrgarcia/python-api | 580ba3d9fdd3f8d5486dbcb0aa907036e3286748 | [
"MIT"
] | null | null | null | from django.test import TestCase
from app.calc import add, subtract | 32.818182 | 58 | 0.695291 | from django.test import TestCase
from app.calc import add, subtract
class CalcTests(TestCase):
    """Unit tests for the add/subtract helpers in app.calc."""
    def test_add_numbers(self):
        """Test that two numbers are added together"""
        self.assertEqual(add(10,4),14)
    def test_subtract_numbers(self):
        """Test that values are subtracted and returned"""
        self.assertEqual(subtract(10,4),6)
0f2808657125e397d5f0154b7dcaa22585e90526 | 3,881 | py | Python | comparebruslib.py | Caronthir/TALYS-Launcher | b8d1e1cf8966d0a72bb8ace31d9923e77d22846f | [
"MIT"
] | 4 | 2016-11-02T16:07:26.000Z | 2019-12-04T14:57:23.000Z | comparebruslib.py | ErlendLima/TALYS-Launcher | b8d1e1cf8966d0a72bb8ace31d9923e77d22846f | [
"MIT"
] | 2 | 2017-11-25T18:04:07.000Z | 2017-11-25T18:11:59.000Z | comparebruslib.py | ErlendLima/TALYS-Launcher | b8d1e1cf8966d0a72bb8ace31d9923e77d22846f | [
"MIT"
] | 3 | 2016-11-02T16:16:00.000Z | 2020-02-28T08:23:58.000Z | import re
import os
import argparse
import sys
import pprint
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("talysdirectory")
parser.add_argument("bruslib")
args = parser.parse_args()
# Regex to extract mass+symbol of element
pattern = re.compile("(\d{1,3}[a-zA-Z]{1,3})")
# Make the paths absolute (technical detail)
talys_directory = os.path.abspath(args.talysdirectory)
bruslib_directory = os.path.abspath(args.bruslib)
# Get the data
resdata = load_results(talys_directory)
brusdata = load_bruslib(bruslib_directory)
# resdata and brusdata are dicts of the form
# {MassElement:[Temperature,ReactionRate]}
# for example:
# {151Sm:[[0,0.1],[0.1,2.1E-8]...], 152S:[...]}
# For illustration:
compare(brusdata, resdata)
| 34.963964 | 81 | 0.51327 | import re
import os
import argparse
import sys
import pprint
def load_bruslib(directory):
    """Load BRUSLIB reaction-rate tables from every file under *directory*.

    Each file is read as consecutive 33-line chunks; the second line of a
    chunk carries the isotope identifier (e.g. "151Sm") and lines 5-33
    hold "temperature rate" columns.

    Returns a dict {massSymbol: [[temperature, rate], ...]}.
    """
    # Compile the identifier regex locally: the module-level `pattern`
    # only exists under the __main__ guard, so relying on it made this
    # function unusable when the module is imported.
    mass_pattern = re.compile(r"(\d{1,3}[a-zA-Z]{1,3})")
    data = {}
    # Iterate through the directories
    for root, subdirs, files in os.walk(directory):
        for file in files:
            # Read all files
            path = os.path.join(root, file)
            try:
                with open(path, "r") as inputfile:
                    lines = inputfile.readlines()
            except Exception as E:
                print(E)
                continue
            # The reaction rates from each isotope are stored in
            # fixed-size chunks of 33 lines.
            chunks = []
            while lines:
                chunks.append(lines[:33])
                lines = lines[33:]
            for chunk in chunks:
                # A trailing partial chunk may lack the header line.
                if len(chunk) < 2:
                    continue
                # Find the mass+Symbol on the second line; skip if absent.
                match = re.search(mass_pattern, chunk[1])
                if match is None:
                    continue
                massSymbol = match.group(1)
                data[massSymbol] = []
                for line in chunk[4:]:
                    # Remove newline/leading space and split into columns.
                    splot = line.lstrip().split(' ')
                    try:
                        # Only use rows that are real data (two columns),
                        # not stray newlines.
                        if len(splot) > 1:
                            # BUG FIX: previously appended to data[name],
                            # an undefined variable whose NameError was
                            # silently swallowed by a bare except.
                            data[massSymbol].append([float(splot[0]),
                                                     float(splot[1])])
                    except ValueError:
                        # pfLI is a known non-data file; stay quiet for it.
                        if file != "pfLI":
                            print(file)
    return data
def load_results(directory):
    """Load TALYS astrorate results found under <directory>/results_data.

    Only files named "astrorate.g" are read; the isotope identifier is
    taken from the first line and the "temperature rate" columns start on
    the fifth line.

    Returns a dict {massSymbol: [[temperature, rate], ...]}.
    """
    # Compile locally: the module-level `pattern` is only defined under
    # the __main__ guard, which broke this function on import.
    mass_pattern = re.compile(r"(\d{1,3}[a-zA-Z]{1,3})")
    data = {}
    # Iterate through the directories and sub-directories
    for root, subdirs, files in os.walk(os.path.join(directory, "results_data")):
        for file in files:
            # Only look at astrorate.g
            if file != "astrorate.g":
                continue
            path = os.path.join(root, file)
            with open(path, "r") as inputfile:
                lines = inputfile.readlines()
            # An empty file has no header line to match.
            if not lines:
                continue
            # Extract the mass+symbol from the first line; skip if absent.
            match = re.search(mass_pattern, lines[0])
            if match is None:
                continue
            massSymbol = match.group(1)
            data[massSymbol] = []
            for line in lines[4:]:
                splot = line.lstrip().split(' ')
                data[massSymbol].append([float(splot[0]),
                                         float(splot[1])])
    return data
def compare(dict1, dict2):
    """Interactively print matching entries of two rate dicts side by side.

    Iterates over dict1's keys (a key missing from dict2 raises KeyError)
    and pauses for Enter after each one. NOTE(review): despite the names,
    `temp` and `rate` are whole [temperature, rate] rows taken from dict1
    and dict2 respectively.
    """
    for key in dict1.keys():
        zips = zip(dict1[key], dict2[key])
        print(key)
        for temp, rate in zips:
            print(temp, rate)
        # Wait for Enter before moving on to the next key.
        input()
if __name__ == "__main__":
    # CLI: paths to the TALYS results tree and the BRUSLIB data tree.
    parser = argparse.ArgumentParser()
    parser.add_argument("talysdirectory")
    parser.add_argument("bruslib")
    args = parser.parse_args()
    # Regex to extract mass+symbol of element
    # (read as a global by the loader functions).
    pattern = re.compile("(\d{1,3}[a-zA-Z]{1,3})")
    # Make the paths absolute (technical detail)
    talys_directory = os.path.abspath(args.talysdirectory)
    bruslib_directory = os.path.abspath(args.bruslib)
    # Get the data
    resdata = load_results(talys_directory)
    brusdata = load_bruslib(bruslib_directory)
    # resdata and brusdata are dicts of the form
    # {MassElement:[Temperature,ReactionRate]}
    # for example:
    # {151Sm:[[0,0.1],[0.1,2.1E-8]...], 152S:[...]}
    # For illustration:
    compare(brusdata, resdata)
| 2,979 | 0 | 69 |
6d5892c0195f851fcec9bb24b95a2255b0e64ac0 | 3,320 | py | Python | tests/test_from_root_xpublic_key.py | phoenixburton/xhdwallet | 37e450fd4eaceff25786b42c56946e29b041375d | [
"ISC"
] | 1 | 2021-06-26T08:11:21.000Z | 2021-06-26T08:11:21.000Z | tests/test_from_root_xpublic_key.py | phoenixburton/xhdwallet | 37e450fd4eaceff25786b42c56946e29b041375d | [
"ISC"
] | null | null | null | tests/test_from_root_xpublic_key.py | phoenixburton/xhdwallet | 37e450fd4eaceff25786b42c56946e29b041375d | [
"ISC"
] | null | null | null | #!/usr/bin/env python3
import json
import os
from hdwallet import HDWallet
# Test Values
base_path: str = os.path.dirname(__file__)
file_path: str = os.path.abspath(os.path.join(base_path, "values.json"))
values = open(file_path, "r", encoding="utf-8")
_: dict = json.loads(values.read())
values.close()
| 52.698413 | 213 | 0.789458 | #!/usr/bin/env python3
import json
import os
from hdwallet import HDWallet
# Test Values
base_path: str = os.path.dirname(__file__)
file_path: str = os.path.abspath(os.path.join(base_path, "values.json"))
values = open(file_path, "r", encoding="utf-8")
_: dict = json.loads(values.read())
values.close()
def test_from_root_xpublic_key():
    """Derive m/44/0/0/0/0 from a Bitcoin mainnet root xpublic key and
    check every watch-only field against the values.json fixture and
    hard-coded expected constants. Private material must stay None,
    since only the public key was imported.
    """
    hdwallet: HDWallet = HDWallet(
        symbol=_["bitcoin"]["mainnet"]["symbol"]
    )
    hdwallet.from_root_xpublic_key(
        xpublic_key=_["bitcoin"]["mainnet"]["root_xpublic_key"], strict=True
    )
    hdwallet.from_path(
        path="m/44/0/0/0/0"
    )
    # Network metadata round-trips from the fixture.
    assert hdwallet.cryptocurrency() == _["bitcoin"]["mainnet"]["cryptocurrency"]
    assert hdwallet.symbol() == _["bitcoin"]["mainnet"]["symbol"]
    assert hdwallet.network() == _["bitcoin"]["mainnet"]["network"]
    # No mnemonic/seed data exists when importing a public key only.
    assert hdwallet.strength() is None
    assert hdwallet.entropy() is None
    assert hdwallet.mnemonic() is None
    assert hdwallet.language() is None
    assert hdwallet.passphrase() is None
    assert hdwallet.seed() is None
    assert hdwallet.root_xprivate_key(encoded=False) is None
    assert hdwallet.root_xprivate_key() is None
    assert hdwallet.root_xpublic_key(encoded=False) == "0488b21e000000000000000000ad41ef910cdcae932cb4060777b4284ee38f5b29c5fb60fda8416f298a14702c02949b9f64223e124eb9a8383fba0b21b5845fcfbdc84dec7692d21c716410eab0"
    assert hdwallet.root_xpublic_key() == "xpub661MyMwAqRbcGGUtsoFw2d6ARvD2ABd7z327zxt2XiBBwMx9GAuNrrE7tbRuWF5MjjZ1BzDsRdaSHc9nVKAgHzQrv6pwYW3Hd7LSzbh8sWS"
    assert hdwallet.xprivate_key(encoded=False) is None
    assert hdwallet.xprivate_key() is None
    assert hdwallet.xpublic_key(encoded=False) == "0488b21e052c0269af000000006c95c19e932b9e8f3d834e874526768ca1b3d89933ad71fd8253bcca67ac283d038f24175db513b40c75503c25040e5f0ea4d38e912d1f83daf5fd8c4b9512ad87"
    assert hdwallet.xpublic_key() == "xpub6FjoSaU1JaG6fC6wTYmb1mJzaZxSunxASN7nTRHhFynh33gKRfmmNrtQ82s8YouLCrEniskjumfACiiTyVmi4aXyLL8HvLdZc8mjKsbzT9z"
    # Derived public-key material at the leaf path.
    assert hdwallet.uncompressed() == "8f24175db513b40c75503c25040e5f0ea4d38e912d1f83daf5fd8c4b9512ad8750a64d9e0ee3555225e4130c7e36a443ec20330bf0be1e4de913e31e00202993"
    assert hdwallet.compressed() == "038f24175db513b40c75503c25040e5f0ea4d38e912d1f83daf5fd8c4b9512ad87"
    assert hdwallet.chain_code() == "6c95c19e932b9e8f3d834e874526768ca1b3d89933ad71fd8253bcca67ac283d"
    assert hdwallet.private_key() is None
    assert hdwallet.public_key() == "038f24175db513b40c75503c25040e5f0ea4d38e912d1f83daf5fd8c4b9512ad87"
    assert hdwallet.wif() is None
    assert hdwallet.finger_print() == "4e749a26"
    assert hdwallet.semantic() == "p2pkh"
    assert hdwallet.path() == "m/44/0/0/0/0"
    assert hdwallet.hash() == "4e749a26934bca5091a05ee6f55e7d0e21482647"
    # All supported address encodings for the same public key.
    assert hdwallet.p2pkh_address() == "189qPd6J81ns9LEGx6kun7Xtg1bJV8GJXh"
    assert hdwallet.p2sh_address() == "3C71bNRojv3Gc7zHvWygas4AFt34rKezcF"
    assert hdwallet.p2wpkh_address() == "bc1qfe6f5f5nf099pydqtmn02hnapcs5sfj86dpqjm"
    assert hdwallet.p2wpkh_in_p2sh_address() == "3NykoodgJ7Li43JPt5xsezQz8xfwwwFZUs"
    assert hdwallet.p2wsh_address() == "bc1qazm6kznlgs06exh4cq2qxh567xrffppwujje5zg84upnng4essusd08nhz"
    assert hdwallet.p2wsh_in_p2sh_address() == "32yGj8ncXBBTjXqg188ZHxd1xffoQDcjin"
    assert isinstance(hdwallet.dumps(), dict)
9378a5d2028a55aa39ca371630252a88f754f0eb | 5,510 | py | Python | examples/toy_dataset.py | feynmanliang/firefly-monte-carlo | 61a5e120d5c340a0e946dfe4231783c5be5aeb28 | [
"MIT"
] | null | null | null | examples/toy_dataset.py | feynmanliang/firefly-monte-carlo | 61a5e120d5c340a0e946dfe4231783c5be5aeb28 | [
"MIT"
] | null | null | null | examples/toy_dataset.py | feynmanliang/firefly-monte-carlo | 61a5e120d5c340a0e946dfe4231783c5be5aeb28 | [
"MIT"
] | 1 | 2019-03-18T01:24:50.000Z | 2019-03-18T01:24:50.000Z | import numpy as np
import numpy.random as npr
from scipy import optimize
import matplotlib as mpl
import matplotlib.pyplot as plt
import pypmc
from mnist import MNIST
from sklearn.decomposition import PCA
import sys
sys.path.append('..')
import flymc as ff
# Set hyperparameters
stepsize = 0.001 # size of Metropolis-Hastings step in theta
th0 = 0.20 # scale of weights
N_steps = 3000
N_ess = 2000
# Cosmetic settings
mpl.rcParams['axes.linewidth'] = 3
mpl.rcParams['lines.linewidth'] = 7
mpl.rcParams['toolbar'] = "None"
mpl.rcParams['figure.facecolor'] = "1"
if __name__ == "__main__":
main()
| 32.60355 | 105 | 0.54755 | import numpy as np
import numpy.random as npr
from scipy import optimize
import matplotlib as mpl
import matplotlib.pyplot as plt
import pypmc
from mnist import MNIST
from sklearn.decomposition import PCA
import sys
sys.path.append('..')
import flymc as ff
# Set hyperparameters
stepsize = 0.001 # size of Metropolis-Hastings step in theta
th0 = 0.20 # scale of weights
N_steps = 3000
N_ess = 2000
# Cosmetic settings
mpl.rcParams['axes.linewidth'] = 3
mpl.rcParams['lines.linewidth'] = 7
mpl.rcParams['toolbar'] = "None"
mpl.rcParams['figure.facecolor'] = "1"
def main():
    """Compare plain MCMC and FlyMC samplers on a 7-vs-9 MNIST subset.

    Loads MNIST digits 7 and 9, reduces them with PCA plus a bias
    column, then runs Metropolis-Hastings over logistic-regression
    weights, periodically dumping trace segments to CSV.
    NOTE(review): this function uses Python 2 print statements and
    relies on py2 filter() returning a list (it is used to index
    ndarrays) — this file is Python 2 only.
    """
    mndata = MNIST('.')
    mndata.load_training()
    #mndata.load_testing()
    # Keep only the 7s and 9s to get a binary classification problem.
    ss_idx = filter(lambda i: mndata.train_labels[i] in [7,9], range(len(mndata.train_images)))
    data_ss = np.array(mndata.train_images)[ss_idx,:]
    label_ss = np.array(mndata.train_labels)[ss_idx]
    pca = PCA(n_components=10) # TODO: change to 50
    pca.fit(data_ss)
    x = pca.transform(data_ss)
    # Append a constant column so the last weight acts as a bias term.
    x = np.concatenate((x, np.ones((x.shape[0],1))),axis=1)
    t = label_ss == 7
    N, D = x.shape
    y0 = 1.5 # \xce in paper
    # Generate synthetic data
    # x = 2 * npr.rand(N,D) - 1 # data features, an (N,D) array
    # x[:, 0] = 1
    # th_true = 10.0 * np.array([0, 1, 1])
    # y = np.dot(x, th_true[:, None])[:, 0]
    # t = npr.rand(N) > (1 / ( 1 + np.exp(y))) # data targets, an (N) array of 0s and 1s
    # Obtain joint distributions over z and th
    # Set up step functions
    def run_model(model, th_init=np.random.randn(D)*th0, q=0.1, fly=False):
        # Run N_steps of MH over theta (and, for FlyMC, brightness vars z),
        # writing the accumulated trace to CSV every N_ess iterations.
        # NOTE(review): th_init's default is drawn once at def time, so
        # repeated calls reuse the same starting point.
        th = th_init
        if fly:
            z = ff.BrightnessVars(N)
        else:
            z = ff.BrightnessVars(N, range(N))
        th_stepper = ff.ThetaStepMH(model.log_p_joint, stepsize)
        if fly:
            z__stepper = ff.zStepMH(model.log_pseudo_lik, q)
        ths = []
        num_rejects = 0
        for i in range(N_steps):
            num_lik_prev = model.num_lik_evals
            if i % N_ess == 0 and i > 0:
                #print pypmc.tools.convergence.ess(ths) # TODO: is this correct?
                #print ess(ths)
                np.savetxt('trace-untuned-{0}.csv'.format(i), np.array(ths))
                ths = []
            th = th_stepper.step(th, z) # Markov transition step for theta
            num_rejects += th_stepper.num_rejects
            if fly:
                z = z__stepper.step(th ,z) # Markov transition step for z
            ths.append(th)
            # Progress line: step, #bright points, likelihood evals this
            # step, acceptance rate so far, negative log marginal.
            print "\t\t".join(
                map(lambda x: "{0:.5f}".format(x), [
                    i,
                    len(z.bright),
                    model.num_lik_evals - num_lik_prev,
                    1.0 - num_rejects / float(i+1),
                    -1.0 * model.log_p_marg(th, increment_ctr=False)
                ]))
        return th
    def ess(th_list):
        # Effective sample size of a trace via summed autocovariances.
        th = np.array(th_list)
        th_mean = np.mean(th, axis=0)
        def autocorr(x, t):
            return np.mean((x[0:len(x)-t,:] - th_mean) * (x[t:len(x),:] - th_mean))
        return 1.0 * th.shape[0] / (1.0 + 2.0 * sum(map(lambda t: autocorr(th,t), range(1,th.shape[0]))))
    # print ess([
    #     np.array([1]),
    #     np.array([1.1]),
    #     np.array([0.9]),
    #     np.array([1])
    # ])
    print "Running MCMC"
    #model_mcmc = ff.LogisticModel(x, t, th0=th0, y0=y0)
    #print run_model(model_mcmc)
    print "Running untuned FlyMC"
    #model_flymc = ff.LogisticModel(x, t, th0=th0, y0=y0)
    #print run_model(model_flymc, q=0.1, fly=True) # q = prob(dim -> bright)
    print "Running MAP-tuned FlyMC"
    # BFGS MAP fit of the weights; presumably LogisticModel uses th_map
    # to tighten its likelihood bounds — confirm against flymc docs.
    _model = ff.LogisticModel(x, t, th0=th0)
    th_map = optimize.minimize(
        fun=lambda th: -1.0*_model.log_p_marg(th),
        x0=np.random.randn(D)*th0,
        jac=lambda th: -1.0*_model.D_log_p_marg(th),
        method='BFGS',
        options={
            'maxiter': 100,
            'disp': True
        })
    model_flymc_map = ff.LogisticModel(x, t, th0=th0, th_map=th_map.x)
    print run_model(
        model_flymc_map,
        #th_init=th_map.x, # TODO: is it okay to initialize at the MAP?
        q=0.01,
        fly=True)
    #print model_flymc_map.num_lik_evals
    # plt.ion()
    # ax = plt.figure(figsize=(8, 6)).add_subplot(111)
    # while True:
    #     th = th_stepper.step(th, z) # Markov transition step for theta
    #     z = z__stepper.step(th ,z) # Markov transition step for z
    #     update_fig(ax, x, y, z, th, t)
    #     plt.draw()
    #     plt.pause(0.05)
def update_fig(ax, x, y, z, th, t):
    """Redraw the 2-D scatter of data points and the decision line.

    Bright points (indices in z.bright) are drawn filled, dark ones
    hollow; squares are class 0, circles class 1 (per mask t).
    NOTE(review): parameter y is unused, and N is local to main(), so
    calling this at module scope would NameError — confirm intended use.
    """
    # 0/1 mask of which data points are currently "bright".
    b = np.zeros(N)
    b[z.bright] = 1
    bright1s = ( t * b ).astype(bool)
    bright0s = ((1-t) * b ).astype(bool)
    dark1s = ( t * (1-b)).astype(bool)
    dark0s = ((1-t) * (1-b)).astype(bool)
    ms, bms, mew = 45, 45, 5
    ax.clear()
    ax.plot(x[dark0s,1], x[dark0s,2], 's', mec='Blue', mfc='None', ms=ms, mew=mew)
    ax.plot(x[dark1s,1], x[dark1s,2], 'o', mec='Red', mfc='None', ms=ms, mew=mew)
    ax.plot(x[bright0s,1], x[bright0s,2],'s', mec='Blue', mfc='Blue', ms=bms, mew=mew)
    ax.plot(x[bright1s,1], x[bright1s,2],'o', mec='Red', mfc='Red', ms=bms, mew=mew)
    # Decision boundary th1 + th2*X + th3*Y = 0 drawn over the data.
    X = np.arange(-3,3)
    th1, th2, th3 = th[0], th[1], th[2]
    Y = (-th1 - th2 * X) / th3
    ax.plot(X, Y, color='grey')
    lim = 1.15
    ax.set_xlim([-lim,lim])
    ax.set_ylim([-lim,lim])
    ax.set_yticks([])
    ax.set_xticks([])
if __name__ == "__main__":
    main()
| 4,836 | 0 | 46 |
722148731fd1c3d9d614a99858a68b4aa7f6117c | 416 | py | Python | 007-operators/2_comparisons.py | zaiddashti/python-tutorial | 9ae325999a79f5f6471e4126995a2219e5ba33a3 | [
"MIT"
] | null | null | null | 007-operators/2_comparisons.py | zaiddashti/python-tutorial | 9ae325999a79f5f6471e4126995a2219e5ba33a3 | [
"MIT"
] | null | null | null | 007-operators/2_comparisons.py | zaiddashti/python-tutorial | 9ae325999a79f5f6471e4126995a2219e5ba33a3 | [
"MIT"
] | null | null | null | x = 10
y = 5
print("x = " + str(x)) # + is concatination and not addition, will be explained later
print("y = " + str(y))
# equal
print("x == y = " + str(x == y))
# not equal
print("x != y = " + str(x != y))
# greater than
print("x > y = " + str(x > y))
# greater than or equal
print("x >= y = " + str(x >= y))
# less than
print("x < y = " + str(x < y))
# less than or equal
print("x <= y = " + str(x <= y)) | 17.333333 | 86 | 0.507212 | x = 10
y = 5
print("x = " + str(x)) # + is concatination and not addition, will be explained later
print("y = " + str(y))
# equal
print("x == y = " + str(x == y))
# not equal
print("x != y = " + str(x != y))
# greater than
print("x > y = " + str(x > y))
# greater than or equal
print("x >= y = " + str(x >= y))
# less than
print("x < y = " + str(x < y))
# less than or equal
print("x <= y = " + str(x <= y)) | 0 | 0 | 0 |
536fd9d7f8332fe614cfe1dd465a0331d4382dd1 | 1,628 | py | Python | tests/test_one.py | rouzbeh-afrasiabi/traintorch | ac3a49e5511463cc60834a8c663f12c02282cc58 | [
"BSD-2-Clause"
] | null | null | null | tests/test_one.py | rouzbeh-afrasiabi/traintorch | ac3a49e5511463cc60834a8c663f12c02282cc58 | [
"BSD-2-Clause"
] | 19 | 2019-10-12T20:18:28.000Z | 2019-12-18T12:20:09.000Z | tests/test_one.py | rouzbeh-afrasiabi/traintorch | ac3a49e5511463cc60834a8c663f12c02282cc58 | [
"BSD-2-Clause"
] | null | null | null | import pytest
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../')
from traintorch import *
import numpy as np
import pandas as pd
| 37 | 107 | 0.593366 | import pytest
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../')
from traintorch import *
import numpy as np
import pandas as pd
class TestClass:
def test_metric(self):
test=metric('test',w_size=10,average=False,xaxis_int=True,n_ticks=(5, 5))
assert test.name=='test'
assert test.w_size==11
assert test.average==False
assert test.xaxis_int==True
assert test.n_ticks==(5,5)
for i in range(0,100):
pass
test.update(x=2*i,t=i,f=3*i)
assert all(np.hstack(test.means)==[5,15,16,48,27,81,38,114,49,147,60,180,71,213,82,246,93,279])
assert test.counter==100
assert test.keys==['x', 't', 'f']
assert test.updated==True
assert all(np.hstack(test.window().values==[[ 89, 267],[ 90, 270],[ 91, 273],[ 92, 276],[ 93, 279],
[ 94, 282],[ 95, 285],[ 96, 288],[ 97, 291],[ 98, 294],[ 99, 297]]))
assert test.x==[198]
def test_traintorch(self):
tracker=traintorch(n_custom_plots=1,main_grid_hspace=.1, figsize=(15,10),show_table=True)
assert tracker.n_custom_plots==1
assert tracker.main_grid_hspace==0.1
assert tracker.figsize==(15,10)
assert tracker.show_table==True
assert tracker.window==100
assert tracker.main_grid_wspace==0.5
test=metric('test',w_size=10,average=False,xaxis_int=True,n_ticks=(5, 5))
for i in range(0,100):
pass
test.update(x=2*i,t=i,f=3*i)
tracker.append([test])
tracker.plot()
| 1,367 | -5 | 84 |
a6eb8c86fea38ca63ed2b3f2345fac1dc5d9b1e5 | 1,196 | py | Python | 7.2.py | Ainevsia/RSA-related | 816101e3951b95b79b59057199179859941f460a | [
"MIT"
] | 7 | 2019-12-31T13:45:36.000Z | 2021-11-14T20:09:02.000Z | 7.2.py | Ainevsia/Algebraic-Number-Theory | 816101e3951b95b79b59057199179859941f460a | [
"MIT"
] | null | null | null | 7.2.py | Ainevsia/Algebraic-Number-Theory | 816101e3951b95b79b59057199179859941f460a | [
"MIT"
] | null | null | null | from fractions import Fraction
from toolkit import *
def bezout(a, b):
"""
:return s and t st. sa + tb = (a,b)
"""
s, t, sn, tn, r = 1, 0, 0, 1, 1
while r != 0:
q, r = divmod(a, b)
st, tt = sn * (-q) + s, tn * (-q) + t
s, t = sn, tn
sn, tn = st, tt
a, b = b, r
return s, t
for i in range(10):
x, y = randint(1), randint(1)
# print(x, y)
if continued_fraction_bezout(x, y) == bezout(x, y):
print('yes')
| 23.92 | 59 | 0.44398 | from fractions import Fraction
from toolkit import *
def bezout(a, b):
"""
:return s and t st. sa + tb = (a,b)
"""
s, t, sn, tn, r = 1, 0, 0, 1, 1
while r != 0:
q, r = divmod(a, b)
st, tt = sn * (-q) + s, tn * (-q) + t
s, t = sn, tn
sn, tn = st, tt
a, b = b, r
return s, t
def bezout_unittest():
x, y = 1859, 1573
s, t = bezout(x, y)
if s * x + t * y == gcd(x, y):
print('yes')
x, y = 7700, 2145
s, t = bezout(x, y)
if s * x + t * y == gcd(x, y):
print('yes')
def continued_fraction_bezout(x, y):
    """Bezout coefficients of (x, y) via continued-fraction convergents.

    Expands x/y as a continued fraction and returns (s, t) with
    s*x + t*y == gcd(x, y), or None if more than 10 terms are needed.
    """
    frac = Fraction(x, y)
    # Convergent numerators P and denominators Q (two trailing values).
    p_prev2, p_prev1 = 0, 1
    q_prev2, q_prev1 = 1, 0
    for i in range(10):
        term = int(frac)
        p_cur = term * p_prev1 + p_prev2
        q_cur = term * q_prev1 + q_prev2
        p_prev2, p_prev1 = p_prev1, p_cur
        q_prev2, q_prev1 = q_prev1, q_cur
        remainder = frac - term
        if remainder == 0:
            # Sign follows the identity P(n-1)Q(n) - P(n)Q(n-1) = (-1)^n.
            s = q_prev2 if i & 1 else -q_prev2
            t = -p_prev2 if i & 1 else p_prev2
            return s, t
        frac = 1 / remainder
# Spot-check: both Bezout implementations should agree on random pairs.
# NOTE(review): randint comes from `toolkit` (star import); its one-arg
# signature differs from random.randint — confirm the range it produces.
for i in range(10):
    x, y = randint(1), randint(1)
    # print(x, y)
    if continued_fraction_bezout(x, y) == bezout(x, y):
        print('yes')
| 659 | 0 | 46 |
9763e6da8feb9a37927f198790d3720a08b1459d | 31 | py | Python | py/desisim/_version.py | Andrea-MG/desisim | 8308fc44cdc86aea14155b0db5c6f529eeea8423 | [
"BSD-3-Clause"
] | null | null | null | py/desisim/_version.py | Andrea-MG/desisim | 8308fc44cdc86aea14155b0db5c6f529eeea8423 | [
"BSD-3-Clause"
] | null | null | null | py/desisim/_version.py | Andrea-MG/desisim | 8308fc44cdc86aea14155b0db5c6f529eeea8423 | [
"BSD-3-Clause"
] | null | null | null | __version__ = '0.36.0.dev2370'
| 15.5 | 30 | 0.709677 | __version__ = '0.36.0.dev2370'
| 0 | 0 | 0 |
581ecf182716bfe8d168804a5a6851de4f83d7ac | 1,146 | py | Python | wbcms/tiger/urls.py | westurner/wbcms | 9a5c6ae4b07997149b418fcdeefd47c24897c276 | [
"BSD-3-Clause"
] | null | null | null | wbcms/tiger/urls.py | westurner/wbcms | 9a5c6ae4b07997149b418fcdeefd47c24897c276 | [
"BSD-3-Clause"
] | null | null | null | wbcms/tiger/urls.py | westurner/wbcms | 9a5c6ae4b07997149b418fcdeefd47c24897c276 | [
"BSD-3-Clause"
] | null | null | null | from django.conf.urls.defaults import *
from django.contrib import admin
from tiger.views import course_request_create, course_request_update
from tiger.views import course_list, course_detail
from tiger.views import *
from tiger.models import *
#admin.autodiscover()
urlpatterns = patterns('',
# Courses
url(r'courses/$',
course_list,
name='course_list'),
# Course Requests
url(r'courses/(?P<slug>[\w-]+)/$',
course_request_create,
name='course_request_create'),
url(r'requests/$',
course_request_list,
name='course_request_list'),
url('requests/(?P<id>[\w-]+)/$',
course_request_detail,
name='course_request_detail'),
url(r'requests/create/$',
course_request_create,
name='course_request_create_blank'),
url(r'requests/(?P<id>[\w-]+)/update/$',
course_request_update,
name='course_request_update'),
url(r'requests/(?P<id>[\w-]+)/cancel/$',
course_request_cancel,
name='course_request_cancel'),
)
| 27.95122 | 68 | 0.597731 | from django.conf.urls.defaults import *
from django.contrib import admin
from tiger.views import course_request_create, course_request_update
from tiger.views import course_list, course_detail
from tiger.views import *
from tiger.models import *
#admin.autodiscover()
urlpatterns = patterns('',
# Courses
url(r'courses/$',
course_list,
name='course_list'),
# Course Requests
url(r'courses/(?P<slug>[\w-]+)/$',
course_request_create,
name='course_request_create'),
url(r'requests/$',
course_request_list,
name='course_request_list'),
url('requests/(?P<id>[\w-]+)/$',
course_request_detail,
name='course_request_detail'),
url(r'requests/create/$',
course_request_create,
name='course_request_create_blank'),
url(r'requests/(?P<id>[\w-]+)/update/$',
course_request_update,
name='course_request_update'),
url(r'requests/(?P<id>[\w-]+)/cancel/$',
course_request_cancel,
name='course_request_cancel'),
)
| 0 | 0 | 0 |
437d19ab2a008ce7b37a195c9cc13ba65d14333f | 1,373 | py | Python | app.py | Mann1904/Handwritten-Digit-Recognizer- | bd0f7ca7b59c8897e876a4cb66d5319f4c25cdbd | [
"MIT"
] | null | null | null | app.py | Mann1904/Handwritten-Digit-Recognizer- | bd0f7ca7b59c8897e876a4cb66d5319f4c25cdbd | [
"MIT"
] | null | null | null | app.py | Mann1904/Handwritten-Digit-Recognizer- | bd0f7ca7b59c8897e876a4cb66d5319f4c25cdbd | [
"MIT"
] | null | null | null | import base64
import json
import re
import os
import numpy as np
import tensorflow as tf
from flask import Flask, render_template, request
from scipy.misc import imread, imresize
app = Flask(__name__)
global model
# load model
with open('model/model_in_json.json', 'r') as f:
model_json = json.load(f)
model = tf.keras.models.model_from_json(model_json)
model.load_weights("model/model.h5")
@app.route('/')
@app.route('/predict/', methods=['GET', 'POST'])
if __name__ == '__main__':
app.debug = True
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
| 24.087719 | 63 | 0.669337 | import base64
import json
import re
import os
import numpy as np
import tensorflow as tf
from flask import Flask, render_template, request
from scipy.misc import imread, imresize
app = Flask(__name__)
global model
# load model
with open('model/model_in_json.json', 'r') as f:
model_json = json.load(f)
model = tf.keras.models.model_from_json(model_json)
model.load_weights("model/model.h5")
@app.route('/')
def index():
    """Serve the drawing-canvas front end (templates/index.html)."""
    return render_template("index.html")
@app.route('/predict/', methods=['GET', 'POST'])
def predict():
    """Classify the digit drawn on the canvas.

    Decodes the posted base64 image to output.png, preprocesses it to
    the 28x28 single-channel shape the model expects, and returns the
    predicted class index as a string.
    """
    # get data from drawing canvas and save as image
    parseImage(request.get_data())
    # read parsed image back in 8-bit, black and white mode (L)
    x = imread('output.png', mode='L')
    # Invert: presumably the canvas is dark-on-light while the model was
    # trained on light-on-dark digits — confirm against training data.
    x = np.invert(x)
    x = imresize(x, (28, 28))
    # reshape image data for use in neural network
    x = x.reshape(1, 28, 28, 1)
    out = model.predict(x)
    print(out)
    print(np.argmax(out, axis=1))
    # Highest-probability class, rendered as e.g. "[7]".
    response = np.array_str(np.argmax(out, axis=1))
    return response
def parseImage(imgData):
# parse canvas bytes and save as output.png
imgstr = re.search(b'base64,(.*)', imgData).group(1)
with open('output.png', 'wb') as output:
output.write(base64.decodebytes(imgstr))
if __name__ == '__main__':
app.debug = True
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
| 703 | 0 | 67 |
a3857e61cbdf7c495387480eee37655fd30e07e5 | 39,728 | py | Python | awssystemsmanager_connector.py | splunk-soar-connectors/awssystemsmanager | 13009ae4a868086dd9c9102acf55c470120f2d08 | [
"Apache-2.0"
] | 1 | 2022-02-13T16:39:24.000Z | 2022-02-13T16:39:24.000Z | awssystemsmanager_connector.py | splunk-soar-connectors/awssystemsmanager | 13009ae4a868086dd9c9102acf55c470120f2d08 | [
"Apache-2.0"
] | 2 | 2021-12-09T00:26:55.000Z | 2022-03-09T06:24:56.000Z | awssystemsmanager_connector.py | splunk-soar-connectors/awssystemsmanager | 13009ae4a868086dd9c9102acf55c470120f2d08 | [
"Apache-2.0"
] | null | null | null | # File: awssystemsmanager_connector.py
#
# Copyright (c) 2019-2022 Splunk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
#
#
# Phantom App imports
import ast
import base64
import json
import os
import sys
import tempfile
import time
from datetime import datetime
import botocore.paginate as bp
import botocore.response as br
import phantom.app as phantom
import phantom.rules as ph_rules
import requests
from boto3 import Session, client
from botocore.config import Config
from bs4 import UnicodeDammit
from phantom.action_result import ActionResult
from phantom.base_connector import BaseConnector
from phantom.vault import Vault
# Usage of the consts file is recommended
from awssystemsmanager_consts import *
if __name__ == '__main__':
import argparse
import pudb
pudb.set_trace()
argparser = argparse.ArgumentParser()
argparser.add_argument('input_test_json', help='Input Test JSON file')
argparser.add_argument('-u', '--username', help='username', required=False)
argparser.add_argument('-p', '--password', help='password', required=False)
argparser.add_argument('-v', '--verify', action='store_true', help='verify', required=False, default=False)
args = argparser.parse_args()
session_id = None
username = args.username
password = args.password
verify = args.verify
if username is not None and password is None:
# User specified a username but not a password, so ask
import getpass
password = getpass.getpass("Password: ")
if username and password:
login_url = BaseConnector._get_phantom_base_url() + "login"
try:
print("Accessing the Login page")
r = requests.get(login_url, verify=verify, timeout=DEFAULT_REQUEST_TIMEOUT)
csrftoken = r.cookies['csrftoken']
data = dict()
data['username'] = username
data['password'] = password
data['csrfmiddlewaretoken'] = csrftoken
headers = dict()
headers['Cookie'] = 'csrftoken=' + csrftoken
headers['Referer'] = login_url
print("Logging into Platform to get the session id")
r2 = requests.post(login_url, verify=verify, data=data, headers=headers, timeout=DEFAULT_REQUEST_TIMEOUT)
session_id = r2.cookies['sessionid']
except Exception as e:
print("Unable to get session id from the platform. Error: {}".format(str(e)))
sys.exit(1)
with open(args.input_test_json) as f:
in_json = f.read()
in_json = json.loads(in_json)
print(json.dumps(in_json, indent=4))
connector = AwsSystemsManagerConnector()
connector.print_progress_message = True
if session_id is not None:
in_json['user_session_token'] = session_id
connector._set_csrf_info(csrftoken, headers['Referer'])
ret_val = connector._handle_action(json.dumps(in_json), None)
print(json.dumps(json.loads(ret_val), indent=4))
sys.exit(0)
| 39.569721 | 140 | 0.626636 | # File: awssystemsmanager_connector.py
#
# Copyright (c) 2019-2022 Splunk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
#
#
# Phantom App imports
import ast
import base64
import json
import os
import sys
import tempfile
import time
from datetime import datetime
import botocore.paginate as bp
import botocore.response as br
import phantom.app as phantom
import phantom.rules as ph_rules
import requests
from boto3 import Session, client
from botocore.config import Config
from bs4 import UnicodeDammit
from phantom.action_result import ActionResult
from phantom.base_connector import BaseConnector
from phantom.vault import Vault
# Usage of the consts file is recommended
from awssystemsmanager_consts import *
class RetVal(tuple):
def __new__(cls, val1, val2=None):
return tuple.__new__(RetVal, (val1, val2))
class AwsSystemsManagerConnector(BaseConnector):
def __init__(self):
# Call the BaseConnectors init first
super(AwsSystemsManagerConnector, self).__init__()
self._state = None
self._region = None
self._access_key = None
self._secret_key = None
self._session_token = None
self._default_s3_bucket = None
self._proxy = None
self._python_version = None
def _sanitize_data(self, cur_obj):
try:
json.dumps(cur_obj)
return cur_obj
except:
pass
if isinstance(cur_obj, dict):
new_dict = {}
for k, v in cur_obj.items():
if isinstance(v, br.StreamingBody):
content = v.read()
new_dict[k] = json.loads(content)
else:
new_dict[k] = self._sanitize_data(v)
return new_dict
if isinstance(cur_obj, list):
new_list = []
for v in cur_obj:
new_list.append(self._sanitize_data(v))
return new_list
if isinstance(cur_obj, datetime):
return cur_obj.strftime("%Y-%m-%d %H:%M:%S")
if isinstance(cur_obj, bp.PageIterator):
new_dict = dict()
try:
for page in cur_obj:
new_dict.update(page)
return new_dict
except Exception as e:
return { 'error': e }
return cur_obj
def _handle_py_ver_compat_for_input_str(self, input_str):
"""
This method returns the encoded|original string based on the Python version.
:param input_str: Input string to be processed
:return: input_str (Processed input string based on following logic 'input_str - Python 3; encoded input_str - Python 2')
"""
try:
if input_str and self._python_version < 3:
input_str = UnicodeDammit(input_str).unicode_markup.encode('utf-8')
except:
self.debug_print("Error occurred while handling python 2to3 compatibility for the input string")
return input_str
def _handle_get_ec2_role(self):
session = Session(region_name=self._region)
credentials = session.get_credentials()
return credentials
def _make_boto_call(self, action_result, method, paginate=False, empty_payload=False, **kwargs):
if paginate is False:
try:
boto_func = getattr(self._client, method)
except AttributeError:
return RetVal(action_result.set_status(phantom.APP_ERROR, "Invalid method: {0}".format(method)), None)
try:
resp_json = boto_func(**kwargs)
if empty_payload:
resp_json['Payload'] = {'body': "", 'statusCode': resp_json['StatusCode']}
except Exception as e:
exception_message = self._handle_py_ver_compat_for_input_str(e.args[0]).strip()
if 'BucketAlreadyExists' in exception_message:
return phantom.APP_SUCCESS, None
return RetVal(action_result.set_status(phantom.APP_ERROR, 'boto3 call to SSM failed', exception_message), None)
else:
try:
paginator = self._client.get_paginator(method)
resp_json = paginator.paginate(**kwargs)
except Exception as e:
return RetVal(action_result.set_status(phantom.APP_ERROR, 'boto3 call to SSM failed', e), None)
return phantom.APP_SUCCESS, self._sanitize_data(resp_json)
def _sanatize_dates(self, cur_obj):
try:
json.dumps(cur_obj)
return cur_obj
except:
pass
if isinstance(cur_obj, dict):
new_dict = {}
for k, v in cur_obj.items():
new_dict[k] = self._sanatize_dates(v)
return new_dict
if isinstance(cur_obj, list):
new_list = []
for v in cur_obj:
new_list.append(self._sanatize_dates(v))
return new_list
if isinstance(cur_obj, datetime):
return cur_obj.strftime("%Y-%m-%d %H:%M:%S")
return cur_obj
def _make_s3_boto_call(self, action_result, method, **kwargs):
try:
boto_func = getattr(self._client, method)
except AttributeError:
return RetVal(action_result.set_status(phantom.APP_ERROR, "Invalid method: {0}".format(method)), None)
try:
resp_json = boto_func(**kwargs)
except Exception as e:
return RetVal(action_result.set_status(phantom.APP_ERROR, 'boto3 call to S3 failed', e), None)
return phantom.APP_SUCCESS, resp_json
def _create_client(self, action_result, param=None):
boto_config = None
if self._proxy:
boto_config = Config(proxies=self._proxy)
# Try getting and using temporary assume role credentials from parameters
temp_credentials = dict()
if param and 'credentials' in param:
try:
temp_credentials = ast.literal_eval(param['credentials'])
self._access_key = temp_credentials.get('AccessKeyId', '')
self._secret_key = temp_credentials.get('SecretAccessKey', '')
self._session_token = temp_credentials.get('SessionToken', '')
self.save_progress("Using temporary assume role credentials for action")
except Exception as e:
return action_result.set_status(phantom.APP_ERROR,
"Failed to get temporary credentials: {}".format(e))
try:
if self._access_key and self._secret_key:
self.debug_print("Creating boto3 client with API keys")
self._client = client(
'ssm',
region_name=self._region,
aws_access_key_id=self._access_key,
aws_secret_access_key=self._secret_key,
aws_session_token=self._session_token,
config=boto_config)
else:
self.debug_print("Creating boto3 client without API keys")
self._client = client(
'ssm',
region_name=self._region,
config=boto_config)
except Exception as e:
return action_result.set_status(phantom.APP_ERROR, "Could not create boto3 client: {0}".format(e))
return phantom.APP_SUCCESS
def _create_s3_client(self, action_result, param=None):
boto_config = None
if self._proxy:
boto_config = Config(proxies=self._proxy)
# Try getting and using temporary assume role credentials from parameters
temp_credentials = dict()
if param and 'credentials' in param:
try:
temp_credentials = ast.literal_eval(param['credentials'])
self._access_key = temp_credentials.get('AccessKeyId', '')
self._secret_key = temp_credentials.get('SecretAccessKey', '')
self._session_token = temp_credentials.get('SessionToken', '')
self.save_progress("Using temporary assume role credentials for action")
except Exception as e:
return action_result.set_status(phantom.APP_ERROR,
"Failed to get temporary credentials: {0}".format(e))
try:
if self._access_key and self._secret_key:
self.debug_print("Creating boto3 client with API keys")
self._client = client(
's3',
region_name=self._region,
aws_access_key_id=self._access_key,
aws_secret_access_key=self._secret_key,
aws_session_token=self._session_token,
config=boto_config)
else:
self.debug_print("Creating boto3 client without API keys")
self._client = client(
's3',
region_name=self._region,
config=boto_config)
except Exception as e:
return self.set_status(phantom.APP_ERROR, "Could not create boto3 client: {0}".format(e))
return phantom.APP_SUCCESS
def _get_s3_bucket(self, action_result, output_s3_bucket_name, param):
self._create_s3_client(action_result, param)
ret_val, resp_json = self._make_boto_call(action_result, 'get_bucket_accelerate_configuration', Bucket=output_s3_bucket_name)
return ret_val
def _create_s3_bucket(self, action_result, output_s3_bucket_name, param):
self._create_s3_client(action_result, param)
location = {'LocationConstraint': SSM_REGION_DICT[self.get_config()['region']]}
if not output_s3_bucket_name:
output_s3_bucket_name = self._default_s3_bucket
# boto3 bug
if location['LocationConstraint'] == 'us-east-1':
ret_val, resp_json = self._make_boto_call(action_result, 'create_bucket', Bucket=output_s3_bucket_name)
else:
ret_val, resp_json = self._make_boto_call(action_result,
'create_bucket', Bucket=output_s3_bucket_name, CreateBucketConfiguration=location)
return ret_val, output_s3_bucket_name
def _get_s3_object(self, action_result, output_s3_bucket_name, output_s3_object_key, save_output_to_vault, file_name, param):
self._create_s3_client(action_result, param)
ret_val, resp_json = self._make_s3_boto_call(action_result, 'get_object', Bucket=output_s3_bucket_name, Key=output_s3_object_key)
if phantom.is_fail(ret_val):
return ret_val
try:
file_data = resp_json.pop('Body').read()
# This conditional means 'get file' action has been called. Decodes the base64 string written by 'send command'
if file_name:
file_data = base64.b64decode(file_data)
except:
return action_result.set_status(phantom.APP_ERROR, "Could not retrieve object body from boto response")
result_json = {}
result_json['output'] = file_data
if save_output_to_vault:
if hasattr(file_data, 'decode'):
file_data = file_data.decode('utf-8')
if hasattr(Vault, 'get_vault_tmp_dir'):
vault_path = Vault.get_vault_tmp_dir()
else:
vault_path = '/vault/tmp/'
file_desc, file_path = tempfile.mkstemp(dir=vault_path)
outfile = open(file_path, 'w')
outfile.write(file_data)
outfile.close()
os.close(file_desc)
try:
# This conditional means 'get file' action has been called. This updates the correct filename that is written into the vault
if file_name:
success, message, vault_id = ph_rules.vault_add(file_location=file_path,
container=self.get_container_id(), file_name=file_name)
result_json['filename'] = file_name
# We do not need to return output for 'get file' action
result_json.pop('output', None)
# This conditional means 'execute program' action has been called. This will name the file
# as either 'stdout' or 'stderr' into the vault
else:
success, message, vault_id = ph_rules.vault_add(file_location=file_path,
container=self.get_container_id(), file_name=os.path.basename(output_s3_object_key))
result_json['filename'] = os.path.basename(output_s3_object_key)
except Exception as e:
return action_result.set_status(phantom.APP_ERROR, "Could not file to vault: {0}".format(e))
if not success:
return action_result.set_status(phantom.APP_ERROR, "Could not save file to vault: {0}".format(message))
result_json['vault_id'] = vault_id
action_result.set_summary({"created_vault_id": vault_id})
result_json['s3_object_key'] = output_s3_object_key
return ret_val, result_json
def _handle_test_connectivity(self, param):
# Add an action result object to self (BaseConnector) to represent the action for this param
action_result = self.add_action_result(ActionResult(dict(param)))
self.save_progress("Querying AWS to check credentials")
if not self._create_client(action_result, param):
return action_result.get_status()
# make rest call
ret_val, resp_json = self._make_boto_call(action_result, 'list_commands', MaxResults=1)
if phantom.is_fail(ret_val):
self.save_progress("Test Connectivity Failed.")
return action_result.get_status()
# Return success
self.save_progress("Test Connectivity Passed")
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_send_command(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
# Add an action result object to self (BaseConnector) to represent the action for this param
action_result = self.add_action_result(ActionResult(dict(param)))
instance_id = param['instance_id']
platform_type = param['platform_type']
if platform_type == 'Windows':
document_name = POWERSHELL_DOCUMENT
document_hash = POWERSHELL_DOC_HASH
object_path = 'awsrunPowerShellScript/0.awsrunPowerShellScript/stdout'
else:
document_name = LINUX_DOCUMENT
document_hash = LINUX_DOC_HASH
object_path = 'awsrunShellScript/0.awsrunShellScript/stdout'
# If running get_file, 'cat' the file into an S3 bucket
if self.get_action_identifier() == 'get_file':
file_path = param['file_path'].replace('\\', '/')
file_name = file_path.split('/')[-1]
if platform_type == 'Windows':
command = '[Convert]::ToBase64String([IO.File]::ReadAllBytes("{}"))'.format(file_path)
else:
command = 'cat ' + file_path + ' | base64'
save_output_to_vault = True
else:
command = param['command']
file_name = None
save_output_to_vault = param.get('save_output_to_vault')
output_s3_bucket_name = param.get('output_s3_bucket_name')
working_directory = param.get('working_directory')
timeout_seconds = param.get('timeout_seconds')
comment = param.get('comment')
if not output_s3_bucket_name:
output_s3_bucket_name = self._default_s3_bucket
# Create S3 bucket to store command output if it does not already exist
if self._get_s3_bucket(action_result, output_s3_bucket_name, param) is False:
ret_val, output_s3_bucket_name = self._create_s3_bucket(action_result, output_s3_bucket_name, param)
if ret_val is False:
return action_result.set_status(phantom.APP_ERROR, "Failed to create S3 bucket")
args = {
'InstanceIds': [instance_id],
'DocumentName': document_name,
'DocumentHash': document_hash,
'DocumentHashType': 'Sha256',
'OutputS3BucketName': output_s3_bucket_name,
'Parameters': {
'commands': [command]
}
}
if working_directory:
args['Parameters']['workingDirectory'] = [working_directory]
if timeout_seconds:
args['TimeoutSeconds'] = timeout_seconds
if comment:
args['Comment'] = comment
if not self._create_client(action_result, param):
return action_result.get_status()
# Executes the shell program via SSM boto call
ret_val, response = self._make_boto_call(action_result, 'send_command', **args)
if phantom.is_fail(ret_val):
return action_result.get_status()
result_json = {"Command": response['Command']}
result_json['ResponseMetadata'] = response['ResponseMetadata']
output_s3_object_key = response['Command']['CommandId'] + '/' + instance_id + '/' + object_path
# Give time for command output to be written to S3 bucket
time.sleep(10)
try:
ret_val, resp_json = self._get_s3_object(action_result, output_s3_bucket_name,
output_s3_object_key, save_output_to_vault, file_name, param)
except Exception:
# Look for stderr file if stdout file was not found. If this is get_file action, then action fails with a no file found message.
try:
if self.get_action_identifier() == 'get_file':
return action_result.set_status(phantom.APP_ERROR,
"{}: No such file found. Please check full file path (include filename)".format(file_path))
output_s3_object_key = output_s3_object_key.replace('stdout', 'stderr')
ret_val, resp_json = self._get_s3_object(action_result, output_s3_bucket_name,
output_s3_object_key, save_output_to_vault, file_name, param)
except Exception:
return action_result.set_status(phantom.APP_ERROR, "Failed to get S3 object")
result_json["File"] = resp_json
# Add the response into the data section
action_result.add_data(result_json)
# Add a dictionary that is made up of the most important values from data into the summary
summary = action_result.update_summary({})
if self.get_action_identifier() == 'get_file':
summary['status'] = "Successfully downloaded file into the vault"
else:
summary['status'] = "Successfully executed program"
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_execute_program(self, param):
return self._handle_send_command(param)
def _handle_get_file(self, param):
return self._handle_send_command(param)
def _handle_run_document(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
# Add an action result object to self (BaseConnector) to represent the action for this param
action_result = self.add_action_result(ActionResult(dict(param)))
output_s3_bucket_name = param.get('output_s3_bucket_name')
output_s3_key_prefix = param.get('output_s3_key_prefix')
if output_s3_bucket_name:
# Create S3 bucket to store command output if it does not already exist
if self._get_s3_bucket(action_result, output_s3_bucket_name, param) is False:
ret_val, output_s3_bucket_name = self._create_s3_bucket(action_result, output_s3_bucket_name, param)
if ret_val is False:
return action_result.get_status()
if not self._create_client(action_result, param):
return action_result.get_status()
instance_id = param['instance_id']
document_name = param['document_name']
document_hash = param['document_hash']
if phantom.is_sha1(document_hash):
document_hash_type = 'Sha1'
else:
document_hash_type = 'Sha256'
try:
parameters = json.loads(param['parameters'])
except Exception as e:
return RetVal(action_result.set_status(phantom.APP_ERROR, "Invalid JSON for Parameters. Error: {0}".format(str(e))), None)
working_directory = param.get('working_directory')
timeout_seconds = param.get('timeout_seconds')
comment = param.get('comment')
args = {
'InstanceIds': [instance_id],
'DocumentName': document_name,
'DocumentHash': document_hash,
'DocumentHashType': document_hash_type,
'Parameters': parameters
}
if working_directory:
args['Parameters']['workingDirectory'] = [working_directory]
if timeout_seconds:
args['TimeoutSeconds'] = timeout_seconds
if comment:
args['Comment'] = comment
if output_s3_bucket_name:
args['OutputS3BucketName'] = output_s3_bucket_name
if output_s3_key_prefix:
args['OutputS3KeyPrefix'] = output_s3_key_prefix
# make rest call
ret_val, response = self._make_boto_call(action_result, 'send_command', **args)
if phantom.is_fail(ret_val):
return action_result.get_status()
# Add the response into the data section
action_result.add_data(response)
# Add a dictionary that is made up of the most important values from data into the summary
summary = action_result.update_summary({})
summary['status'] = "Successfully sent command"
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_list_commands(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
# Add an action result object to self (BaseConnector) to represent the action for this param
action_result = self.add_action_result(ActionResult(dict(param)))
if not self._create_client(action_result, param):
return action_result.get_status()
total_commands = 0
max_results = param.get('max_results')
command_id = param.get('command_id')
instance_id = param.get('instance_id')
while True:
limit = None
if max_results == 0:
return action_result.set_status(phantom.APP_ERROR, "MaxResults parameter must be greater than 0")
elif max_results is not None and max_results > 50:
limit = max_results
max_results = None
next_token = param.get('next_token')
args = {}
if command_id:
args['CommandId'] = command_id
if instance_id:
args['InstanceId'] = instance_id
if max_results:
args['MaxResults'] = max_results
if next_token:
args['NextToken'] = next_token
# make rest call
ret_val, response = self._make_boto_call(action_result, 'list_commands', **args)
if phantom.is_fail(ret_val):
return action_result.get_status()
num_commands = len(response['Commands'])
total_commands += num_commands
self.debug_print("Found {0} commands in last list_commands response".format(num_commands))
# handles limitation of boto3 pagination results greater than 50
if limit is not None:
action_result.add_data(response)
limit = limit - num_commands
max_results = limit
if response.get('NextToken'):
param['next_token'] = response.get('NextToken')
continue
else:
# Add a dictionary that is made up of the most important values from data into the summary
summary = action_result.update_summary({})
summary['num_commands'] = total_commands
return action_result.set_status(phantom.APP_SUCCESS)
# Add the response into the data section
action_result.add_data(response)
next_token = response.get('NextToken')
if next_token and (max_results is None or num_commands == 0):
param['next_token'] = response['NextToken']
else:
# Add a dictionary that is made up of the most important values from data into the summary
summary = action_result.update_summary({})
summary['num_commands'] = total_commands
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_list_documents(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
# Add an action result object to self (BaseConnector) to represent the action for this param
action_result = self.add_action_result(ActionResult(dict(param)))
if not self._create_client(action_result, param):
return action_result.get_status()
num_documents = 0
while True:
name = param.get('name')
owner = param.get('owner')
platform_type = param.get('platform_type')
document_type = param.get('document_type')
max_results = param.get('max_results')
# This flag is to handle the special case where max_results is a number greater than 50
flag = False
if max_results == 0:
return action_result.set_status(phantom.APP_ERROR, "MaxResults parameter must be greater than 0")
elif max_results is not None and max_results > 50:
limit = max_results
max_results = None
flag = True
next_token = param.get('next_token')
args = {}
if name or owner or platform_type or document_type:
args['DocumentFilterList'] = []
if name:
name_obj = {'key': 'Name', 'value': name}
args['DocumentFilterList'].append(name_obj)
if owner:
owner_obj = {'key': 'Owner', 'value': owner}
args['DocumentFilterList'].append(owner_obj)
if platform_type:
platform_obj = {'key': 'PlatformTypes', 'value': platform_type}
args['DocumentFilterList'].append(platform_obj)
if document_type:
document_obj = {'key': 'DocumentType', 'value': document_type}
args['DocumentFilterList'].append(document_obj)
if max_results:
args['MaxResults'] = max_results
if next_token:
args['NextToken'] = next_token
self.debug_print("Making list_documents call to get next set of documents.")
# make rest call
ret_val, response = self._make_boto_call(action_result, 'list_documents', **args)
if phantom.is_fail(ret_val):
return action_result.get_status()
next_token = response.get('NextToken')
# boto3 returning incorrect pagination results. This logic corrects the amount of results added
if max_results is not None or flag:
if flag:
upper_bound = limit - num_documents
else:
upper_bound = max_results - num_documents
if upper_bound > len(response['DocumentIdentifiers']):
for document in response['DocumentIdentifiers']:
action_result.add_data(document)
num_documents = num_documents + len(response['DocumentIdentifiers'])
else:
for document in response['DocumentIdentifiers'][0:upper_bound]:
action_result.add_data(document)
num_documents = num_documents + len(response['DocumentIdentifiers'][0:upper_bound])
else:
for document in response['DocumentIdentifiers']:
action_result.add_data(document)
num_documents = num_documents + len(response['DocumentIdentifiers'])
if next_token and max_results is None:
param['next_token'] = response['NextToken']
elif max_results is not None and num_documents < max_results and next_token:
param['next_token'] = response['NextToken']
else:
# Add a dictionary that is made up of the most important values from data into the summary
summary = action_result.update_summary({})
summary['num_documents'] = num_documents
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_get_parameter(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
# Add an action result object to self (BaseConnector) to represent the action for this param
action_result = self.add_action_result(ActionResult(dict(param)))
if not self._create_client(action_result, param):
return action_result.get_status()
name = param['name']
with_decryption = param.get('with_decryption', False)
self.debug_print("Making get_parameter call {0} decryption".format('with' if with_decryption else 'without'))
args = {
'Name': name,
'WithDecryption': with_decryption
}
# make rest call
ret_val, response = self._make_boto_call(action_result, 'get_parameter', **args)
if phantom.is_fail(ret_val):
return action_result.get_status()
# Add the response into the data section
action_result.add_data(response)
# Add a dictionary that is made up of the most important values from data into the summary
summary = action_result.update_summary({})
summary['status'] = "Successfully retrieved parameter"
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_add_parameter(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
# Add an action result object to self (BaseConnector) to represent the action for this param
action_result = self.add_action_result(ActionResult(dict(param)))
if not self._create_client(action_result, param):
return action_result.get_status()
name = param['name']
description = param.get('description')
value = param['value']
type = param['type']
key_id = param.get('key_id')
overwrite = param.get('overwrite')
allowed_pattern = param.get('allowed_pattern')
args = {
'Name': name,
'Value': value,
'Type': type,
'Overwrite': overwrite
}
if description:
args['Description'] = description
if key_id:
args['KeyId'] = key_id
if allowed_pattern:
args['AllowedPattern'] = allowed_pattern
self.debug_print("Making put_parameter call with body: ", args)
# make rest call
ret_val, response = self._make_boto_call(action_result, 'put_parameter', **args)
if phantom.is_fail(ret_val):
return action_result.get_status()
# Add the response into the data section
action_result.add_data(response)
# Add a dictionary that is made up of the most important values from data into the summary
summary = action_result.update_summary({})
summary['status'] = "Successfully added parameter"
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_describe_instance(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
# Add an action result object to self (BaseConnector) to represent the action for this param
action_result = self.add_action_result(ActionResult(dict(param)))
if not self._create_client(action_result, param):
return action_result.get_status()
instance_id = param['instance_id']
instance_information_filter_list = [{'key': 'InstanceIds', 'valueSet': [instance_id]}]
args = {
'InstanceInformationFilterList': instance_information_filter_list
}
# make rest call
ret_val, response = self._make_boto_call(action_result, 'describe_instance_information', **args)
if phantom.is_fail(ret_val):
return action_result.get_status()
self.debug_print("Found {0} instances in describe_instance response".format(len(response['InstanceInformationList'])))
if len(response['InstanceInformationList']) == 0:
return action_result.set_status(phantom.APP_ERROR,
"No SSM instance found. Please check if instance is assigned to a System Manager IAM role.")
# Add the response into the data section
action_result.add_data(response)
# Add a dictionary that is made up of the most important values from data into the summary
summary = action_result.update_summary({})
summary['status'] = "Successfully retrieved instance information"
return action_result.set_status(phantom.APP_SUCCESS)
def handle_action(self, param):
ret_val = phantom.APP_SUCCESS
# Get the action that we are supposed to execute for this App Run
action_id = self.get_action_identifier()
self.debug_print("action_id", self.get_action_identifier())
if action_id == 'test_connectivity':
ret_val = self._handle_test_connectivity(param)
elif action_id == 'list_commands':
ret_val = self._handle_list_commands(param)
elif action_id == 'list_documents':
ret_val = self._handle_list_documents(param)
elif action_id == 'execute_program':
ret_val = self._handle_send_command(param)
elif action_id == 'get_file':
ret_val = self._handle_send_command(param)
elif action_id == 'run_document':
ret_val = self._handle_run_document(param)
elif action_id == 'get_parameter':
ret_val = self._handle_get_parameter(param)
elif action_id == 'add_parameter':
ret_val = self._handle_add_parameter(param)
elif action_id == 'describe_instance':
ret_val = self._handle_describe_instance(param)
return ret_val
def initialize(self):
# Load the state in initialize, use it to store data
# that needs to be accessed across actions
self._state = self.load_state()
# Fetching the Python major version
try:
self._python_version = int(sys.version_info[0])
except:
return self.set_status(phantom.APP_ERROR, "Error occurred while getting the Phantom server's Python major version.")
# get the asset config
config = self.get_config()
self._default_s3_bucket = config.get(SSM_JSON_DEFAULT_S3_BUCKET)
self._region = SSM_REGION_DICT.get(config[SSM_JSON_REGION])
self._proxy = {}
env_vars = config.get('_reserved_environment_variables', {})
if 'HTTP_PROXY' in env_vars:
self._proxy['http'] = env_vars['HTTP_PROXY']['value']
if 'HTTPS_PROXY' in env_vars:
self._proxy['https'] = env_vars['HTTPS_PROXY']['value']
if config.get('use_role'):
credentials = self._handle_get_ec2_role()
if not credentials:
return self.set_status(phantom.APP_ERROR, "Failed to get EC2 role credentials")
self._access_key = credentials.access_key
self._secret_key = credentials.secret_key
self._session_token = credentials.token
return phantom.APP_SUCCESS
self._access_key = config.get(SSM_JSON_ACCESS_KEY)
self._secret_key = config.get(SSM_JSON_SECRET_KEY)
if not (self._access_key and self._secret_key):
return self.set_status(phantom.APP_ERROR, SSM_JSON_BAD_ASSET_CONFIG_MSG)
return phantom.APP_SUCCESS
    def finalize(self):
        """Persist self._state; called once at the end of an app run."""
        # Save the state, this data is saved across actions and app upgrades
        self.save_state(self._state)
        return phantom.APP_SUCCESS
if __name__ == '__main__':
    # Stand-alone test harness: replays a saved action JSON through the
    # connector, optionally logging in to the platform for a session token.
    import argparse
    import pudb
    # NOTE(review): drops into the interactive debugger unconditionally.
    pudb.set_trace()
    argparser = argparse.ArgumentParser()
    argparser.add_argument('input_test_json', help='Input Test JSON file')
    argparser.add_argument('-u', '--username', help='username', required=False)
    argparser.add_argument('-p', '--password', help='password', required=False)
    argparser.add_argument('-v', '--verify', action='store_true', help='verify', required=False, default=False)
    args = argparser.parse_args()
    session_id = None
    username = args.username
    password = args.password
    verify = args.verify
    if username is not None and password is None:
        # User specified a username but not a password, so ask
        import getpass
        password = getpass.getpass("Password: ")
    if username and password:
        # Log in to the platform UI once to obtain a CSRF token and
        # session id that the connector can reuse.
        login_url = BaseConnector._get_phantom_base_url() + "login"
        try:
            print("Accessing the Login page")
            r = requests.get(login_url, verify=verify, timeout=DEFAULT_REQUEST_TIMEOUT)
            csrftoken = r.cookies['csrftoken']
            data = dict()
            data['username'] = username
            data['password'] = password
            data['csrfmiddlewaretoken'] = csrftoken
            headers = dict()
            headers['Cookie'] = 'csrftoken=' + csrftoken
            headers['Referer'] = login_url
            print("Logging into Platform to get the session id")
            r2 = requests.post(login_url, verify=verify, data=data, headers=headers, timeout=DEFAULT_REQUEST_TIMEOUT)
            session_id = r2.cookies['sessionid']
        except Exception as e:
            print("Unable to get session id from the platform. Error: {}".format(str(e)))
            sys.exit(1)
    with open(args.input_test_json) as f:
        in_json = f.read()
        in_json = json.loads(in_json)
        print(json.dumps(in_json, indent=4))
        connector = AwsSystemsManagerConnector()
        connector.print_progress_message = True
        if session_id is not None:
            # Forward the authenticated session to the connector.
            in_json['user_session_token'] = session_id
            connector._set_csrf_info(csrftoken, headers['Referer'])
        ret_val = connector._handle_action(json.dumps(in_json), None)
        print(json.dumps(json.loads(ret_val), indent=4))
    sys.exit(0)
| 34,780 | 1,335 | 72 |
16a5773dd7741d23aedbb34a48de67ed172aaeaa | 844 | py | Python | app/models.py | mrgnr/flask-vanilla-demo | 4c316e2e2ae747e84ab2d2ac44056d3dee528b91 | [
"BSD-2-Clause"
] | null | null | null | app/models.py | mrgnr/flask-vanilla-demo | 4c316e2e2ae747e84ab2d2ac44056d3dee528b91 | [
"BSD-2-Clause"
] | null | null | null | app/models.py | mrgnr/flask-vanilla-demo | 4c316e2e2ae747e84ab2d2ac44056d3dee528b91 | [
"BSD-2-Clause"
] | null | null | null | from app import db
| 25.575758 | 69 | 0.633886 | from app import db
class Product(db.Model):
    """A purchasable item belonging to a single Category."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
    price = db.Column(db.Float)
    # Path to the product image; presumably relative to the app's static
    # folder -- confirm against the templates that render it.
    image_path = db.Column(db.String(255))
    category_id = db.Column(db.Integer, db.ForeignKey("category.id"))
    # Backref side: Category.products becomes a lazy "dynamic" query.
    category = db.relationship(
        "Category", backref=db.backref("products", lazy="dynamic")
    )
    def __init__(self, name, price, category, image_path):
        self.name = name
        self.price = price
        self.category = category
        self.image_path = image_path
    def __repr__(self):
        # Repr is just the display name (used directly in templates/admin).
        return f"{self.name}"
class Category(db.Model):
    """A named grouping of products (one-to-many via Product.category)."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
    def __init__(self, name):
        self.name = name
    def __repr__(self):
        # Repr is just the display name (used directly in templates/admin).
        return f"{self.name}"
| 240 | 537 | 46 |
f156dd7fa54181c6990291348307f470c438b030 | 544 | py | Python | src/utils/report_months.py | kimvanwyk/ndlc_finances | 7c19a2a9778875fcb08dd7b98685d3aa36f5c7b1 | [
"BSD-3-Clause"
] | null | null | null | src/utils/report_months.py | kimvanwyk/ndlc_finances | 7c19a2a9778875fcb08dd7b98685d3aa36f5c7b1 | [
"BSD-3-Clause"
] | null | null | null | src/utils/report_months.py | kimvanwyk/ndlc_finances | 7c19a2a9778875fcb08dd7b98685d3aa36f5c7b1 | [
"BSD-3-Clause"
] | null | null | null | from datetime import date, timedelta
if __name__ == '__main__':
print(get_report_months())
| 25.904762 | 47 | 0.544118 | from datetime import date, timedelta
def get_report_months():
today = date.today() + timedelta(weeks=4)
months = [date(today.year, today.month, 1)]
while True:
year = months[-1].year
month = months[-1].month
if months[-1].month > 1:
month -= 1
else:
month = 12
year -= 1
if month == 6:
break
months.append(date(year, month, 1))
return [d.strftime('%y%m') for d in months]
if __name__ == '__main__':
print(get_report_months())
| 425 | 0 | 23 |
cdc626e531852e378dce413240d443d81b7ab07f | 5,154 | py | Python | gestorClientes.py | AndresDuque16/GestorClientes | 25a1157d2135bd5afb0314d1639f7e3a7e496a70 | [
"Apache-2.0"
] | null | null | null | gestorClientes.py | AndresDuque16/GestorClientes | 25a1157d2135bd5afb0314d1639f7e3a7e496a70 | [
"Apache-2.0"
] | null | null | null | gestorClientes.py | AndresDuque16/GestorClientes | 25a1157d2135bd5afb0314d1639f7e3a7e496a70 | [
"Apache-2.0"
] | null | null | null | from tkinter import *
from tkinter import messagebox
from tkinter import ttk #treeview
import sqlite3
root = Tk()
root.title('Libreta de Clientes')
conn = sqlite3.connect('gestorCliente.db') #coneccion a la base de datos
c = conn.cursor() #cursor para ejecutar consultas
#creación de base de datos
c.execute("""
CREATE TABLE if not exists cliente (
id INTEGER PRIMARY KEY AUTOINCREMENT,
cedula TEXT NOT NULL,
nombre TEXT NOT NULL,
empresa TEXT NOT NULL
);
""")
#defincion de funciones
#funcion que renderiza los clientes
#funcion que guarda los datos de nuevos clientes en la BD
#FUNCION DE ELIMINAR CLIENTE
btn_new = Button(root, text='Nuevo Cliente', command=new_cliente)
btn_new.grid(row=0, column=0, padx=5,pady=5)
btn_del = Button(root, text='Eliminar Cliente', command=del_cliente)
btn_del.grid(row=0, column=1, padx=5,pady=5)
tree = ttk.Treeview(root) #representancion de informacion en forma jerarquica en una tabla
tree['columns'] =('Cedula', 'Nombre', 'Empresa') #columnas
tree.column('#0', width=0, stretch=NO)# ES LA COLUMNA INICIAL PERO SE COLOCA STRETCH PARA QUE NO APARESCA y width de 0
tree.column('Cedula') #configuracion de los nombres que va a tener cada columna
tree.column('Nombre')#configuracion de los nombres que va a tener cada columna
tree.column('Empresa')#configuracion de los nombres que va a tener cada columna
tree.heading('Cedula', text='Cedula') #los textos de heading que van a tener las tablas
tree.heading('Nombre', text='Nombre')#los textos de heading que van a tener las tablas
tree.heading('Empresa', text='Empresa')#los textos de heading que van a tener las tablas
tree.grid(row=1, column=0, columnspan=2)
render_clientes() #con el objetivo de que actualice los clientes en la tabla
root.mainloop()
| 39.646154 | 123 | 0.668219 | from tkinter import *
from tkinter import messagebox
from tkinter import ttk #treeview
import sqlite3
root = Tk()
root.title('Libreta de Clientes')
conn = sqlite3.connect('gestorCliente.db') #coneccion a la base de datos
c = conn.cursor() #cursor para ejecutar consultas
#creación de base de datos
c.execute("""
CREATE TABLE if not exists cliente (
id INTEGER PRIMARY KEY AUTOINCREMENT,
cedula TEXT NOT NULL,
nombre TEXT NOT NULL,
empresa TEXT NOT NULL
);
""")
#defincion de funciones
#funcion que renderiza los clientes
def render_clientes():
    """Reload every client row from the database into the tree view."""
    registros = c.execute("SELECT * FROM cliente").fetchall()
    # Clear the table first so re-rendering does not duplicate rows.
    tree.delete(*tree.get_children())
    for registro in registros:
        # '' = insert at the root level, END = append at the bottom;
        # the database id (registro[0]) doubles as the tree item id.
        tree.insert('', END, registro[0], values=(registro[1], registro[2], registro[3]))
# Function that saves a new client's data into the DB
def insertar(cliente):
    """Persist a new client dict ('cedula', 'nombre', 'empresa') and refresh the table."""
    valores = (cliente['cedula'], cliente['nombre'], cliente['empresa'])
    c.execute("""
    INSERT INTO cliente(cedula, nombre, empresa) VALUES (?,?,?)
    """, valores)
    # Commit so the row survives, then refresh the on-screen table.
    conn.commit()
    render_clientes()
def new_cliente():
    """Open a modal window to capture and save a new client."""
    def guardar():
        """Validate the form fields, persist the client, close the dialog."""
        if not cedula.get():
            messagebox.showerror('Error', 'La cedula es obligatoria')
            return
        if not nombre.get():
            messagebox.showerror('Error', 'El nombre es obligatorio')
            return
        if not empresa.get():
            messagebox.showerror('Error', 'La empresa es obligatoria')
            return
        # Collect the validated form values into a dict for insertar().
        cliente = {
            'cedula': cedula.get(),
            'nombre': nombre.get(),
            'empresa': empresa.get()
        }
        insertar(cliente)
        # Close the dialog once the client has been stored.
        top.destroy()
    top = Toplevel()
    top.title('Nuevo Cliente')
    # Form labels and entry fields.
    lcedula = Label(top, text='Cedula')
    lcedula.grid(row=0, column=0)
    cedula = Entry(top, width=40)
    cedula.grid(row=0, column=1)
    lnombre = Label(top, text='Nombre')
    lnombre.grid(row=1, column=0)
    nombre = Entry(top, width=40)
    nombre.grid(row=1, column=1)
    lempresa = Label(top, text='Empresa')
    lempresa.grid(row=2, column=0)
    empresa = Entry(top, width=40)
    empresa.grid(row=2, column=1)
    # Renamed from 'guardar' -- the original rebinding shadowed the
    # callback function defined above with the Button widget.
    btn_guardar = Button(top, text='Guardar', command=guardar)
    btn_guardar.grid(row=3, column=1)
    top.mainloop()
# CLIENT DELETION FUNCTION
def del_cliente():
    """Delete the client currently selected in the tree view, after confirmation."""
    seleccion = tree.selection()
    # Guard: the original crashed with IndexError when nothing was selected.
    if not seleccion:
        messagebox.showerror('Error', 'Seleccione un cliente para eliminar')
        return
    id_cliente = seleccion[0]
    cliente = c.execute("SELECT * FROM cliente WHERE id = ?", (id_cliente, )).fetchone()
    respuesta = messagebox.askokcancel('Confirmación', '¿Estas seguro de querer eliminar el cliente ' + cliente[2] + '?')
    if respuesta:
        # Delete the client from the database and refresh the table.
        c.execute("DELETE FROM cliente WHERE id = ?", (id_cliente, ))
        conn.commit()
        render_clientes()
btn_new = Button(root, text='Nuevo Cliente', command=new_cliente)
btn_new.grid(row=0, column=0, padx=5,pady=5)
btn_del = Button(root, text='Eliminar Cliente', command=del_cliente)
btn_del.grid(row=0, column=1, padx=5,pady=5)
tree = ttk.Treeview(root) # table widget that lists the clients
tree['columns'] =('Cedula', 'Nombre', 'Empresa') # data columns
tree.column('#0', width=0, stretch=NO)# hide the implicit first tree column
tree.column('Cedula') # per-column configuration
tree.column('Nombre')
tree.column('Empresa')
tree.heading('Cedula', text='Cedula') # visible column headings
tree.heading('Nombre', text='Nombre')
tree.heading('Empresa', text='Empresa')
tree.grid(row=1, column=0, columnspan=2)
render_clientes() # populate the table on startup
root.mainloop()
| 3,195 | 0 | 89 |
8edac4a434ff40d7f5d637b471e2676b77119bc3 | 978 | py | Python | spicedmodel/PlotPlanet.py | mattkjames7/spicedmodel | 7762a37180f8d399a4e16dd666fad5a1bbea1991 | [
"MIT"
] | null | null | null | spicedmodel/PlotPlanet.py | mattkjames7/spicedmodel | 7762a37180f8d399a4e16dd666fad5a1bbea1991 | [
"MIT"
] | null | null | null | spicedmodel/PlotPlanet.py | mattkjames7/spicedmodel | 7762a37180f8d399a4e16dd666fad5a1bbea1991 | [
"MIT"
] | null | null | null | import numpy as np
def PlotPlanet(ax,R=1.0,Center=[0.0,0.0,0.0],zorder=10,NoonTop=True):
	'''
	Plot the planet in the centre of a set of Axes.
	Inputs
	======
	ax : matplotlib.pyplot.Axes instance
		This is the set of axes to plot on.
	R : float
		Radius to plot planet with.
	Center : float
		3-element array-like object containing the x, y and z
		coordinates in the plot at which to center the planet.
		Only the first two elements are used here.
	zorder : float
		This controls whether the planet appears above or below other
		elements of the plot; the outline and shaded half are drawn
		at zorder + 1.
	NoonTop : bool
		If True, the x and y data are swapped when drawing so that
		noon points towards the top of the plot.
	'''
	# 361 angles (1 degree steps) around the full circle.
	a = 2*np.pi*np.arange(361,dtype='float32')/360
	x = R*np.sin(a) + Center[0]
	y = R*np.cos(a) + Center[1]
	if NoonTop:
		# White disc, black outline, then one half (indices 180:360)
		# filled black -- presumably the night side; confirm.
		ax.fill(y,x,color=[1.0,1.0,1.0],zorder=zorder)
		ax.plot(y,x,color=[0,0,0],zorder=zorder+1)
		ax.fill(y[180:360],x[180:360],color=[0.0,0.0,0.0],zorder=zorder+1)
	else:
		ax.fill(x,y,color=[1.0,1.0,1.0],zorder=zorder)
		ax.plot(x,y,color=[0,0,0],zorder=zorder+1)
		ax.fill(x[180:360],y[180:360],color=[0.0,0.0,0.0],zorder=zorder+1)
| 27.942857 | 69 | 0.664622 | import numpy as np
def PlotPlanet(ax, R=1.0, Center=(0.0, 0.0, 0.0), zorder=10, NoonTop=True):
	'''
	Plot the planet in the centre of a set of Axes.
	Inputs
	======
	ax : matplotlib.pyplot.Axes instance
		This is the set of axes to plot on.
	R : float
		Radius to plot planet with.
	Center : sequence of float
		3-element array-like object containing the x, y and z
		coordinates in the plot at which to center the planet.
		Only the first two elements are used here. (Default changed
		from a mutable list to a tuple; values are identical.)
	zorder : float
		This controls whether the planet appears above or below other
		elements of the plot; the outline and shaded half are drawn
		at zorder + 1.
	NoonTop : bool
		If True, the x and y data are swapped when drawing so that
		noon points towards the top of the plot.
	'''
	# 361 angles (1 degree steps) around the full circle.
	a = 2*np.pi*np.arange(361, dtype='float32')/360
	x = R*np.sin(a) + Center[0]
	y = R*np.cos(a) + Center[1]
	if NoonTop:
		# White disc, black outline, then one half (indices 180:360)
		# filled black -- presumably the night side; confirm.
		ax.fill(y, x, color=[1.0, 1.0, 1.0], zorder=zorder)
		ax.plot(y, x, color=[0, 0, 0], zorder=zorder+1)
		ax.fill(y[180:360], x[180:360], color=[0.0, 0.0, 0.0], zorder=zorder+1)
	else:
		ax.fill(x, y, color=[1.0, 1.0, 1.0], zorder=zorder)
		ax.plot(x, y, color=[0, 0, 0], zorder=zorder+1)
		ax.fill(x[180:360], y[180:360], color=[0.0, 0.0, 0.0], zorder=zorder+1)
| 0 | 0 | 0 |
260dbb8d6617ad3cba7e98627e5c0dc79bbcf6a1 | 2,538 | py | Python | rowTransposition_cipher.py | Kchao1910/Playfair-Cipher | 57b2771134d041d2c8dd23e3c577efb10e14a26c | [
"MIT"
] | 1 | 2019-03-17T04:31:47.000Z | 2019-03-17T04:31:47.000Z | rowTransposition_cipher.py | Kchao1910/Playfair-Cipher | 57b2771134d041d2c8dd23e3c577efb10e14a26c | [
"MIT"
] | null | null | null | rowTransposition_cipher.py | Kchao1910/Playfair-Cipher | 57b2771134d041d2c8dd23e3c577efb10e14a26c | [
"MIT"
] | 1 | 2021-04-09T06:58:03.000Z | 2021-04-09T06:58:03.000Z | # CPSC 452 HW1 - Row Transposition Cipher
# Authors: Jake Cliff, Kenny Chao, and Scott Ng
import sys
import cipherCheck
import math
| 23.719626 | 62 | 0.552403 | # CPSC 452 HW1 - Row Transposition Cipher
# Authors: Jake Cliff, Kenny Chao, and Scott Ng
import sys
import cipherCheck
import math
def enc_rowTransposition(message, key):
    """Encrypt *message* with a row-transposition cipher.

    The message is written row-wise into a matrix with len(key) columns
    (padded with 'X' to fill the last row) and read out column by column
    in the order given by the key digits.

    :param message: plaintext string
    :param key: permutation of 1-based column numbers, e.g. "4312567"
    :return: ciphertext string (also printed with an "Encrypted Text:" prefix)
    """
    columns = len(key)
    rows = math.ceil(len(message) / columns)
    total_chars = rows * columns
    # Pad with the filler letter 'X' so the matrix is completely filled.
    # (The stray debug print of the padded message was removed.)
    message = message + 'X' * (total_chars - len(message))
    # Split the padded message into matrix rows.
    matrix = [message[r * columns:(r + 1) * columns] for r in range(rows)]
    # Read whole columns in the order dictated by the key digits (1-based).
    eTxt = ''.join(row[int(digit) - 1] for digit in key for row in matrix)
    print("Encrypted Text: " + eTxt)
    return eTxt
def dec_rowTransposition(message, key):
    """Decrypt a row-transposition ciphertext (inverse of enc_rowTransposition).

    The ciphertext lists whole columns in key order; they are written back
    into a matrix and the rows are read off in order. Padding 'X'
    characters added during encryption are NOT stripped.

    :param message: ciphertext string (length a multiple of len(key))
    :param key: the same 1-based column permutation used to encrypt
    :return: recovered (still padded) plaintext string
    """
    columns = len(key)
    rows = math.ceil(len(message) / columns)
    # Empty matrix to be filled column by column.
    # (The stray debug print of the matrix was removed.)
    matrix = [[''] * columns for _ in range(rows)]
    pos = 0
    for digit in key:
        col = int(digit) - 1
        for row in matrix:
            row[col] = message[pos]
            pos += 1
    dTxt = ''.join(''.join(row) for row in matrix)
    print("Decrypted Text: " + dTxt)
    return dTxt
def rowTransposition_main(key, encDec, inputFile, outputFile):
    """Read *inputFile*, encrypt or decrypt it with *key*, write the result.

    :param key: string of 1-based column digits, e.g. "4312567"
    :param encDec: 'ENC' to encrypt, 'DEC' to decrypt (case-insensitive)
    :param inputFile: path of the file holding the message
    :param outputFile: path the transformed text is written to
    """
    message = cipherCheck.readFile(inputFile)
    if (encDec.upper() == 'ENC'):
        encryptedText = enc_rowTransposition(message, key)
        cipherCheck.writeFile(outputFile, encryptedText)
    elif (encDec.upper() == 'DEC'):
        decryptedText = dec_rowTransposition(message, key)
        cipherCheck.writeFile(outputFile, decryptedText)
    else:
        # NOTE(review): an invalid mode exits with status 0 (success).
        sys.exit(0)
3066437a6dfe84eac615d21a861061ed40852a1e | 8,988 | py | Python | annogesiclib/stat_sorf.py | Sung-Huan/ANNOgesic | af3de26f6c5ff9d2218f18a84bbc863a1bb95550 | [
"0BSD"
] | 26 | 2016-02-25T19:27:55.000Z | 2022-01-22T09:54:59.000Z | annogesiclib/stat_sorf.py | Sung-Huan/ANNOgesic | af3de26f6c5ff9d2218f18a84bbc863a1bb95550 | [
"0BSD"
] | 28 | 2018-11-22T19:51:06.000Z | 2022-03-20T23:02:13.000Z | annogesiclib/stat_sorf.py | Sung-Huan/ANNOgesic | af3de26f6c5ff9d2218f18a84bbc863a1bb95550 | [
"0BSD"
] | 18 | 2016-06-01T11:53:45.000Z | 2021-12-27T03:41:03.000Z | from annogesiclib.gff3 import Gff3Parser
| 44.49505 | 78 | 0.536048 | from annogesiclib.gff3 import Gff3Parser
def create_dict(nums, strain, utr_detect):
    """Initialise zeroed sORF counters for *strain* inside *nums*.

    :param nums: dict to mutate (strain -> category -> feature -> count)
    :param strain: genome/strain name used as the top-level key
    :param utr_detect: include the UTR-derived categories when True
    :return: the mutated *nums* dict
    """
    if utr_detect:
        categories = ["all", "5'UTR_derived", "3'UTR_derived",
                      "interCDS", "intergenic", "antisense"]
    else:
        categories = ["all", "intergenic", "antisense"]
    counters = ["TSS", "sRNA", "all", "RBS", "TSS_RBS",
                "TSS_sRNA_RBS", "TSS_sRNA", "RBS_sRNA"]
    nums[strain] = {category: {feature: 0 for feature in counters}
                    for category in categories}
    return nums
def plus_data(nums, strain, sorf_types, features, utr_detect):
    """Increment each of *features* for each applicable category in *sorf_types*.

    Without UTR detection only the intergenic/antisense/all buckets exist,
    so other categories are skipped in that mode.
    """
    for category in sorf_types:
        applicable = utr_detect or category in ("intergenic", "antisense", "all")
        if not applicable:
            continue
        for feature in features:
            nums[strain][category][feature] += 1
def print_num(out, num, nums, strain, type_):
    """Write *num* as per-genome and per-category ratios to *out*."""
    genome_total = float(nums[strain]["all"]["all"])
    out.write("(for genome {0}; ".format(float(num) / genome_total))
    category_total = nums[strain][type_]["all"]
    if category_total == 0:
        # Avoid division by zero for empty categories.
        out.write("for {0} - {1})\n".format(type_, 0))
    else:
        out.write("for {0} - {1})\n".format(
            type_, float(num) / float(category_total)))
def print_stat(nums, nums_best, strain, out, utr_detect):
    """Write the per-strain sORF statistics (all vs best candidates) to *out*.

    :param nums: counter dict for all sORF candidates (see create_dict)
    :param nums_best: counter dict for the best sORF candidates
    :param strain: genome/strain name whose stats are printed
    :param out: writable file object
    :param utr_detect: True when UTR-derived classes were computed
    """
    out.write(strain + ":\n")
    if utr_detect:
        out.write("\ttotal sORF in this genome are {0}\n".format(
            nums[strain]["all"]["all"]))
    for type_, features in nums[strain].items():
        out.write("\ttotal sORF of {0} sORF candidates are {1}".format(
            type_, nums[strain][type_]["all"]))
        out.write("(for this genome - {0})\n".format(
            float(nums[strain][type_]["all"]) /
            float(nums[strain]["all"]["all"])))
        for feature, num in features.items():
            if feature == "TSS":
                out.write("\t\ttotal sORF which start "
                          "from TSS are {0}".format(num))
                print_num(out, num, nums, strain, type_)
            elif feature == "sRNA":
                out.write("\t\ttotal sORF without overlap with "
                          "sRNA candidates are {0}".format(num))
                print_num(out, num, nums, strain, type_)
            elif feature == "RBS":
                out.write("\t\ttotal sORF which related with "
                          "ribosomal binding site are {0}".format(num))
                print_num(out, num, nums, strain, type_)
            elif feature == "TSS_RBS":
                out.write("\t\ttotal sORF which start from TSS and related "
                          "with ribosomal binding site are {0}".format(num))
                print_num(out, num, nums, strain, type_)
            elif feature == "TSS_sRNA":
                out.write("\t\ttotal sORF which start from TSS and without "
                          "overlap with sRNA candidates are {0}".format(num))
                print_num(out, num, nums, strain, type_)
            elif feature == "RBS_sRNA":
                out.write("\t\ttotal sORF which related with "
                          "ribosomal binding site and ")
                out.write("without overlap with "
                          "sRNA candidates are {0}".format(num))
                print_num(out, num, nums, strain, type_)
            # Fixed: counters are stored under "TSS_sRNA_RBS" (see
            # create_dict); the old "TSS_RBS_sRNA" comparison never
            # matched, so these stats were silently skipped.
            elif feature == "TSS_sRNA_RBS":
                out.write("\t\ttotal sORF which start from TSS and "
                          "related with ribosomal binding site and ")
                out.write("without overlap with "
                          "sRNA candidates are {0}".format(num))
                print_num(out, num, nums, strain, type_)
            # NOTE(review): the best-sORF summary below runs once per
            # feature iteration (inside this loop), repeating the lines
            # for every feature -- preserved as-is; confirm if intended.
            if strain in nums_best.keys():
                out.write("\t\tThe best sORF are {0}\n".format(
                    nums_best[strain][type_]["all"]))
                out.write("\t\tThe best sORF which without overlap with "
                          "sRNA are {0}".format(nums_best[strain][type_]["sRNA"]))
                print_num(out, nums_best[strain][type_]["sRNA"],
                          nums_best, strain, type_)
            else:
                out.write("\t\tThe best sORF are 0\n")
                out.write("\t\tThe best sORF which without overlap with "
                          "sRNA are 0\n")
def read_file(sorf_gff):
    """Parse a sORF GFF file and return its entries sorted by position.

    :param sorf_gff: path of the GFF3 file to parse
    :return: list of Gff3 entries ordered by seq_id, start, end, strand
    """
    with open(sorf_gff) as fh:
        entries = list(Gff3Parser().entries(fh))
    return sorted(entries,
                  key=lambda entry: (entry.seq_id, entry.start,
                                     entry.end, entry.strand))
def get_stat_num(sorfs_all, utr_detect):
    """Tally sORF feature counters per strain plus a 'total' pseudo-strain.

    :param sorfs_all: Gff3 entries sorted by seq_id (a fresh counter dict
        is created whenever seq_id changes, so sorted input matters)
    :param utr_detect: include the UTR-derived categories when True
    :return: nested dict strain -> category -> feature -> count
    """
    strain = ""
    nums = {}
    create_dict(nums, "total", utr_detect)
    for sorf in sorfs_all:
        if strain != sorf.seq_id:
            create_dict(nums, sorf.seq_id, utr_detect)
            strain = sorf.seq_id
        if sorf.attributes["sORF_type"] == "intergenic":
            sorf_type = "intergenic"
        elif sorf.attributes["sORF_type"] == "antisense":
            sorf_type = "antisense"
        else:
            # NOTE(review): if sORF_type matches none of 5utr/3utr/interCDS,
            # sorf_type keeps its previous value (or is unbound on the very
            # first entry) -- confirm upstream guarantees the type set.
            if "5utr" in sorf.attributes["sORF_type"]:
                sorf_type = "5'UTR_derived"
            elif "3utr" in sorf.attributes["sORF_type"]:
                sorf_type = "3'UTR_derived"
            elif "interCDS" in sorf.attributes["sORF_type"]:
                sorf_type = "interCDS"
        check_class(sorf, nums, sorf_type, utr_detect, strain)
    return nums
def check_class(sorf, nums, sorf_type, utr_detect, strain):
    """Classify one sORF by its TSS / RBS / sRNA evidence and bump counters.

    Builds the list of applicable feature counters compositionally (each
    present flag, every pair of present flags, and the triple) and then
    increments them for both the 'total' pseudo-strain and *strain*.
    This is exactly equivalent to the original eight-branch elif chain.
    """
    has_tss = sorf.attributes["with_TSS"] != "NA"
    no_srna = sorf.attributes["sRNA"] == "NA"
    has_rbs = sorf.attributes["rbs"] != "NA"
    features = ["all"]
    if has_tss:
        features.append("TSS")
    if no_srna:
        features.append("sRNA")
    if has_rbs:
        features.append("RBS")
    if has_tss and has_rbs:
        features.append("TSS_RBS")
    if has_tss and has_rbs and no_srna:
        features.append("TSS_sRNA_RBS")
    if has_tss and no_srna:
        features.append("TSS_sRNA")
    if has_rbs and no_srna:
        features.append("RBS_sRNA")
    for target in ("total", strain):
        plus_data(nums, target, [sorf_type, "all"], features, utr_detect)
def stat(sorf_all, sorf_best, stat_file, utr_detect):
    """Generate the sORF statistics file comparing all vs best candidates.

    :param sorf_all: path to the GFF file with all sORF candidates
    :param sorf_best: path to the GFF file with the best sORF candidates
    :param stat_file: output path for the statistics text file
    :param utr_detect: True when UTR-derived sORF classes were detected
    """
    sorfs_all = read_file(sorf_all)
    sorfs_best = read_file(sorf_best)
    nums = get_stat_num(sorfs_all, utr_detect)
    nums_best = get_stat_num(sorfs_best, utr_detect)
    out = open(stat_file, "w")
    out.write("The filtering condition for the best sORF: \n")
    out.write("1. If TSS file exists, it will select the "
              "sORF which start from TSS.\n")
    out.write("2. If TSS file exists, it will select the "
              "sORF which have a ribosomal binding site ")
    # Typo fix in the emitted text: "shoule" -> "should be".
    out.write("and the ribosomal binding site should be after a TSS.\n")
    out.write("3. If sRNA file exists and you want to "
              "exclude sORF which overlap with sRNA, ")
    out.write("it will select sORF which have non-overlap with sRNA.\n\n")
    # nums always contains the 'total' pseudo-strain; with only one real
    # strain present, skip the redundant 'total' section.
    if len(nums) <= 2:
        for strain in nums.keys():
            if strain != "total":
                print_stat(nums, nums_best, strain, out, utr_detect)
    else:
        for strain in nums.keys():
            print_stat(nums, nums_best, strain, out, utr_detect)
    out.close()
| 8,755 | 0 | 184 |
f0234f48201c20939a27f81f69bbf761563fe3ad | 211 | py | Python | examples/asyncawait.py | quynhanh-ngx/pytago | de976ad8d85702ae665e97978bc4a75d282c857f | [
"MIT"
] | 206 | 2021-06-24T16:16:13.000Z | 2022-03-31T07:44:17.000Z | examples/asyncawait.py | quynhanh-ngx/pytago | de976ad8d85702ae665e97978bc4a75d282c857f | [
"MIT"
] | 13 | 2021-06-24T17:51:36.000Z | 2022-02-23T10:07:17.000Z | examples/asyncawait.py | quynhanh-ngx/pytago | de976ad8d85702ae665e97978bc4a75d282c857f | [
"MIT"
] | 14 | 2021-06-26T02:19:45.000Z | 2022-03-30T03:02:49.000Z | import asyncio
if __name__ == '__main__':
asyncio.run(main())
| 13.1875 | 35 | 0.63981 | import asyncio
async def myAsyncFunction() -> int:
    """Sleep two seconds without blocking the event loop, then return 2."""
    await asyncio.sleep(2)
    return 2
async def main():
    """Await the demo coroutine and print its result."""
    r = await myAsyncFunction()
    print(r)
if __name__ == '__main__':
asyncio.run(main())
| 95 | 0 | 46 |
4a5cdd34ad4fdd7683edc1c46107d4bbe0e3badf | 21,073 | py | Python | CVPRACv2/cvprac_ContainerTest.py | networkop/CVP_Ansible_Modules | 1a850c0374ced5bdc699c1d955258826e6692b38 | [
"BSD-3-Clause"
] | null | null | null | CVPRACv2/cvprac_ContainerTest.py | networkop/CVP_Ansible_Modules | 1a850c0374ced5bdc699c1d955258826e6692b38 | [
"BSD-3-Clause"
] | null | null | null | CVPRACv2/cvprac_ContainerTest.py | networkop/CVP_Ansible_Modules | 1a850c0374ced5bdc699c1d955258826e6692b38 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
#
# Copyright (c) 2019, Arista Networks EOS+
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of Arista Networks nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# 'AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import re
import time
from jinja2 import meta
import jinja2
import yaml
from cvprac.cvp_client import CvpClient
from cvprac.cvp_client_errors import CvpLoginError, CvpApiError
import argparse
import json
# Checking some Environmental Variables
#import sys
#print '\n'.join(sys.path)
import imp
# Python 2 print statement; the 'imp' module is deprecated in Python 3.
print "cvprac is here %s" %str(imp.find_module('cvprac'))
# Setting up some formatted print outputs
import pprint
pp2 = pprint.PrettyPrinter(indent=2)
pp4 = pprint.PrettyPrinter(indent=4)
# Disable HTTPS Insecure Cert Warnings
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def connect(module):
    ''' Connects to CVP device using user provided credentials from playbook.

    :param module: dict holding 'params' (host, username, password,
        protocol, port). On login failure the error text is stored under
        module['fail'] instead of raising.
    :return: CvpClient object with connection instantiated (returned even
        when login failed -- callers should check module['fail']).
    '''
    client = CvpClient()
    try:
        client.connect([module['params']['host']],
                       module['params']['username'],
                       module['params']['password'],
                       protocol=module['params']['protocol'],
                       port=module['params']['port'],
                       )
    # Python 2 'except X, e' syntax: this file is Python 2 only.
    except CvpLoginError, e:
        module['fail']=str(e)
    return client
def device_info(module):
    ''' Get dictionary of device info from CVP.

    :param module: dict with 'client' (connected CvpClient) and 'params'
        containing the target 'device' name.
    :return: device dict augmented with its applied 'configlets'. If the
        device is not found, a dict containing only a 'warning' message
        is returned (no failure is raised, despite the original comment).
    '''
    device_info = module['client'].api.get_device_by_name(module['params']['device'])
    if not device_info:
        # get_device_by_name returned an empty dict -> record a warning in it.
        device_info['warning']="Device with name '%s' does not exist." % module['params']['device']
    else:
        device_info['configlets'] = module['client'].api.get_configlets_by_netelement_id(device_info['systemMacAddress'])['configletList']
    return device_info
def container_info(module):
    ''' Get dictionary of container info from CVP.

    :param module: dict with 'client' (connected CvpClient) and 'params'
        containing the target 'container' name.
    :return: container dict augmented with its 'configlets'. If the
        container does not exist, a dict containing only a 'warning'
        message is returned.
    '''
    container_info = module['client'].api.get_container_by_name(module['params']['container'])
    if container_info == None:
        container_info = {}
        container_info['warning'] = "Container with name '%s' does not exist." % module['params']['container']
    else:
        container_info['configlets'] = module['client'].api.get_configlets_by_container_id(container_info['key'])
    return container_info
def process_configlet(module, configlet):
    ''' Check the current status of a configlet and apply/remove it.

        Looks up the configlet's container/device association counts, then,
        depending on module['params']['action']:
          - "add": applies the configlet to the specified device (which takes
            priority) or, when no device is given, to the container.
          - "delete": removes the configlet from the specified device, or
            from the container when no device is given.

        param module: dict with 'params' (user options) and 'client'
              (connected CvpClient).
        param configlet: Name of Configlet to process.
        return: Dict describing the action taken, before/after association
              counts, and which container/device was added or removed.
        '''
    result = {}
    # Find out if configlet is associated with any containers or devices
    configlet_info = module['client'].api.get_configlet_by_name(configlet)
    result['start_container_count']= configlet_info["containerCount"]
    result['start_device_count'] = configlet_info["netElementCount"]
    # Get details of container
    if module['params']['container'] != 'None':
        container_data = container_info(module)
        # NOTE(review): container_info() sets a lowercase 'warning' key but
        # this checks capitalised 'Warning', so this guard probably never
        # fires and container_data keeps the warning dict -- TODO confirm.
        if 'Warning' in container_data:
            result['data']=container_data
            container_data = "None"
        container_list = module['client'].api.get_applied_containers(configlet)['data']
        # Remove configlet from container if action = delete
        if module['params']['action'] == "delete":
            for container in container_list:
                if module['params']['container'] in container['containerName']:
                    if configlet_info["containerCount"] > 0 and module['params']['device'] == 'None':
                        # Remove configlet from spcified container in module params
                        # If none specified then do not remove configlet
                        # If a device is specified in module params then do not remove configlet
                        result['action'] = 'delete_from_container'
                        if container_data != "None":
                            result['data'] = module['client'].api.remove_configlets_from_container("Ansible Removed Configlet",
                                                                                                   container_data, [configlet_info])
                        else:
                            result['data'] = {'error':'container not found %s' %module['params']['container']}
        if module['params']['action'] == "add":
            if module['params']['device'] == 'None':
                # Add configlet to spcified container in module params
                # If none specified then do not add configlet
                # If a device is specified in module params then do not add configlet
                result['action'] = 'add_to_container'
                if container_data != "None":
                    result['data'] = module['client'].api.apply_configlets_to_container("Ansible Add Configlet",
                                                                                        container_data, [configlet_info])
                else:
                    result['data'] = {'error':'container not found %s' %module['params']['container']}
    # Get details of device
    # Remove configlet from specified device in module params
    # If none specified then do not remove configlet
    if module['params']['device'] != 'None':
        device_data = device_info(module)
        # NOTE(review): same case mismatch as above -- device_info() sets
        # 'warning', not "Warning"; verify which key is intended.
        if "Warning" in device_data:
            result['data']=device_data
            device_data = "None"
        # Remove configlet from device if action = delete
        if module['params']['action'] == "delete":
            device_list = module['client'].api.get_applied_devices(configlet)['data']
            for device in device_list:
                # If configlet applied to device then delete it.
                if module['params']['device'] in device['hostName']:
                    if configlet_info["netElementCount"] > 0 and device_data != "None":
                        result['action'] = 'delete_from_device'
                        result['data'] = module['client'].api.remove_configlets_from_device("Ansible Removed Configlet",
                                                                                            device_data, [configlet_info])
        # Add configlet to device if action = add
        if module['params']['action'] == "add" and device_data != "None":
            result['action'] = 'add_to_device'
            result['data'] = module['client'].api.apply_configlets_to_device("Ansible Added Configlet", device_data,
                                                                             [configlet_info],create_task=True)
    # Check to see if any containers or devices have been added or removed
    # (re-fetch so the counts reflect the add/remove operations above)
    configlet_info = module['client'].api.get_configlet_by_name(configlet)
    result['end_container_count']= configlet_info["containerCount"]
    result['end_device_count'] = configlet_info["netElementCount"]
    # Added
    # NOTE(review): container_data / device_data are bound only when the
    # matching --container / --device option was supplied; if the counts
    # changed without that option these lookups raise NameError, and when
    # the *_data value is the string "None" the subscript raises TypeError
    # -- TODO confirm both paths.
    if result['end_container_count'] > result['start_container_count']:
        result['added_container'] = container_data['name']
    else:
        result['added_container'] = False
    if result['end_device_count'] > result['start_device_count']:
        result['added_device'] = device_data['fqdn']
    else:
        result['added_device'] = False
    # Removed
    if result['end_container_count'] < result['start_container_count']:
        result['removed_container'] = container_data['name']
    else:
        result['removed_container'] = False
    if result['end_device_count'] < result['start_device_count']:
        result['removed_device'] = device_data['fqdn']
    else:
        result['removed_device'] = False
    return result
#def process_container(module, container, parent):
# ''' Check for existence of a Container and its parent in CVP.
# Returns True if the Containerand Parent exist
# Creates Container if Parent exists but Container doesn't and
# Returns True
# Returns False if the Parent container does not exist and dose not
# create the Container specified.
# '''
# containers = module['client'].api.get_containers()
#
# # Ensure the parent exists
# parent = next((item for item in containers['data'] if
# item['name'] == parent), None)
# if not parent:
# print'Parent container does not exist.'
#
# cont = next((item for item in containers['data'] if
# item['name'] == container), None)
# if not cont:
# module['client'].api.add_container(container, parent['name'],
# parent['key'])
# return True
#
# return False
def config_from_template(module):
    ''' Load the Jinja template and render it with the user supplied data.

        Builds the template context from the 'data' YAML plus the device and
        container names, reports (via print) any template variable that was
        not provided, and returns the rendered configuration text.

    :param module: Ansible module with parameters and client connection.
    :return: String of the rendered Jinja template, or False when no
        template parameter was supplied.
    '''
    template = False
    if module['params']['template']:
        template_loader = jinja2.FileSystemLoader('./templates')
        # DebugUndefined leaves undeclared variables in place instead of
        # raising, so rendering can succeed even with missing values.
        env = jinja2.Environment(loader=template_loader,
                                 undefined=jinja2.DebugUndefined)
        # NOTE(review): get_template() raises TemplateNotFound for a missing
        # file rather than returning a falsy value, so the check below
        # likely never triggers -- TODO confirm intended behaviour.
        template = env.get_template(module['params']['template'])
        if not template:
            print'Could not find template - %s'% module['params']['template']
        templateData = {}
        templateData["data"] = yaml.safe_load(module['params']['data'])
        templateData["device"] = module['params']['device']
        templateData["container"] = module['params']['container']
        # Parse the raw template source to list the variables it expects and
        # warn about any that are absent from templateData.
        temp_source = env.loader.get_source(env, module['params']['template'])[0]
        parsed_content = env.parse(temp_source)
        temp_vars = list(meta.find_undeclared_variables(parsed_content))
        for var in temp_vars:
            if str(var) not in templateData:
                print 'Template %s requires %s value.'%(module['params']['template'],var)
                print 'Please re-run with %s provided.'%(var)
        try:
            template = template.render(templateData)
        except Exception as templateError:
            print'Template - %s: does not render correctly: %s'%(module['params']['template'],templateError)
    else:
        print'Template - required but not provided'
    return template
def configlet_action(module):
    ''' Act upon specified Configlet based on options provided.
        - show   - display contents of existing configlet
        - add    - update or add new configlet to CVP
        - delete - delete existing configlet (only when it is no longer
                   applied to any container or device)
    :param module: Ansible module with parameters and client connection.
    :return: [changed(bool), result(dict)] describing what was done.
        The configlet will be named as follows:
          device_configletName  if both device and configletName provided
          device_template       if device and template provided
          configletName         if only configletName provided
          "Ansible_Temp"        otherwise
    '''
    result = dict()
    result['configletAction']=module['params']['action']
    changed = False
    configlet_found = False
    existing_config = 'None'
    # Create Configlet Name
    if module['params']['device'] != 'None' and module['params']['configletName'] != 'None':
        configlet_name = str(module['params']['device'])+'_'+str(module['params']['configletName'])
    elif module['params']['device'] != 'None' and module['params']['template'] != 'None':
        # template file name without its extension (split on the first '.')
        configlet_name = str(module['params']['device'])+'_'+str(re.split('\.',module['params']['template'])[0])
    elif module['params']['configletName'] != 'None':
        configlet_name = str(module['params']['configletName'])
    else:
        configlet_name = "Ansible_Temp"
    result['configletName'] = configlet_name
    # Find Configlet in CVP if it exists
    configlet_list = module['client'].api.get_configlets()['data']
    for configlet in configlet_list:
        if str(configlet['name']) == str(configlet_name):
            configlet_data = module['client'].api.get_configlet_by_name(configlet_name)
            existing_config = configlet_data['config']
            configlet_found = True
    # Create New config if required
    # NOTE(review): 'config' is bound only when a template was supplied; the
    # 'add' branch (and the blocked-delete branch) below read it
    # unconditionally and would raise NameError otherwise -- TODO confirm
    # that --template being a required CLI option guarantees this.
    if module['params']['template']:
        config = config_from_template(module)
    # Return current config if found and action was show
    if module['params']['action'] == 'show':
        if configlet_found:
            result['currentConfigBlock'] = existing_config
            result['newConfigBlock'] = "No Config - show only existing"
        else:
            result['currentConfigBlock'] = "No Config - Configlet Not Found"
            result['newConfigBlock'] = "No Config - show only existing"
    # Amend or Create Configlet/Config if action was add
    elif module['params']['action'] == 'add':
        if configlet_found:
            result['currentConfigBlock'] = existing_config
            result['newConfigBlock'] = config
            resp = module['client'].api.update_configlet(config, configlet_data['key'],
                                                         configlet_data['name'])
            module['client'].api.add_note_to_configlet(configlet_data['key'],
                                                       "## Managed by Ansible ##")
            result.update(process_configlet(module, configlet_name))
            changed = True
        else:
            result['currentConfigBlock'] = "New Configlet - No Config to return"
            result['newConfigBlock'] = config
            resp = module['client'].api.add_configlet(configlet_name,config)
            module['client'].api.add_note_to_configlet(resp,
                                                       "## Managed by Ansible ##")
            result.update(process_configlet(module, configlet_name))
            changed = True
    # Delete Configlet if it exists
    elif module['params']['action'] == 'delete':
        if configlet_found:
            result['currentConfigBlock'] = existing_config
            result['newConfigBlock'] = "No Config - Configlet Deleted"
            # Detach from container/device first, then re-check the counts.
            result.update(process_configlet(module, configlet_name))
            if result['end_container_count'] > 0 or result['end_device_count'] > 0:
                # Still applied somewhere else -- refuse to delete.
                changed = False
                result['newConfigBlock'] = config
            else:
                resp = module['client'].api.delete_configlet(configlet_data['name'], configlet_data['key'])
                changed = True
                result['newConfigBlock'] = "No Config - Configlet Deleted"
        else:
            result['currentConfigBlock'] = "No Config - Configlet Not Found"
            result['newConfigBlock'] = "No Config - Configlet Not Found"
    else:
        result['currentConfigBlock'] = "No Config - Invalid action"
        result['newConfigBlock'] = "No Config - Invalid action"
    # Return Results from operations
    return [changed,result]
def parseArgs():
    """Gather command line options for the script, generate help text and
    let argparse perform basic validation.

    :return: argparse.Namespace with the parsed options.
    """
    parser = argparse.ArgumentParser(description="Create a configlet in CVP")
    parser.add_argument("--username", required=True, help='Username to log into CVP')
    parser.add_argument("--password", required=True, help='Password for CVP user to login')
    parser.add_argument("--host", required=True, help='CVP Host IP or Name')
    parser.add_argument("--protocol", default='HTTPS', help='HTTP or HTTPs')
    parser.add_argument("--port", default=443, help='TCP port Number default 443')
    parser.add_argument("--container", default='None', help='Container to add configlet to')
    parser.add_argument("--parent", default="Tennant", help='Parent container for target container')
    parser.add_argument("--device", default='None', help='Device to add configlet to')
    parser.add_argument("--configletName", default='None', help='Name of Configlet, can be auto-generated')
    # 'default' is meaningless on required options, so it is omitted below.
    parser.add_argument("--template", required=True, help='Jinja2 Template used for Configlet')
    parser.add_argument("--data", required=True, help='Yaml Data File required for Configlet Data')
    parser.add_argument("--action", required=True, choices=['show', 'add', 'delete'],
                        help='show,add,delete')
    args = parser.parse_args()
    return args
def main():
    """ main entry point for module execution

    Parses command line options, connects to CVP, creates/updates/deletes
    the configlet via configlet_action(), then determines whether the
    resulting configlet ended up applied at device or container level and
    dumps the results for debugging.
    """
    module = {}
    #module['params'] = parseArgs()
    module['params'] = vars(parseArgs())
    result = dict(changed=False)
    print "### Connecting to CVP ###"
    module['client'] = connect(module)
    # Before Starting check for existing tasks
    # Pass config and module params to configlet_action to act on configlet
    print "### Creating Configlet ###"
    result['changed'],result['configlet_data'] = configlet_action(module)
    # Check if the configlet is applied to a device or container
    # Device will take priority of Container
    configlet_type = "None"
    if module['params']['device'] != "None":
        device_data = device_info(module)
        if 'warning' not in device_data:
            configletList = []
            print "Debug device_data-configlets:"
            for configlet in device_data['configlets']:
                configletList.append(configlet['name'])
            pp2.pprint(configletList)
            for configlet in device_data['configlets']:
                # Check if Configlet is applied to Device
                if configlet['name'] == result['configlet_data']['configletName']:
                    configlet_type = "device"
    if module['params']['container'] != "None" and module['params']['device'] == "None":
        container_data = container_info(module)
        if 'warning' not in container_data:
            configletList = []
            print "Debug container_data-configlets:"
            # NOTE(review): container_info() stores the API return directly
            # under 'configlets'; indexing ['configletList'] again here
            # depends on that return's shape -- TODO confirm it is a dict
            # with a 'configletList' key and not already a list.
            for configlet in container_data['configlets']['configletList']:
                configletList.append(configlet['name'])
            pp2.pprint(configletList)
            for configlet in container_data['configlets']['configletList']:
                # Check if Configlet is applied to Container
                if configlet['name'] == result['configlet_data']['configletName']:
                    configlet_type = "container"
    result['configlet_data']['configletType'] = configlet_type
    # Check Results of configlet_action and act accordingly
    # (currently a no-op placeholder for task handling)
    if result['changed']:
        pass
    print "\nModule Result:"
    pp4.pprint(result)
    print "\nModule Data:"
    pp4.pprint(module)
if __name__ == '__main__':
    main()
| 47.676471 | 138 | 0.641959 | #!/usr/bin/env python
#
# Copyright (c) 2019, Arista Networks EOS+
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of Arista Networks nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# 'AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import re
import time
from jinja2 import meta
import jinja2
import yaml
from cvprac.cvp_client import CvpClient
from cvprac.cvp_client_errors import CvpLoginError, CvpApiError
import argparse
import json
# Checking some Enviromental Variables
#import sys
#print '\n'.join(sys.path)
import imp
print "cvprac is here %s" %str(imp.find_module('cvprac'))
# Setting up some formated print outputs
import pprint
pp2 = pprint.PrettyPrinter(indent=2)
pp4 = pprint.PrettyPrinter(indent=4)
# Disable HTTPS Insecure Cert Warnings
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def connect(module):
''' Connects to CVP device using user provided credentials from playbook.
:param module: Ansible module with parameters and client connection.
:return: CvpClient object with connection instantiated.
'''
client = CvpClient()
try:
client.connect([module['params']['host']],
module['params']['username'],
module['params']['password'],
protocol=module['params']['protocol'],
port=module['params']['port'],
)
except CvpLoginError, e:
module['fail']=str(e)
return client
def device_info(module):
''' Get dictionary of device info from CVP.
:param module: Ansible module with parameters and client connection.
:return: Dict of device info from CVP or exit with failure if no
info for device is found.
'''
device_info = module['client'].api.get_device_by_name(module['params']['device'])
if not device_info:
device_info['warning']="Device with name '%s' does not exist." % module['params']['device']
else:
device_info['configlets'] = module['client'].api.get_configlets_by_netelement_id(device_info['systemMacAddress'])['configletList']
return device_info
def container_info(module):
''' Get dictionary of container info from CVP.
:param module: Ansible module with parameters and client connection.
:return: Dict of container info from CVP or exit with failure if no
info for device is found.
'''
container_info = module['client'].api.get_container_by_name(module['params']['container'])
if container_info == None:
container_info = {}
container_info['warning'] = "Container with name '%s' does not exist." % module['params']['container']
else:
container_info['configlets'] = module['client'].api.get_configlets_by_container_id(container_info['key'])
return container_info
def process_configlet(module, configlet):
''' Check the current status of a configlet.
Returns a list of associated containers / devices
Returns None if the configlet has no associations.
If action = add apply configlet to device or container
if device specified only apply to device
If action = delete removes configlet from device or container
param module: Ansible module with parameters and client connection.
configlet: Name of Configlet to process
return: Dict of action taken, containers/devices affected and counts of same
'''
result = {}
# Find out if configlet is associated with any containers or devices
configlet_info = module['client'].api.get_configlet_by_name(configlet)
result['start_container_count']= configlet_info["containerCount"]
result['start_device_count'] = configlet_info["netElementCount"]
# Get details of container
if module['params']['container'] != 'None':
container_data = container_info(module)
if 'Warning' in container_data:
result['data']=container_data
container_data = "None"
container_list = module['client'].api.get_applied_containers(configlet)['data']
# Remove configlet from container if action = delete
if module['params']['action'] == "delete":
for container in container_list:
if module['params']['container'] in container['containerName']:
if configlet_info["containerCount"] > 0 and module['params']['device'] == 'None':
# Remove configlet from spcified container in module params
# If none specified then do not remove configlet
# If a device is specified in module params then do not remove configlet
result['action'] = 'delete_from_container'
if container_data != "None":
result['data'] = module['client'].api.remove_configlets_from_container("Ansible Removed Configlet",
container_data, [configlet_info])
else:
result['data'] = {'error':'container not found %s' %module['params']['container']}
if module['params']['action'] == "add":
if module['params']['device'] == 'None':
# Add configlet to spcified container in module params
# If none specified then do not add configlet
# If a device is specified in module params then do not add configlet
result['action'] = 'add_to_container'
if container_data != "None":
result['data'] = module['client'].api.apply_configlets_to_container("Ansible Add Configlet",
container_data, [configlet_info])
else:
result['data'] = {'error':'container not found %s' %module['params']['container']}
# Get details of device
# Remove configlet from specified device in module params
# If none specified then do not remove configlet
if module['params']['device'] != 'None':
device_data = device_info(module)
if "Warning" in device_data:
result['data']=device_data
device_data = "None"
# Remove configlet from device if action = delete
if module['params']['action'] == "delete":
device_list = module['client'].api.get_applied_devices(configlet)['data']
for device in device_list:
# If configlet applied to device then delete it.
if module['params']['device'] in device['hostName']:
if configlet_info["netElementCount"] > 0 and device_data != "None":
result['action'] = 'delete_from_device'
result['data'] = module['client'].api.remove_configlets_from_device("Ansible Removed Configlet",
device_data, [configlet_info])
# Add configlet to device if action = add
if module['params']['action'] == "add" and device_data != "None":
result['action'] = 'add_to_device'
result['data'] = module['client'].api.apply_configlets_to_device("Ansible Added Configlet", device_data,
[configlet_info],create_task=True)
# Check to see if any containers or devices have been added or removed
configlet_info = module['client'].api.get_configlet_by_name(configlet)
result['end_container_count']= configlet_info["containerCount"]
result['end_device_count'] = configlet_info["netElementCount"]
# Added
if result['end_container_count'] > result['start_container_count']:
result['added_container'] = container_data['name']
else:
result['added_container'] = False
if result['end_device_count'] > result['start_device_count']:
result['added_device'] = device_data['fqdn']
else:
result['added_device'] = False
# Removed
if result['end_container_count'] < result['start_container_count']:
result['removed_container'] = container_data['name']
else:
result['removed_container'] = False
if result['end_device_count'] < result['start_device_count']:
result['removed_device'] = device_data['fqdn']
else:
result['removed_device'] = False
return result
#def process_container(module, container, parent):
# ''' Check for existence of a Container and its parent in CVP.
# Returns True if the Containerand Parent exist
# Creates Container if Parent exists but Container doesn't and
# Returns True
# Returns False if the Parent container does not exist and dose not
# create the Container specified.
# '''
# containers = module['client'].api.get_containers()
#
# # Ensure the parent exists
# parent = next((item for item in containers['data'] if
# item['name'] == parent), None)
# if not parent:
# print'Parent container does not exist.'
#
# cont = next((item for item in containers['data'] if
# item['name'] == container), None)
# if not cont:
# module['client'].api.add_container(container, parent['name'],
# parent['key'])
# return True
#
# return False
def config_from_template(module):
''' Load the Jinja template and apply user provided parameters in necessary
places. Fail if template is not found. Fail if rendered template does
not reference the correct port. Fail if the template requires a VLAN
but the user did not provide one with the port_vlan parameter.
:param module: Ansible module with parameters and client connection.
:return: String of Jinja template rendered with parameters or exit with
failure.
'''
template = False
if module['params']['template']:
template_loader = jinja2.FileSystemLoader('./templates')
env = jinja2.Environment(loader=template_loader,
undefined=jinja2.DebugUndefined)
template = env.get_template(module['params']['template'])
if not template:
print'Could not find template - %s'% module['params']['template']
templateData = {}
templateData["data"] = yaml.safe_load(module['params']['data'])
templateData["device"] = module['params']['device']
templateData["container"] = module['params']['container']
temp_source = env.loader.get_source(env, module['params']['template'])[0]
parsed_content = env.parse(temp_source)
temp_vars = list(meta.find_undeclared_variables(parsed_content))
for var in temp_vars:
if str(var) not in templateData:
print 'Template %s requires %s value.'%(module['params']['template'],var)
print 'Please re-run with %s provided.'%(var)
try:
template = template.render(templateData)
except Exception as templateError:
print'Template - %s: does not render correctly: %s'%(module['params']['template'],templateError)
else:
print'Template - required but not provided'
return template
def configlet_action(module):
''' Act upon specified Configlet based on options provided.
- show - display contents of existing config let
- add - update or add new configlet to CVP
- delete - delete existing configlet
:param module: Ansible module with parameters and client connection.
:return: Dict of information to updated results with.
The configlet will be named as follows:
If associated with a device the configlet name will be
device_configletName if configletName has been provided
otherwise it will be device_template
if none of the above have been provided it will be configletName
if that was not provided a default name of Ansible_Test will be used
'''
result = dict()
result['configletAction']=module['params']['action']
changed = False
configlet_found = False
existing_config = 'None'
# Create Configlet Name
if module['params']['device'] != 'None' and module['params']['configletName'] != 'None':
configlet_name = str(module['params']['device'])+'_'+str(module['params']['configletName'])
elif module['params']['device'] != 'None' and module['params']['template'] != 'None':
configlet_name = str(module['params']['device'])+'_'+str(re.split('\.',module['params']['template'])[0])
elif module['params']['configletName'] != 'None':
configlet_name = str(module['params']['configletName'])
else:
configlet_name = "Ansible_Temp"
result['configletName'] = configlet_name
# Find Configlet in CVP if it exists
configlet_list = module['client'].api.get_configlets()['data']
for configlet in configlet_list:
if str(configlet['name']) == str(configlet_name):
configlet_data = module['client'].api.get_configlet_by_name(configlet_name)
existing_config = configlet_data['config']
configlet_found = True
# Create New config if required
if module['params']['template']:
config = config_from_template(module)
# Return current config if found and action was show
if module['params']['action'] == 'show':
if configlet_found:
result['currentConfigBlock'] = existing_config
result['newConfigBlock'] = "No Config - show only existing"
else:
result['currentConfigBlock'] = "No Config - Configlet Not Found"
result['newConfigBlock'] = "No Config - show only existing"
# Amend or Create Configlet/Config if action was add
elif module['params']['action'] == 'add':
if configlet_found:
result['currentConfigBlock'] = existing_config
result['newConfigBlock'] = config
resp = module['client'].api.update_configlet(config, configlet_data['key'],
configlet_data['name'])
module['client'].api.add_note_to_configlet(configlet_data['key'],
"## Managed by Ansible ##")
result.update(process_configlet(module, configlet_name))
changed = True
else:
result['currentConfigBlock'] = "New Configlet - No Config to return"
result['newConfigBlock'] = config
resp = module['client'].api.add_configlet(configlet_name,config)
module['client'].api.add_note_to_configlet(resp,
"## Managed by Ansible ##")
result.update(process_configlet(module, configlet_name))
changed = True
# Delete Configlet if it exists
elif module['params']['action'] == 'delete':
if configlet_found:
result['currentConfigBlock'] = existing_config
result['newConfigBlock'] = "No Config - Configlet Deleted"
result.update(process_configlet(module, configlet_name))
if result['end_container_count'] > 0 or result['end_device_count'] > 0:
changed = False
result['newConfigBlock'] = config
else:
resp = module['client'].api.delete_configlet(configlet_data['name'], configlet_data['key'])
changed = True
result['newConfigBlock'] = "No Config - Configlet Deleted"
else:
result['currentConfigBlock'] = "No Config - Configlet Not Found"
result['newConfigBlock'] = "No Config - Configlet Not Found"
else:
result['currentConfigBlock'] = "No Config - Invalid action"
result['newConfigBlock'] = "No Config - Invalid action"
# Return Results from operations
return [changed,result]
def parseArgs():
"""Gathers comand line options for the script, generates help text and performs some error checking"""
usage = "usage: %prog [options]"
parser = argparse.ArgumentParser(description="Create a configlet in CVP CVP")
parser.add_argument("--username",required=True, help='Username to log into CVP')
parser.add_argument("--password",required=True, help='Password for CVP user to login')
parser.add_argument("--host",required=True, help='CVP Host IP or Name')
parser.add_argument("--protocol", default='HTTPS', help='HTTP or HTTPs')
parser.add_argument("--port", default=443 ,help='TCP port Number default 443')
parser.add_argument("--container",default='None', help='Container to add configlet to')
parser.add_argument("--parent", default="Tennant", help='Parent container for target container')
parser.add_argument("--device", default='None', help='Device to add configlet to')
parser.add_argument("--configletName", default='None', help='Name of Configlet, can be auto-generated')
parser.add_argument("--template",required=True, default='None', help='Jinja2 Template used for Configlet')
parser.add_argument("--data",required=True, help='Yaml Data File required for Configlet Data')
parser.add_argument("--action",required=True, default='show', choices=['show', 'add', 'delete'],help='show,add,delete')
args = parser.parse_args()
return (args)
def main():
""" main entry point for module execution
"""
module = {}
#module['params'] = parseArgs()
module['params'] = vars(parseArgs())
result = dict(changed=False)
print "### Connecting to CVP ###"
module['client'] = connect(module)
# Before Starting check for existing tasks
# Pass config and module params to configlet_action to act on configlet
print "### Creating Configlet ###"
result['changed'],result['configlet_data'] = configlet_action(module)
# Check if the configlet is applied to a device or container
# Device will take priority of Container
configlet_type = "None"
if module['params']['device'] != "None":
device_data = device_info(module)
if 'warning' not in device_data:
configletList = []
print "Debug device_data-configlets:"
for configlet in device_data['configlets']:
configletList.append(configlet['name'])
pp2.pprint(configletList)
for configlet in device_data['configlets']:
# Check if Configlet is applied to Device
if configlet['name'] == result['configlet_data']['configletName']:
configlet_type = "device"
if module['params']['container'] != "None" and module['params']['device'] == "None":
container_data = container_info(module)
if 'warning' not in container_data:
configletList = []
print "Debug container_data-configlets:"
for configlet in container_data['configlets']['configletList']:
configletList.append(configlet['name'])
pp2.pprint(configletList)
for configlet in container_data['configlets']['configletList']:
# Check if Configlet is applied to Container
if configlet['name'] == result['configlet_data']['configletName']:
configlet_type = "container"
result['configlet_data']['configletType'] = configlet_type
# Check Results of configlet_action and act accordingly
if result['changed']:
pass
print "\nModule Result:"
pp4.pprint(result)
print "\nModule Data:"
pp4.pprint(module)
if __name__ == '__main__':
main()
| 0 | 0 | 0 |
7052d37f9d2a08e22c7d0f1c675de5ec802e027b | 1,510 | py | Python | invenio_app_ils/acquisition/indexer.py | jrcastro2/invenio-app-ils | 502b9e7bac737863905976a1d07e2cd924f5d779 | [
"MIT"
] | null | null | null | invenio_app_ils/acquisition/indexer.py | jrcastro2/invenio-app-ils | 502b9e7bac737863905976a1d07e2cd924f5d779 | [
"MIT"
] | 21 | 2018-11-02T14:19:53.000Z | 2021-06-25T15:16:42.000Z | invenio_app_ils/acquisition/indexer.py | topless/invenio-app-ils | 38f5a6b61cdeaf5fa5776613073fa46af28737a9 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (C) 2019-2020 CERN.
#
# invenio-app-ils is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Acquisition indexer APIs."""
from datetime import datetime
from celery import shared_task
from flask import current_app
from invenio_indexer.api import RecordIndexer
from invenio_app_ils.indexer import ReferencedRecordsIndexer
from .api import ORDER_PID_TYPE, VENDOR_PID_TYPE
from .proxies import current_ils_acq
@shared_task(ignore_result=True)
def vendor_index_referenced_records(vendor):
    """Re-index the given vendor together with every order referencing it."""
    order_cls = current_ils_acq.order_record_cls
    order_search_cls = current_ils_acq.order_search_cls
    # Search returns lightweight hits; re-fetch each full order record by PID.
    hits = order_search_cls().search_by_vendor_pid(vendor_pid=vendor["pid"]).scan()
    referenced = [
        dict(pid_type=ORDER_PID_TYPE, record=order_cls.get_record_by_pid(hit["pid"]))
        for hit in hits
    ]
    ReferencedRecordsIndexer().index(
        dict(pid_type=VENDOR_PID_TYPE, record=vendor), referenced
    )
class VendorIndexer(RecordIndexer):
    """Indexer class for Vendor record."""
    def index(self, vendor, arguments=None, **kwargs):
        """Index a Vendor, then schedule re-indexing of referencing records.

        The follow-up task is delayed by the configured
        ``ILS_INDEXER_TASK_DELAY`` (presumably to let the primary index
        operation settle first — confirm against deployment docs).
        """
        super().index(vendor)
        eta = datetime.utcnow() + current_app.config["ILS_INDEXER_TASK_DELAY"]
        vendor_index_referenced_records.apply_async((vendor,), eta=eta)
| 30.816327 | 78 | 0.734437 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2019-2020 CERN.
#
# invenio-app-ils is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Acquisition indexer APIs."""
from datetime import datetime
from celery import shared_task
from flask import current_app
from invenio_indexer.api import RecordIndexer
from invenio_app_ils.indexer import ReferencedRecordsIndexer
from .api import ORDER_PID_TYPE, VENDOR_PID_TYPE
from .proxies import current_ils_acq
@shared_task(ignore_result=True)
def vendor_index_referenced_records(vendor):
    """Index referenced records."""
    # Celery task: index the vendor record itself plus every order that
    # references it, in one ReferencedRecordsIndexer call.
    indexer = ReferencedRecordsIndexer()
    indexed = dict(pid_type=VENDOR_PID_TYPE, record=vendor)
    referenced = []
    # fetch and index orders
    Order = current_ils_acq.order_record_cls
    OrderSearch = current_ils_acq.order_search_cls
    for order in (
        OrderSearch().search_by_vendor_pid(vendor_pid=vendor["pid"]).scan()
    ):
        # Re-fetch the full record by PID (the search hit presumably holds
        # only a summary of the record — confirm).
        order = Order.get_record_by_pid(order["pid"])
        referenced.append(dict(pid_type=ORDER_PID_TYPE, record=order))
    indexer.index(indexed, referenced)
class VendorIndexer(RecordIndexer):
    """Indexer for Vendor records that also refreshes dependants."""
    def index(self, vendor, arguments=None, **kwargs):
        """Index a Vendor and queue re-indexing of records that reference it."""
        super().index(vendor)
        delay = current_app.config["ILS_INDEXER_TASK_DELAY"]
        vendor_index_referenced_records.apply_async(
            (vendor,), eta=datetime.utcnow() + delay
        )
| 0 | 0 | 0 |
16213ccc93dbdff9c66b124acbe7b5cd93afad3b | 1,206 | py | Python | cohesity_management_sdk/models/type_oracle_protection_source_enum.py | chandrashekar-cohesity/management-sdk-python | 9e6ec99e8a288005804b808c4e9b19fd204e3a8b | [
"Apache-2.0"
] | 1 | 2021-01-07T20:36:22.000Z | 2021-01-07T20:36:22.000Z | cohesity_management_sdk/models/type_oracle_protection_source_enum.py | chandrashekar-cohesity/management-sdk-python | 9e6ec99e8a288005804b808c4e9b19fd204e3a8b | [
"Apache-2.0"
] | null | null | null | cohesity_management_sdk/models/type_oracle_protection_source_enum.py | chandrashekar-cohesity/management-sdk-python | 9e6ec99e8a288005804b808c4e9b19fd204e3a8b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2019 Cohesity Inc.
class TypeOracleProtectionSourceEnum(object):
    """Implementation of the 'Type_OracleProtectionSource' enum.
    Specifies the type of the managed Object in Oracle Protection Source.
    'kRACRootContainer' indicates the entity is a root container to an Oracle
    Real Application clusters(Oracle RAC).
    'kRootContainer' indicates the entity is a root container to an Oracle
    standalone server.
    'kHost' indicates the entity is an Oracle host.
    'kDatabase' indicates the entity is an Oracle Database.
    'kTableSpace' indicates the entity is an Oracle table space.
    'kTable' indicates the entity is an Oracle table.
    Attributes:
        KRACROOTCONTAINER: root container of an Oracle RAC cluster.
        KROOTCONTAINER: root container of an Oracle standalone server.
        KHOST: an Oracle host.
        KDATABASE: an Oracle database.
        KTABLESPACE: an Oracle table space.
        KTABLE: an Oracle table.
    """
    KRACROOTCONTAINER = 'kRACRootContainer'
    KROOTCONTAINER = 'kRootContainer'
    KHOST = 'kHost'
    KDATABASE = 'kDatabase'
    KTABLESPACE = 'kTableSpace'
    KTABLE = 'kTable'
| 30.15 | 77 | 0.708126 | # -*- coding: utf-8 -*-
# Copyright 2019 Cohesity Inc.
class TypeOracleProtectionSourceEnum(object):
    """String constants for the 'Type_OracleProtectionSource' enum.

    Each value names the kind of managed object inside an Oracle
    Protection Source:

    * ``KRACROOTCONTAINER`` -- root container of an Oracle Real
      Application Clusters (RAC) setup.
    * ``KROOTCONTAINER`` -- root container of a standalone Oracle server.
    * ``KHOST`` -- an Oracle host.
    * ``KDATABASE`` -- an Oracle database.
    * ``KTABLESPACE`` -- an Oracle table space.
    * ``KTABLE`` -- an Oracle table.
    """
    KRACROOTCONTAINER = 'kRACRootContainer'
    KROOTCONTAINER = 'kRootContainer'
    KHOST = 'kHost'
    KDATABASE = 'kDatabase'
    KTABLESPACE = 'kTableSpace'
    KTABLE = 'kTable'
| 0 | 0 | 0 |
01819ce1843629dde3d93b6b7f35f7efe11481aa | 1,359 | py | Python | Math/Prime Sum.py | mr-mornin-star/problemSolving | c278e5d090af7370e56789e68b7bb73dc37165f8 | [
"Apache-2.0"
] | null | null | null | Math/Prime Sum.py | mr-mornin-star/problemSolving | c278e5d090af7370e56789e68b7bb73dc37165f8 | [
"Apache-2.0"
] | null | null | null | Math/Prime Sum.py | mr-mornin-star/problemSolving | c278e5d090af7370e56789e68b7bb73dc37165f8 | [
"Apache-2.0"
] | null | null | null | """
Prime Sum
Problem Description
Given an even number A ( greater than 2 ), return two prime numbers whose sum will be equal to given number. If there are more than one solutions possible, return the lexicographically smaller solution.
If [a, b] is one solution with a <= b, and [c,d] is another solution with c <= d, then
[a, b] < [c, d], If a < c OR a==c AND b < d.
NOTE: A solution will always exist. Read Goldbach's conjecture.
Problem Constraints
4 <= A <= 2*107
Input Format
First and only argument of input is an even number A.
Output Format
Return a integer array of size 2 containing primes whose sum will be equal to given number.
Example Input
4
Example Output
[2, 2]
Example Explanation
There is only 1 solution for A = 4.
"""
| 26.647059 | 204 | 0.596762 | """
Prime Sum
Problem Description
Given an even number A ( greater than 2 ), return two prime numbers whose sum will be equal to given number. If there are more than one solutions possible, return the lexicographically smaller solution.
If [a, b] is one solution with a <= b, and [c,d] is another solution with c <= d, then
[a, b] < [c, d], If a < c OR a==c AND b < d.
NOTE: A solution will always exist. Read Goldbach's conjecture.
Problem Constraints
4 <= A <= 2*107
Input Format
First and only argument of input is an even number A.
Output Format
Return a integer array of size 2 containing primes whose sum will be equal to given number.
Example Input
4
Example Output
[2, 2]
Example Explanation
There is only 1 solution for A = 4.
"""
class Solution:
    """Goldbach pair finder for the "Prime Sum" problem."""

    def primesum(self, A):
        """Return the lexicographically smallest prime pair summing to A.

        ``A`` is assumed to be an even integer > 2, so by Goldbach's
        conjecture a solution exists for all tested inputs.

        :param A: even integer >= 4
        :return: ``[p, q]`` with ``p <= q``, ``p + q == A``, both prime
        """
        primes = self._sieve(A)
        # Scanning i upward from 2 yields the lexicographically smallest pair.
        for i in range(2, A):
            if i in primes and A - i in primes:
                return [i, A - i]

    @staticmethod
    def _sieve(n):
        """Return the set of all primes <= n (Sieve of Eratosthenes)."""
        is_prime = [True] * (n + 1)
        p = 2
        while p * p <= n:
            if is_prime[p]:
                for multiple in range(p * p, n + 1, p):
                    is_prime[multiple] = False
            p += 1
        return {p for p in range(2, n + 1) if is_prime[p]}
| 355 | 217 | 22 |
cca5f34f22ee3f2319c9fd98aa253d8baa0f62bb | 2,755 | py | Python | pyradox/modules/dense_modules.py | p4vv37/pyradox | cfc8c07d637a1cc189dd8d200f8a55d00405b81f | [
"MIT"
] | 61 | 2021-01-10T09:31:32.000Z | 2022-02-13T13:30:48.000Z | pyradox/modules/dense_modules.py | p4vv37/pyradox | cfc8c07d637a1cc189dd8d200f8a55d00405b81f | [
"MIT"
] | 1 | 2021-04-24T12:03:19.000Z | 2021-04-24T12:03:19.000Z | pyradox/modules/dense_modules.py | p4vv37/pyradox | cfc8c07d637a1cc189dd8d200f8a55d00405b81f | [
"MIT"
] | 6 | 2021-01-17T16:17:35.000Z | 2022-02-13T13:30:49.000Z | from tensorflow.keras import layers
from tensorflow.keras.activations import swish
from tensorflow.nn import relu6
class DenselyConnected(layers.Layer):
"""Densely Connected Layer followed by Batch Normalization (optional) and Dropout (optional)
Args:
units (int): dimensionality of the output space
batch_normalization (bool): whether to use Batch Normalization, default: False
dropout (float): the dropout rate, default: 0
kwargs (keyword arguments): the arguments for Dense Layer
"""
class DenseSkipConnection(layers.Layer):
"""Implementation of a skip connection for densely connected layer
Args:
units (int): dimensionality of the output space
batch_normalization (bool): whether to use Batch Normalization, default: False
dropout (float): the dropout rate, default: 0
activation (keras Activation): activation to be applied, default: relu
kwargs (keyword arguments): the arguments for Dense Layer
""" | 34.012346 | 96 | 0.626497 | from tensorflow.keras import layers
from tensorflow.keras.activations import swish
from tensorflow.nn import relu6
def relu(x):
    # Standard ReLU applied as a Keras layer call on tensor x.
    return layers.ReLU()(x)
def hard_sigmoid(x):
    # Piecewise-linear sigmoid approximation: relu6(x + 3) / 6.
    return layers.ReLU(6.0)(x + 3.0) * (1.0 / 6.0)
def hard_swish(x):
    # Hard swish: x * hard_sigmoid(x), built from Keras ops.
    return layers.Multiply()([hard_sigmoid(x), x])
class DenselyConnected(layers.Layer):
    """Densely Connected Layer followed by Batch Normalization (optional) and Dropout (optional)

    Args:
        units (int): dimensionality of the output space
        batch_normalization (bool): whether to use Batch Normalization, default: False
        dropout (float): the dropout rate, default: 0
        kwargs (keyword arguments): the arguments for Dense Layer
    """

    def __init__(self, units, batch_normalization=False, dropout=0, **kwargs):
        super().__init__()
        self.units = units
        self.batch_normalization = batch_normalization
        self.dropout = dropout
        self.kwargs = kwargs

    def __call__(self, inputs):
        # Dense -> (BatchNorm) -> (Dropout).
        x = layers.Dense(self.units, **self.kwargs)(inputs)
        if self.batch_normalization:
            x = layers.BatchNormalization()(x)
        # Fix: use `> 0` (as DenseSkipConnection does) instead of `!= 0`
        # so a negative rate can never reach layers.Dropout.
        if self.dropout > 0:
            x = layers.Dropout(self.dropout)(x)
        return x
class DenseSkipConnection(layers.Layer):
    """Implementation of a skip connection for densely connected layer
    Args:
        units (int): dimensionality of the output space
        batch_normalization (bool): whether to use Batch Normalization, default: False
        dropout (float): the dropout rate, default: 0
        activation (keras Activation): activation to be applied, default: relu
        kwargs (keyword arguments): the arguments for Dense Layer
    """
    def __init__(
        self, units, batch_normalization=False, dropout=0, activation="relu", **kwargs
    ):
        super().__init__()
        self.units = units
        self.batch_normalization = batch_normalization
        self.dropout = dropout
        self.activation = activation
        self.kwargs = kwargs
    def __call__(self, inputs):
        x = inputs
        # First dense projection; the pre-activation `x` is kept as the
        # residual branch for the later add.
        x = layers.Dense(self.units, **self.kwargs)(x)
        x1 = layers.Activation(self.activation)(x)
        if self.batch_normalization:
            x1 = layers.BatchNormalization()(x1)
        if self.dropout > 0:
            x1 = layers.Dropout(self.dropout)(x1)
        # Second dense branch, then residual add with the first projection,
        # followed by the same activation/BN/dropout stack.
        x1 = layers.Dense(self.units, **self.kwargs)(x1)
        x = layers.add([x, x1])
        x = layers.Activation(self.activation)(x)
        if self.batch_normalization:
            x = layers.BatchNormalization()(x)
        if self.dropout > 0:
            x = layers.Dropout(self.dropout)(x)
return x | 1,503 | 0 | 177 |
52267bbd0316b959d1d58b95d150066d249081dc | 3,581 | py | Python | crypto/chainfunc/solution/solver.py | vidner/codepwnda-ctf | 7e086044b753fe555b44395b79827d2f5b89da1d | [
"Unlicense"
] | 6 | 2021-02-18T15:07:55.000Z | 2022-02-04T01:38:10.000Z | crypto/chainfunc/solution/solver.py | vidner/codepwnda-ctf | 7e086044b753fe555b44395b79827d2f5b89da1d | [
"Unlicense"
] | null | null | null | crypto/chainfunc/solution/solver.py | vidner/codepwnda-ctf | 7e086044b753fe555b44395b79827d2f5b89da1d | [
"Unlicense"
] | null | null | null | import random
import hashlib
# inp = raw_input()
enc = "3f6f706b513c5f65557a6e5a5d736979666663686073677975547a7e516665776a68696a657d6963777572674f7269716f59666f5857605a20566e7b7b5c5a75636c60596b6f68607f76547970717f784d6e6073515a5c686a81756e74755c63667468595d7f76736d5e696e5e607d63"
# print enc
for i in range(1337,7331):
    # NOTE: Python 2 code (print statement, str.decode('hex')).
    # Brute-force every key encrypt() could have drawn from this range.
    ciper = enc.decode('hex')
    ciper = [ord(j) for j in ciper]
    # Invert the cipher layers: ff4 undoes the f4 permutation, ff2 undoes
    # the f2 mixing, and f1 presumably undoes itself (XOR keystream — confirm).
    x = ff4(f3(i,len(ciper)),ciper)
    x = f1(ff2(x))
    flag = check(x)
    if flag !="Wrong Flag !":
        print flag
        break
# print encrypt(raw_input())
# hashlib.md5("reverse_a_function_with_a_bit_of_cryptography_isnt_that_hard_or_is_it_?_but_if_u_got_this_message_u_r_awesome_:*").hexdigest()
# reverse_a_function_with_a_bit_of_cryptography_isnt_that_hard_or_is_it_?_but_if_u_got_this_message_u_r_awesome_:*
| 25.397163 | 233 | 0.443172 | import random
import hashlib
def f0(x):
    """Return the last decimal digit of Fibonacci number F(x+1) (F1 = F2 = 1)."""
    a, b = 0, 1
    while x:
        a, b = a + b, a
        x -= 1
    return (a + b) % 10
def f1(x):
    """XOR each character of x with the f0 keystream (position-indexed)."""
    mixed = [chr((ord(ch) ^ f0(i)) % 256) for i, ch in enumerate(x)]
    return "".join(mixed)
def f2(x):
    """Mix x with a stream of pi digits and de-interleave it.

    One decimal digit of pi is generated per input character (streaming
    spigot algorithm); the input is then reversed, odd positions get the
    digit added and even positions get it subtracted, and the result is
    returned as the odd-position chars followed by the even-position chars.
    """
    digits = []
    q, r, t, k, n, l = 1, 0, 1, 1, 3, 3
    while len(digits) < len(x):
        if 4 * q + r - t < n * t:
            digits.append(n)
            q, r, n = q * 10, 10 * (r - n * t), ((10 * (3 * q + r)) // t) - 10 * n
        else:
            q, r, t, n, l, k = (
                q * k,
                (2 * q + r) * l,
                t * l,
                (q * (7 * k) + 2 + (r * l)) // (t * l),
                l + 2,
                k + 1,
            )
    rev = x[::-1]
    evens = [chr(ord(rev[i]) - digits[i] % 256) for i in range(0, len(rev), 2)]
    odds = [chr((ord(rev[i]) + digits[i]) % 256) for i in range(1, len(rev), 2)]
    return odds + evens
def f3(x, z):
    """Return the Collatz-orbit residues of x modulo z, with even length.

    Walks the Collatz sequence from x down to 1, recording each value
    mod z (including the final 1); if that leaves an odd number of
    entries, the first one is dropped so callers can split the list in
    half.
    """
    residues = []
    while x != 1:
        residues.append(x % z)
        x = x // 2 if x % 2 == 0 else 3 * x + 1
    residues.append(1 % z)
    return residues[1:] if len(residues) % 2 else residues
def f4(x, y):
    """Swap pairs of positions in ``y`` (in place) and return it as a string.

    ``x`` is a list of indices; its first half is paired element-wise with
    its second half, and each pair of positions in ``y`` (a list of byte
    values) is swapped.

    Bug fix: ``len(x) / 2`` is float division on Python 3 and cannot be a
    slice bound; ``//`` restores the original Python 2 semantics.
    """
    half = len(x) // 2
    a, b = x[:half], x[half:]
    for i in range(len(a)):
        y[a[i]], y[b[i]] = y[b[i]], y[a[i]]
    return "".join(chr(v) for v in y)
def encrypt(x):
    # NOTE: Python 2 only (print statement, str.encode('hex')).
    # Draws a random key from the same range the solver brute-forces,
    # then applies the f1 -> f2 -> f4 layers and hex-encodes the result.
    key = random.randint(1337,7331)
    print key
    x = f2(f1(x))
    y = [ord(i) for i in x]
    return f4(f3(key,len(x)),y).encode("hex")
def check(x):
    """Compare md5(x) against the target digest and report success/failure."""
    digest = hashlib.md5(x).hexdigest()
    if digest == "b4fdeab83ba1cab8db95127657556a40":
        return "Correct, The Flag is : \ncodepwnda{%s}" % x
    return "Wrong Flag !"
def ff4(x, y):
    """Invert the position swap performed by ``f4``.

    Rebuilds the swap permutation described by ``x`` (first half paired
    with second half), then emits the bytes of ``y`` ordered by their
    original positions.  ``y`` is not modified.

    Bug fix: ``len(x) / 2`` is float division on Python 3; ``//`` restores
    the original Python 2 integer-slicing semantics.  The quadratic inner
    search of the original is replaced by an O(n) inverse-permutation map.
    """
    half = len(x) // 2
    a, b = x[:half], x[half:]
    p = list(range(len(y)))
    for i in range(len(a)):
        p[a[i]], p[b[i]] = p[b[i]], p[a[i]]
    # p[j] == i means scrambled position j holds original byte i, so
    # invert p and read y back in original-position order.
    inverse = [0] * len(y)
    for j, orig_pos in enumerate(p):
        inverse[orig_pos] = j
    return "".join(chr(y[j]) for j in inverse)
q, r, t, k, n, l, y = 1, 0, 1, 1, 3, 3, []
while (len(y)<len(x)):
if 4*q+r-t < n*t:
y.append(n)
nr = 10*(r-n*t)
n = ((10*(3*q+r))//t)-10*n
q *= 10
r = nr
else:
nr = (2*q+r)*l
nn = (q*(7*k)+2+(r*l))//(t*l)
q *= k
t *= l
l += 2
k += 1
n = nn
r = nr
v = x[:len(x)/2]
z = x[len(x)/2:]
p = []
for i in range(len(x)):
if i&1:
p.append(chr((256+ord(v[i/2])-y[i])%256))
else:
p.append(chr(ord(z[i/2])+y[i]))
return "".join(p[::-1])
# inp = raw_input()
enc = "3f6f706b513c5f65557a6e5a5d736979666663686073677975547a7e516665776a68696a657d6963777572674f7269716f59666f5857605a20566e7b7b5c5a75636c60596b6f68607f76547970717f784d6e6073515a5c686a81756e74755c63667468595d7f76736d5e696e5e607d63"
# print enc
for i in range(1337,7331):
    # NOTE: Python 2 code (print statement, str.decode('hex')).
    # Brute-force every key encrypt() could have drawn from this range.
    ciper = enc.decode('hex')
    ciper = [ord(j) for j in ciper]
    # Invert the cipher layers: ff4 undoes the f4 permutation, ff2 undoes
    # the f2 pi-digit mixing, and f1 undoes itself (XOR keystream).
    x = ff4(f3(i,len(ciper)),ciper)
    x = f1(ff2(x))
    flag = check(x)
    if flag !="Wrong Flag !":
        print flag
        break
# print encrypt(raw_input())
# hashlib.md5("reverse_a_function_with_a_bit_of_cryptography_isnt_that_hard_or_is_it_?_but_if_u_got_this_message_u_r_awesome_:*").hexdigest()
# reverse_a_function_with_a_bit_of_cryptography_isnt_that_hard_or_is_it_?_but_if_u_got_this_message_u_r_awesome_:*
| 2,528 | 0 | 225 |
389ff2f2dc854b2241051dcae4d45fe0968eda61 | 1,583 | py | Python | tests/run.py | alibaba/FederatedScope | fcf6d237624769ea094cfd68803901622f14fc23 | [
"Apache-2.0"
] | 9 | 2022-03-24T07:59:37.000Z | 2022-03-31T06:47:52.000Z | tests/run.py | alibaba/FederatedScope | fcf6d237624769ea094cfd68803901622f14fc23 | [
"Apache-2.0"
] | 1 | 2022-03-28T13:52:17.000Z | 2022-03-28T13:52:17.000Z | tests/run.py | alibaba/FederatedScope | fcf6d237624769ea094cfd68803901622f14fc23 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) Alibaba, Inc. and its affiliates.
import argparse
import os
import sys
import unittest
file_dir = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(file_dir)
# CLI options, parsed at import time so main() can read the module-level args.
parser = argparse.ArgumentParser('test runner')
parser.add_argument('--list_tests', action='store_true', help='list all tests')
parser.add_argument('--pattern', default='test_*.py', help='test file pattern')
parser.add_argument('--test_dir',
                    default='tests',
                    help='directory to be tested')
args = parser.parse_args()
if __name__ == '__main__':
    # NOTE(review): main() is not defined in this file excerpt — confirm it
    # is provided before this guard runs.
    main()
| 31.039216 | 79 | 0.589387 | # Copyright (c) Alibaba, Inc. and its affiliates.
import argparse
import os
import sys
import unittest
file_dir = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(file_dir)
# CLI options, parsed at import time so main() can read the module-level args.
parser = argparse.ArgumentParser('test runner')
parser.add_argument('--list_tests', action='store_true', help='list all tests')
parser.add_argument('--pattern', default='test_*.py', help='test file pattern')
parser.add_argument('--test_dir',
                    default='tests',
                    help='directory to be tested')
args = parser.parse_args()
def gather_test_cases(test_dir, pattern, list_tests):
test_suite = unittest.TestSuite()
discover = unittest.defaultTestLoader.discover(test_dir,
pattern=pattern,
top_level_dir=None)
for suite_discovered in discover:
for test_case in suite_discovered:
test_suite.addTest(test_case)
if hasattr(test_case, '__iter__'):
for subcase in test_case:
if list_tests:
print(subcase)
else:
if list_tests:
print(test_case)
return test_suite
def main():
    """Run (or just list) the unittest suite described by the CLI args."""
    runner = unittest.TextTestRunner()
    test_suite = gather_test_cases(os.path.abspath(args.test_dir),
                                   args.pattern, args.list_tests)
    if not args.list_tests:
        res = runner.run(test_suite)
        if not res.wasSuccessful():
            # Non-zero exit code so CI marks the build as failed.
            exit(1)
if __name__ == '__main__':
    main()
| 948 | 0 | 46 |
a89916277675d1edfd15479740762e0cc90acb4f | 953 | py | Python | twitoff/models.py | yestrella14/twitoff-ye | 2ebe67d1a822fedbe0877482bd9c7577d9a1ac13 | [
"MIT"
] | null | null | null | twitoff/models.py | yestrella14/twitoff-ye | 2ebe67d1a822fedbe0877482bd9c7577d9a1ac13 | [
"MIT"
] | null | null | null | twitoff/models.py | yestrella14/twitoff-ye | 2ebe67d1a822fedbe0877482bd9c7577d9a1ac13 | [
"MIT"
] | null | null | null | from flask_sqlalchemy import SQLAlchemy
DB = SQLAlchemy()
| 28.878788 | 79 | 0.655824 | from flask_sqlalchemy import SQLAlchemy
DB = SQLAlchemy()
class User(DB.Model):
id = DB.Column(DB.BigInteger, primary_key=True)
name = DB.Column(DB.String, nullable=False)
def __repr__(self):
return "<User: {}>".format(self.name)
class Tweet(DB.Model):
id = DB.Column(DB.BigInteger, primary_key=True)
text = DB.Column(DB.Unicode(300))
vect = DB.Column(DB.PickleType, nullable=False)
user_id = DB.Column(DB.BigInteger, DB.ForeignKey("user.id"),nullable=False)
user = DB.relationship("User", backref=DB.backref("tweets", lazy=True))
def __repr__(self):
return "<Tweet: {}>".format(self.text)
def insert_data():
Rafy= User(id=2, name="Rafy")
Arm = User(id=3, name="Armandina")
Yera = User(id=14, name="Yeraldina")
Alesa = User(id=22, name="Alessandra")
DB.session.add(Rafy)
DB.session.add(Arm)
DB.session.add(Yera)
DB.session.add(Alesa)
DB.session.commit()
| 372 | 453 | 69 |
8dead120547a2997e99d30a60ce6d94b80b6393c | 4,459 | py | Python | users/forms.py | bycristhian/psp | 019825e010386b6acc8c5466e7a6765218cb10d9 | [
"MIT"
] | 2 | 2020-09-04T17:06:41.000Z | 2020-10-05T01:46:20.000Z | users/forms.py | bycristhian/psp | 019825e010386b6acc8c5466e7a6765218cb10d9 | [
"MIT"
] | null | null | null | users/forms.py | bycristhian/psp | 019825e010386b6acc8c5466e7a6765218cb10d9 | [
"MIT"
] | null | null | null |
# Django
from django import forms
from django.utils.translation import gettext as _
from django.conf import settings
# Models
from django.contrib.auth.models import User
from users.models import ExperienceCompany, PositionCompany, Profile
# Utils
from users.utils import GENERES
from services.email import EmailService
import threading
import random
| 28.954545 | 100 | 0.629289 |
# Django
from django import forms
from django.utils.translation import gettext as _
from django.conf import settings
# Models
from django.contrib.auth.models import User
from users.models import ExperienceCompany, PositionCompany, Profile
# Utils
from users.utils import GENERES
from services.email import EmailService
import threading
import random
class UserUpdateForm(forms.ModelForm):
    """Profile-edit form: updates User fields plus the profile's genere."""
    genere = forms.CharField(max_length=12)
    username = forms.CharField(max_length=15, min_length=3)
    email = forms.EmailField(max_length=50, required=True)
    class Meta:
        model = User
        fields = ('email', 'first_name', 'last_name')
        widgets = {
            'email': forms.EmailInput(attrs={
                'class': 'form-control input-profile',
                'id': 'emailInput',
                'disabled': 'true'
            })
        }
    def clean_genere(self):
        """Accept only the generes declared in users.utils.GENERES."""
        genere = self.cleaned_data["genere"]
        if genere not in GENERES:
            raise forms.ValidationError(_("The genere isn't allowed"))
        return genere
    def clean_username(self):
        # Uniqueness and whitespace checks only run when the username
        # actually changed, so re-saving an unchanged form never fails.
        username = self.cleaned_data['username']
        if 'username' in self.changed_data:
            if User.objects.filter(username=username).exists():
                raise forms.ValidationError(_('The username already exists'))
            if ' ' in username:
                raise forms.ValidationError(_("The username can't have spaces in black"))
        return username
    def clean_email(self):
        # Same changed-only pattern as clean_username.
        email = self.cleaned_data['email']
        if 'email' in self.changed_data:
            if User.objects.filter(email=email).exists():
                raise forms.ValidationError(_('The email already exists'))
        return email
    def save(self, user):
        # NOTE(review): signature deliberately differs from ModelForm.save —
        # it takes the target user instead of using self.instance; callers
        # must pass the user being edited.
        data = self.cleaned_data
        profile = user.get_profile
        user.username = data['username']
        user.email = data['email']
        user.first_name = data['first_name']
        user.last_name = data['last_name']
        profile.genere = data['genere']
        user.save()
        profile.save()
        return user
class CreateUserForm(forms.ModelForm):
    """Registration form: creates the User + empty Profile and e-mails credentials."""
    confirm_password = forms.CharField(max_length=55)
    username = forms.CharField(min_length=3, max_length=15)
    email = forms.EmailField(max_length=55, required=True)
    class Meta:
        model = User
        fields = ('username', 'email', 'first_name', 'last_name', 'password')
    def clean_username(self):
        """Reject usernames containing whitespace."""
        username = self.cleaned_data['username']
        if ' ' in username:
            raise forms.ValidationError(_("The username can't have spaces in black"))
        return username
    def clean_email(self):
        """Reject e-mail addresses that are already registered."""
        email = self.cleaned_data['email']
        if User.objects.filter(email=email).exists():
            raise forms.ValidationError(_("The email you are trying to register is already in use"))
        return email
    def clean_confirm_password(self):
        """Check that both password fields match.

        Fixes two defects of the original implementation:
        * indexing ``cleaned_data['password']`` raised KeyError when the
          password field itself failed validation; ``.get()`` is safe;
        * the method returned None, silently emptying
          ``cleaned_data['confirm_password']`` — Django ``clean_<field>``
          methods must return the cleaned value.
        """
        confirm_password = self.cleaned_data['confirm_password']
        password = self.cleaned_data.get('password')
        if confirm_password != password:
            raise forms.ValidationError(_("Passwords do not match"))
        return confirm_password
    def save(self):
        """Create the user (with an empty Profile), then send the welcome e-mail."""
        data = self.cleaned_data
        data.pop('confirm_password')
        user = User.objects.create_user(**data)
        Profile.objects.create(user=user)
        data_email = {
            'to_user': user,
            'subject': 'Welcome to PSP',
            'template_name': 'users/registered_user.html',
            # SECURITY: the plaintext password is embedded in the e-mail;
            # consider sending a set-password link instead.
            'context': {
                'user': user,
                'password_user': data['password']
            }
        }
        if settings.DEBUG:
            EmailService.send_email_local(**data_email)
        else:
            EmailService.send_email_production(**data_email)
class CreateExperencieCompanyForm(forms.Form):
    """Adds a work-experience entry (company/position/years) for a user."""
    name_company = forms.CharField(max_length=30)
    position_company = forms.CharField(max_length=70)
    years_position = forms.IntegerField(min_value=1, max_value=120)
    def clean_position_company(self):
        """Resolve the submitted position name to its PositionCompany instance."""
        data = self.cleaned_data["position_company"]
        try:
            return PositionCompany.objects.get(name=data)
        except PositionCompany.DoesNotExist:
            raise forms.ValidationError(_("The position doesn't exists"))
    def save(self, user):
        # cleaned_data already holds model-ready values (the position was
        # resolved to an instance above), so it can be splatted into create().
        data = self.cleaned_data
        data["user"] = user
        ExperienceCompany.objects.create(**data)
1c657c0b6b5200566cc29f1fcebab6562ec885a9 | 843 | py | Python | tests/api/test_misc.py | weimens/seahub | 5ecf78ed7a2ddc72a23961804ee41be21c24893f | [
"Apache-2.0"
] | 420 | 2015-01-03T11:34:46.000Z | 2022-03-10T07:15:41.000Z | tests/api/test_misc.py | weimens/seahub | 5ecf78ed7a2ddc72a23961804ee41be21c24893f | [
"Apache-2.0"
] | 735 | 2015-01-04T21:22:51.000Z | 2022-03-31T09:26:07.000Z | tests/api/test_misc.py | weimens/seahub | 5ecf78ed7a2ddc72a23961804ee41be21c24893f | [
"Apache-2.0"
] | 379 | 2015-01-05T17:08:03.000Z | 2022-03-06T00:11:50.000Z | import json
import pytest
import requests
from django.test import TestCase
from seahub import settings
from tests.api.apitestbase import ApiTestBase
from tests.api.urls import SERVER_INFO_URL
| 30.107143 | 76 | 0.710558 | import json
import pytest
import requests
from django.test import TestCase
from seahub import settings
from tests.api.apitestbase import ApiTestBase
from tests.api.urls import SERVER_INFO_URL
class MiscApiTest(ApiTestBase, TestCase):
    """Checks for the unauthenticated server-info API endpoint."""
    def test_server_info(self):
        # Plain GET against a live server: version + feature flags expected.
        r = requests.get(SERVER_INFO_URL)
        r.raise_for_status()
        info = r.json()
        self.assertTrue('version' in info)
        self.assertTrue('seafile-basic' in info['features'])
        self.assertFalse('disable-sync-with-any-folder' in info['features'])
    @pytest.mark.xfail
    def test_server_info_with_disable_sync(self):
        # NOTE(review): marked xfail — presumably the runtime settings
        # change is not picked up by the already-loaded view; confirm.
        settings.DISABLE_SYNC_WITH_ANY_FOLDER = True
        resp = self.client.get('/api2/server-info/')
        info = json.loads(resp.content)
        self.assertTrue('disable-sync-with-any-folder' in info['features'])
| 529 | 97 | 23 |
0905002943804cede15361fe93d5932702ebcb3e | 1,230 | py | Python | mdf/tests/test_context.py | manahl/mdf | 4b2c78084467791ad883c0b4c53832ad70fc96ef | [
"MIT"
] | 100 | 2015-06-14T23:38:41.000Z | 2019-10-28T03:23:28.000Z | mdf/tests/test_context.py | ahlmss/mdf | 4b2c78084467791ad883c0b4c53832ad70fc96ef | [
"MIT"
] | 6 | 2016-12-28T11:22:12.000Z | 2018-12-02T23:01:08.000Z | mdf/tests/test_context.py | manahl/mdf | 4b2c78084467791ad883c0b4c53832ad70fc96ef | [
"MIT"
] | 40 | 2015-07-21T17:13:56.000Z | 2019-07-29T21:26:49.000Z | from datetime import datetime
from mdf import (
MDFContext,
varnode,
nansumnode,
evalnode,
now,
shift,
)
from numpy.testing.utils import assert_array_almost_equal
from pandas.core import datetools
import pandas as pd
import unittest
A = varnode()
@nansumnode
@evalnode
@evalnode
| 22.777778 | 96 | 0.599187 | from datetime import datetime
from mdf import (
MDFContext,
varnode,
nansumnode,
evalnode,
now,
shift,
)
from numpy.testing.utils import assert_array_almost_equal
from pandas.core import datetools
import pandas as pd
import unittest
A = varnode()
@nansumnode
def B():
return A() + now().year - 1970
@evalnode
def C():
while True:
yield shift(B, A, [1, 2, 3])
@evalnode
def D():
while True:
yield shift(B, shift_sets=[{A : 1}, {A : 2}, {A : 3}])
class ContextTest(unittest.TestCase):
def setUp(self):
self.daterange = pd.bdate_range(datetime(1970, 1, 1), periods=3, freq=datetools.yearEnd)
self.ctx = MDFContext()
def test_shift(self):
# C yields a value based on shifted contexts
res = []
for t in self.daterange:
self.ctx.set_date(t)
res.append(self.ctx[C])
assert_array_almost_equal(res, [(1,2,3), (3,5,7), (6,9,12)])
def test_shift2(self):
# C yields a value based on shifted contexts
res = []
for t in self.daterange:
self.ctx.set_date(t)
res.append(self.ctx[D])
assert_array_almost_equal(res, [(1,2,3), (3,5,7), (6,9,12)])
| 737 | 16 | 169 |
cbe87168ed3aaf354d439a33c7e9f73c3e98e082 | 5,732 | py | Python | analysis/graph/messages.py | giovannistanco/iot-trust-task-alloc | 47e0c8186db32ecd563241d05ebdaaf23713a83f | [
"MIT"
] | 8 | 2021-03-10T17:12:47.000Z | 2021-12-09T13:30:56.000Z | analysis/graph/messages.py | giovannistanco/iot-trust-task-alloc | 47e0c8186db32ecd563241d05ebdaaf23713a83f | [
"MIT"
] | 12 | 2020-06-29T13:49:47.000Z | 2022-02-28T13:01:12.000Z | analysis/graph/messages.py | giovannistanco/iot-trust-task-alloc | 47e0c8186db32ecd563241d05ebdaaf23713a83f | [
"MIT"
] | 3 | 2021-05-04T16:24:08.000Z | 2021-12-16T16:47:23.000Z | #!/usr/bin/env python3
from __future__ import annotations
import math
import pathlib
from ipaddress import IPv6Address
from pprint import pprint
from datetime import datetime, timedelta
import numpy as np
import scipy.stats as stats
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from analysis.parser.pyshark_pcap import main as parse
from analysis.graph.util import savefig
plt.rcParams['text.usetex'] = True
plt.rcParams['font.size'] = 12
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Messages sent and received')
parser.add_argument('--log-dir', type=pathlib.Path, default=["results"], nargs='+', help='The directory which contains the log output')
parser.add_argument("--tx-ymax", type=float, default=None, help="The tx ymax")
parser.add_argument("--rx-ymax", type=float, default=None, help="The rx ymax")
args = parser.parse_args()
for log_dir in args.log_dir:
print(f"Graphing for {log_dir}")
main(log_dir, args.tx_ymax, args.rx_ymax)
| 33.91716 | 181 | 0.594906 | #!/usr/bin/env python3
from __future__ import annotations
import math
import pathlib
from ipaddress import IPv6Address
from pprint import pprint
from datetime import datetime, timedelta
import numpy as np
import scipy.stats as stats
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from analysis.parser.pyshark_pcap import main as parse
from analysis.graph.util import savefig
plt.rcParams['text.usetex'] = True
plt.rcParams['font.size'] = 12
def packet_length(packet) -> int:
# Count the length of the fragments, if this packet was fragmented
if '6lowpan' in packet and hasattr(packet['6lowpan'], "reassembled_length"):
return int(packet['6lowpan'].reassembled_length)
else:
return int(packet.length)
def main(log_dir: pathlib.Path, tx_ymax: Optional[float], rx_ymax: Optional[float]):
(log_dir / "graphs").mkdir(parents=True, exist_ok=True)
results = parse(log_dir, quiet=True)
XYs_tx = {
hostname: [
(name, [datetime.fromtimestamp(float(value.sniff_timestamp)) for value in values], [packet_length(value) for value in values])
for (name, values)
in result.tx.items()
]
for (hostname, result)
in results.items()
}
XYs_rx = {
hostname: [
(name, [datetime.fromtimestamp(float(value.sniff_timestamp)) for value in values], [packet_length(value) for value in values])
for (name, values)
in result.rx.items()
]
for (hostname, result)
in results.items()
}
to_graph = {
("tx", tx_ymax): XYs_tx,
("rx", rx_ymax): XYs_rx,
}
bin_width = timedelta(minutes=6)
min_time = min(r.min_snift_time for r in results.values())
max_time = min(r.max_snift_time for r in results.values())
min_time = datetime.fromtimestamp(min_time)
max_time = datetime.fromtimestamp(max_time)
bins = [min_time]
while bins[-1] + bin_width < max_time:
bins.append(bins[-1] + bin_width)
bins.append(max_time)
# Make the colors the same between rx and tx graphs
kinds1 = {name for nvs in XYs_tx.values() for (name, times, lengths) in nvs}
kinds2 = {name for nvs in XYs_rx.values() for (name, times, lengths) in nvs}
kinds = kinds1 | kinds2
ckind = {
kind: plt.cm.get_cmap('tab20')(i)
for i, kind in enumerate(sorted(kinds))
}
for ((name, ymax), XYs) in to_graph.items():
for (hostname, metric_values) in XYs.items():
fig = plt.figure()
ax = fig.gca()
labels, values, weights = zip(*sorted(metric_values, key=lambda x: x[0]))
colors = [ckind[label] for label in labels]
ax.hist(values, bins=bins, histtype='bar', stacked=True, label=labels, weights=weights, color=colors, rwidth=1)
ax.set_xlabel('Time')
ax.set_ylabel(f'Message Length (bytes) {"Sent" if name == "tx" else "Received"} During Window')
ax.set_ylim(0, ymax)
ax.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))
ax.legend(ncol=3, loc="center", fontsize="small", bbox_to_anchor=(0.5,1.125))
savefig(fig, log_dir / "graphs" / f"{name}-by-type-{hostname}.pdf")
# Table of the percentage of bytes in each category
hostnames = sorted(set(XYs_tx.keys()) | set(XYs_rx.keys()))
for hostname in hostnames:
#print(hostname)
log_file = log_dir / "graphs" / f"{hostname}-messages.tex"
with open(log_file, "w") as f:
print("\\begin{table}[t]", file=f)
print("\\centering", file=f)
print("\\begin{tabular}{l S[table-format=6] S[table-format=3.1] S[table-format=6] S[table-format=3.1]}", file=f)
print(" \\toprule", file=f)
print(" ~ & \\multicolumn{2}{c}{Tx} & \\multicolumn{2}{c}{Rx} \\\\", file=f)
print(" Category & {(\\si{\\byte})} & {(\\%)} & {(\\si{\\byte})} & {(\\%)} \\\\", file=f)
print(" \\midrule", file=f)
XY_tx = XYs_tx.get(hostname, [])
XY_rx = XYs_rx.get(hostname, [])
XY_tx = {
name: sum(lengths)
for (name, dates, lengths) in XY_tx
}
total_tx = sum(XY_tx.values())
XY_rx = {
name: sum(lengths)
for (name, dates, lengths) in XY_rx
}
total_rx = sum(XY_rx.values())
names = sorted(set(XY_tx.keys()) | set(XY_rx.keys()))
for name in names:
print(f"{name} & {XY_tx.get(name, 0)} & {round(100*XY_tx.get(name, 0)/total_tx,1)} & {XY_rx.get(name, 0)} & {round(100*XY_rx.get(name, 0)/total_rx,1)} \\\\", file=f)
print("\\midrule", file=f)
print(f"Total & {total_tx} & 100 & {total_rx} & 100 \\\\", file=f)
print("\\bottomrule", file=f)
print("\\end{tabular}", file=f)
print(f"\\caption{{Message tx and rx for {hostname}}}", file=f)
#print("\\label{tab:ram-flash-usage}", file=f)
print("\\end{table}", file=f)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Messages sent and received')
parser.add_argument('--log-dir', type=pathlib.Path, default=["results"], nargs='+', help='The directory which contains the log output')
parser.add_argument("--tx-ymax", type=float, default=None, help="The tx ymax")
parser.add_argument("--rx-ymax", type=float, default=None, help="The rx ymax")
args = parser.parse_args()
for log_dir in args.log_dir:
print(f"Graphing for {log_dir}")
main(log_dir, args.tx_ymax, args.rx_ymax)
| 4,629 | 0 | 46 |
928224abb04ea9ced0d80a67994664dec958690d | 1,484 | py | Python | solutions/056.merge-intervals/merge-intervals.py | wangsongiam/leetcode | 96ff21bca1871816ae51fccb1fa13587b378dc50 | [
"MIT"
] | 3 | 2018-11-25T15:19:57.000Z | 2019-09-28T03:01:11.000Z | solutions/056.merge-intervals/merge-intervals.py | casprwang/leetcode | 96ff21bca1871816ae51fccb1fa13587b378dc50 | [
"MIT"
] | null | null | null | solutions/056.merge-intervals/merge-intervals.py | casprwang/leetcode | 96ff21bca1871816ae51fccb1fa13587b378dc50 | [
"MIT"
] | 3 | 2018-02-11T20:23:44.000Z | 2020-06-05T15:39:56.000Z | # Definition for an interval.
# class Interval:
# def __init__(self, s=0, e=0):
# self.start = s
# self.end = e
| 24.733333 | 52 | 0.421159 | # Definition for an interval.
# class Interval:
# def __init__(self, s=0, e=0):
# self.start = s
# self.end = e
class Solution:
def merge(self, intervals):
"""
:type intervals: List[Interval]
:rtype: List[Interval]
"""
intervals.sort(key=lambda x: x.start)
ret = []
if not intervals:
return ret
start = intervals[0].start
end = intervals[0].end
for i in range(len(intervals)):
if i == 0 and i != len(intervals) - 1:
continue
elif i == 0 and i == len(intervals) - 1:
ret += [[start, end]]
return ret
cur_start = intervals[i].start
cur_end = intervals[i].end
"""
1 .merge second start and reset end
1 3
start end
2 4
cur_start cur_end
2. push first and reset start & end
1 2
3 4
"""
if cur_start <= end:
end = max(end, cur_end)
if i == len(intervals) - 1:
ret += [[start, end]]
continue
elif cur_start >= end:
ret += [[start, end]]
start = cur_start
end = cur_end
if i == len(intervals) - 1:
ret += [[start, end]]
continue
return ret
| 0 | 1,329 | 23 |
1c45a859a5271dffa80a1d5cc1763cd482c9913a | 2,912 | py | Python | test/integration_tests/test_roles.py | poldracklab/bids-core | b87a1ef2d3e1c5a79a98c0f0ba82b1b2634bce0e | [
"MIT"
] | 1 | 2016-03-09T01:24:02.000Z | 2016-03-09T01:24:02.000Z | test/integration_tests/test_roles.py | poldracklab/bids-core | b87a1ef2d3e1c5a79a98c0f0ba82b1b2634bce0e | [
"MIT"
] | 15 | 2016-02-17T19:11:32.000Z | 2018-04-12T23:33:06.000Z | test/integration_tests/test_roles.py | poldracklab/bids-core | b87a1ef2d3e1c5a79a98c0f0ba82b1b2634bce0e | [
"MIT"
] | 4 | 2017-04-05T17:34:59.000Z | 2018-01-22T01:40:51.000Z | import requests
import os
import json
import time
from nose.tools import with_setup
base_url = 'http://localhost:8080/api'
adm_user = 'test@user.com'
user = 'other@user.com'
test_data = type('',(object,),{})()
@with_setup(setup_db, teardown_db)
| 25.321739 | 71 | 0.595467 | import requests
import os
import json
import time
from nose.tools import with_setup
base_url = 'http://localhost:8080/api'
adm_user = 'test@user.com'
user = 'other@user.com'
test_data = type('',(object,),{})()
def setup_db():
global session
session = requests.Session()
# all the requests will be performed as root
session.params = {
'user': adm_user,
'root': True
}
# Create a group
test_data.group_id = 'test_group_' + str(int(time.time()*1000))
payload = {
'_id': test_data.group_id
}
payload = json.dumps(payload)
r = session.post(base_url + '/groups', data=payload)
assert r.ok
payload = {
'_id': user,
'firstname': 'Other',
'lastname': 'User',
}
payload = json.dumps(payload)
r = session.post(base_url + '/users', data=payload)
assert r.ok
session.params = {}
def teardown_db():
session.params = {
'user': adm_user,
'root': True
}
r = session.delete(base_url + '/groups/' + test_data.group_id)
assert r.ok
r = session.delete(base_url + '/users/' + user)
assert r.ok
def _build_url_and_payload(method, user, access, site='local'):
url = os.path.join(base_url, 'groups', test_data.group_id, 'roles')
if method == 'POST':
payload = {
'_id': user,
'site': site,
'access': access
}
return url, json.dumps(payload)
else:
return os.path.join(url, site, user), None
@with_setup(setup_db, teardown_db)
def test_roles():
session.params = {
'user': adm_user
}
url_get, _ = _build_url_and_payload('GET', user, None)
r = session.get(url_get)
assert r.status_code == 404
url_post, payload = _build_url_and_payload('POST', user, 'rw')
r = session.post(url_post, data=payload)
assert r.ok
r = session.get(url_get)
assert r.ok
content = json.loads(r.content)
assert content['access'] == 'rw'
assert content['_id'] == user
session.params = {
'user': user
}
url_get_not_auth, _ = _build_url_and_payload('GET', adm_user, None)
r = session.get(url_get_not_auth)
assert r.status_code == 403
session.params = {
'user': adm_user
}
payload = json.dumps({'access':'admin'})
r = session.put(url_get, data=payload)
assert r.ok
session.params = {
'user': user
}
r = session.get(url_get_not_auth)
assert r.ok
session.params = {
'user': adm_user
}
payload = json.dumps({'access':'rw'})
r = session.put(url_get, data=payload)
assert r.ok
session.params = {
'user': user
}
r = session.get(url_get_not_auth)
assert r.status_code == 403
session.params = {
'user': adm_user
}
r = session.delete(url_get)
assert r.ok
r = session.get(url_get)
assert r.status_code == 404
| 2,573 | 0 | 91 |
7b0d0d6d1fd9941ba19251b6de0c2755dddee22a | 1,458 | py | Python | src/cs165/passport_holder/views.py | kenserr/cs165 | f6f818ae1c05fe492817da8dc460917a0b3020d2 | [
"bzip2-1.0.6"
] | null | null | null | src/cs165/passport_holder/views.py | kenserr/cs165 | f6f818ae1c05fe492817da8dc460917a0b3020d2 | [
"bzip2-1.0.6"
] | null | null | null | src/cs165/passport_holder/views.py | kenserr/cs165 | f6f818ae1c05fe492817da8dc460917a0b3020d2 | [
"bzip2-1.0.6"
] | null | null | null | from django.shortcuts import render, get_object_or_404, redirect
from .models import passport_holder
from .forms import passport_holder_form
# Create your views here. | 26.509091 | 66 | 0.768176 | from django.shortcuts import render, get_object_or_404, redirect
from .models import passport_holder
from .forms import passport_holder_form
# Create your views here.
def passport_holder_detail_view(request, id):
obj = passport_holder.objects.get(passport_no=id)
context = {
"object": obj
}
return render(request, "passport_holder/detail.html", context)
def passport_holder_create_view(request):
form = passport_holder_form(request.POST or None)
if form.is_valid():
form.save()
return redirect(passport_holder_list_view)
context = {
'form': form
}
return render(request,"passport_holder/create.html", context)
def passport_holder_list_view(request):
queryset = passport_holder.objects.all()
context = {
"object_list" : queryset
}
return render(request, "passport_holder/list.html", context)
def passport_holder_update_view(request, id):
obj = get_object_or_404(passport_holder, passport_no=id)
form = passport_holder_form(request.POST or None, instance = obj)
if form.is_valid():
form.save()
return redirect(passport_holder_list_view)
context = {
"form": form
}
return render(request, "passport_holder/update.html", context)
def passport_holder_delete_view(request, id):
obj = get_object_or_404(passport_holder, passport_no=id)
if request.method == 'POST':
obj.delete()
return redirect(passport_holder_list_view)
context = {
"obj" : obj
}
return render(request, "passport_holder/delete.html", context) | 1,178 | 0 | 114 |
4048a5206d42424533da25b4a6ffb0c2d6c8b696 | 2,401 | py | Python | global_finprint/annotation/migrations/0019_drop_obs_columns.py | GlobalFinPrint/global_finprint | 8a91ceaaed42aaa716d8c9f27518ba673ebf351c | [
"Apache-2.0"
] | null | null | null | global_finprint/annotation/migrations/0019_drop_obs_columns.py | GlobalFinPrint/global_finprint | 8a91ceaaed42aaa716d8c9f27518ba673ebf351c | [
"Apache-2.0"
] | 6 | 2020-06-05T18:42:32.000Z | 2022-01-13T00:48:57.000Z | global_finprint/annotation/migrations/0019_drop_obs_columns.py | GlobalFinPrint/global_finprint | 8a91ceaaed42aaa716d8c9f27518ba673ebf351c | [
"Apache-2.0"
] | null | null | null | from __future__ import unicode_literals
import config.current_user
from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
| 28.583333 | 122 | 0.557684 | from __future__ import unicode_literals
import config.current_user
from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('annotation', '0018_migrate_obs_to_event'),
]
operations = [
migrations.RemoveField(
model_name='observationimage',
name='observation',
),
migrations.RemoveField(
model_name='observationimage',
name='user',
),
migrations.RemoveField(
model_name='observationimage',
name='video',
),
migrations.RemoveField(
model_name='siteimage',
name='user',
),
migrations.RemoveField(
model_name='siteimage',
name='video',
),
migrations.RemoveField(
model_name='animalobservation',
name='behaviors',
),
migrations.RemoveField(
model_name='animalobservation',
name='features',
),
migrations.RemoveField(
model_name='observation',
name='extent',
),
migrations.RemoveField(
model_name='observation',
name='initial_observation_time',
),
migrations.AlterField(
model_name='animalobservation',
name='sex',
field=models.CharField(choices=[('F', 'Female'), ('M', 'Male'), ('U', 'Unknown')], default='U', max_length=1),
),
migrations.AlterField(
model_name='observation',
name='comment',
field=models.TextField(null=True),
),
migrations.AlterField(
model_name='observation',
name='type',
field=models.CharField(choices=[('I', 'Of interest'), ('A', 'Animal')], default='I', max_length=1),
),
migrations.DeleteModel(
name='AnimalBehavior',
),
migrations.DeleteModel(
name='ObservationFeature',
),
migrations.DeleteModel(
name='ObservationImage',
),
migrations.DeleteModel(
name='SiteImage',
),
]
| 0 | 2,126 | 23 |
6b50a35bca39b52edd97dc306a00624664338d72 | 645 | py | Python | launcher_pins.py | videoman/T-ShirtCannon | b88553a6ac0aeac2a2c61a92aed17da093bf72b7 | [
"BSD-3-Clause"
] | null | null | null | launcher_pins.py | videoman/T-ShirtCannon | b88553a6ac0aeac2a2c61a92aed17da093bf72b7 | [
"BSD-3-Clause"
] | null | null | null | launcher_pins.py | videoman/T-ShirtCannon | b88553a6ac0aeac2a2c61a92aed17da093bf72b7 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
# This file defines the output and input pins for the T-Shirt Launcher
# Define our relay outputs
# Each output is triggered via a ULN2803 Darlington Driver
TSHIRT1=4
TSHIRT2=17
TSHIRT3=22
TSHIRT4=18
# Define our LED outputs
# LED status for each barrel
LED1=2
LED2=14
LED3=3
LED4=15
# Either for errors or for the championship games- confetti
LED5=24
# Define our input buttons here
# BUTTON1 is for Selecting
BUTTON1=27
# BUTTON2 is for firing a t-shirt
BUTTON2=23
# This is how long we hold open the solenoid
# This build now uses Toro TPV100 Series Sprinkler Valves (175PSI with 1000PSI burst)
valve_sleep_time=.16
| 20.15625 | 85 | 0.773643 | #!/usr/bin/python
# This file defines the output and input pins for the T-Shirt Launcher
# Define our relay outputs
# Each output is triggered via a ULN2803 Darlington Driver
TSHIRT1=4
TSHIRT2=17
TSHIRT3=22
TSHIRT4=18
# Define our LED outputs
# LED status for each barrel
LED1=2
LED2=14
LED3=3
LED4=15
# Either for errors or for the championship games- confetti
LED5=24
# Define our input buttons here
# BUTTON1 is for Selecting
BUTTON1=27
# BUTTON2 is for firing a t-shirt
BUTTON2=23
# This is how long we hold open the solenoid
# This build now uses Toro TPV100 Series Sprinkler Valves (175PSI with 1000PSI burst)
valve_sleep_time=.16
| 0 | 0 | 0 |
53c94e390b69858f0922b4ee1a0ba97d14588749 | 4,362 | py | Python | aws_instance/backend.py | geekysuavo/aws-instance-tool | 811c8a1a476916b7dad5f5287f83f7fd56280029 | [
"MIT"
] | null | null | null | aws_instance/backend.py | geekysuavo/aws-instance-tool | 811c8a1a476916b7dad5f5287f83f7fd56280029 | [
"MIT"
] | null | null | null | aws_instance/backend.py | geekysuavo/aws-instance-tool | 811c8a1a476916b7dad5f5287f83f7fd56280029 | [
"MIT"
] | null | null | null |
import os
import yaml
import functools
import subprocess
import boto3.ec2
from botocore.exceptions import ClientError
from typing import Dict, List, Tuple
from dataclasses import dataclass
# use a global EC2 handle.
ec2 = boto3.client("ec2")
def run(command) -> Dict:
"""Execute a command after an initial dry-run"""
try:
command(DryRun=True)
except ClientError as err:
if "DryRunOperation" not in str(err):
raise
return command(DryRun=False)
def start(ids: List[str]):
"""Start one or more instances"""
return functools.partial(ec2.start_instances, InstanceIds=ids)
def stop(ids: List[str]):
"""Stop one or more instances"""
return functools.partial(ec2.stop_instances, InstanceIds=ids)
def describe(ids: List[str]):
"""Describe one or more instances"""
return functools.partial(ec2.describe_instances, InstanceIds=ids)
@dataclass(frozen=True)
@dataclass(frozen=True)
| 27.433962 | 72 | 0.582072 |
import os
import yaml
import functools
import subprocess
import boto3.ec2
from botocore.exceptions import ClientError
from typing import Dict, List, Tuple
from dataclasses import dataclass
# use a global EC2 handle.
ec2 = boto3.client("ec2")
def run(command) -> Dict:
"""Execute a command after an initial dry-run"""
try:
command(DryRun=True)
except ClientError as err:
if "DryRunOperation" not in str(err):
raise
return command(DryRun=False)
def start(ids: List[str]):
"""Start one or more instances"""
return functools.partial(ec2.start_instances, InstanceIds=ids)
def stop(ids: List[str]):
"""Stop one or more instances"""
return functools.partial(ec2.stop_instances, InstanceIds=ids)
def describe(ids: List[str]):
"""Describe one or more instances"""
return functools.partial(ec2.describe_instances, InstanceIds=ids)
@dataclass(frozen=True)
class Instance:
name: str
inst_id: str
@property
def address(self) -> str:
"""IP Address"""
response = run(describe(ids=[self.inst_id]))
for res in response["Reservations"]:
inst_info = res["Instances"][0]
if inst_info["InstanceId"] == self.inst_id:
return inst_info["PublicIpAddress"]
def start(self) -> Tuple[str, str]:
"""Start the instance"""
response = run(start(ids=[self.inst_id]))
prev = response["StartingInstances"][0]["PreviousState"]["Name"]
curr = response["StartingInstances"][0]["CurrentState"]["Name"]
return (prev, curr)
def stop(self) -> Tuple[str, str]:
"""Stop the instance"""
response = run(stop(ids=[self.inst_id]))
prev = response["StoppingInstances"][0]["PreviousState"]["Name"]
curr = response["StoppingInstances"][0]["CurrentState"]["Name"]
return (prev, curr)
@dataclass(frozen=True)
class Config:
ident: str
username: str
instances: Dict[str, str]
default_port: int = 9999
def __len__(self) -> int:
"""Number of instances"""
return len(self.instances)
def __iter__(self):
"""Iterate over the instances"""
return iter(self.instances.items())
def __contains__(self, name: str) -> bool:
"""Check if an instance name is supported"""
return name in self.instances
def __getitem__(self, name: str) -> Instance:
"""Get an instance"""
return Instance(name, self.instances[name])
def start(self, name: str) -> Tuple[str, str]:
"""Start an instance"""
return self[name].start()
def stop(self, name: str) -> Tuple[str, str]:
"""Stop an instance"""
return self[name].stop()
def ssh(self, name: str):
"""Start a shell on an instance"""
ip = self[name].address
subprocess.run([
"ssh", "-i", self.ident,
f"{self.username}@{ip}",
])
def tunnel(self, name: str, port: int):
"""Connect to a port on the instance"""
ip = self[name].address
subprocess.run([
"ssh", "-i", self.ident, "-N", "-L",
f"{port}:localhost:{port}",
f"{self.username}@{ip}",
])
@property
def names(self) -> Tuple[str, ...]:
"""Supported instance names"""
return tuple(self.instances.keys())
@property
def instance_ids(self) -> Tuple[str, ...]:
"""Suported instance ids"""
return tuple(self.instances.values())
@property
def states(self) -> Tuple[str, ...]:
"""Instance statuses"""
response = run(describe(ids=list(self.instance_ids)))
states = []
for name, inst_id in self:
for res in response["Reservations"]:
inst_info = res["Instances"][0]
if inst_info["InstanceId"] == inst_id:
state = inst_info["State"]["Name"]
states.append(state)
return tuple(states)
@classmethod
def load(cls):
"""Instantiate a Config from its yaml source"""
filename = os.path.join(
os.path.expanduser("~"),
".config",
"aws-instance.yaml",
)
with open(filename, "rt", encoding="utf-8") as fh:
config = yaml.safe_load(fh)
return cls(**config)
| 0 | 3,364 | 44 |
8c004459e0c60e668996195da9668c54fa852bd0 | 138 | py | Python | scripts/npc/autogen_naomi.py | hsienjan/SideQuest-Server | 3e88debaf45615b759d999255908f99a15283695 | [
"MIT"
] | null | null | null | scripts/npc/autogen_naomi.py | hsienjan/SideQuest-Server | 3e88debaf45615b759d999255908f99a15283695 | [
"MIT"
] | null | null | null | scripts/npc/autogen_naomi.py | hsienjan/SideQuest-Server | 3e88debaf45615b759d999255908f99a15283695 | [
"MIT"
] | null | null | null | # Character field ID when accessed: 600020000
# ObjectID: 1000000
# ParentID: 9201051
# Object Position X: -937
# Object Position Y: 2658
| 23 | 45 | 0.753623 | # Character field ID when accessed: 600020000
# ObjectID: 1000000
# ParentID: 9201051
# Object Position X: -937
# Object Position Y: 2658
| 0 | 0 | 0 |
3f91c07a9469a5f52847fe24a2e19954225a1ab5 | 353 | py | Python | interview-test/src/commands/list_command.py | warstick/phyton | d8b0ee971808af5a0f519ec123940569040846f3 | [
"MIT"
] | null | null | null | interview-test/src/commands/list_command.py | warstick/phyton | d8b0ee971808af5a0f519ec123940569040846f3 | [
"MIT"
] | null | null | null | interview-test/src/commands/list_command.py | warstick/phyton | d8b0ee971808af5a0f519ec123940569040846f3 | [
"MIT"
] | null | null | null | """
IDE: Visual Code
Author: Mani
Date: 01-05-2020
"""
from module.module import Module
# This method returns the information about the installed modules / packages
| 23.533333 | 80 | 0.623229 | """
IDE: Visual Code
Author: Mani
Date: 01-05-2020
"""
from module.module import Module
class ListCommand:
# This method returns the information about the installed modules / packages
def execute(self, args):
result = dict()
for m in Module.getInstalled():
result[m.getName()] = []
return result
| 126 | -3 | 49 |
52bdc212ef837796a1b53425e694e3c862058e72 | 600 | py | Python | questions/14.py | xiaochus/LeetCode | bf4d7a39fd6b0fb2682490f90999cb218a910e14 | [
"MIT"
] | 1 | 2018-06-18T04:40:23.000Z | 2018-06-18T04:40:23.000Z | questions/14.py | xiaochus/LeetCode | bf4d7a39fd6b0fb2682490f90999cb218a910e14 | [
"MIT"
] | null | null | null | questions/14.py | xiaochus/LeetCode | bf4d7a39fd6b0fb2682490f90999cb218a910e14 | [
"MIT"
] | 1 | 2020-02-03T12:58:26.000Z | 2020-02-03T12:58:26.000Z | """14. Longest Common Prefix
Write a function to find the longest common prefix string amongst
an array of strings.
"""
| 22.222222 | 65 | 0.506667 | """14. Longest Common Prefix
Write a function to find the longest common prefix string amongst
an array of strings.
"""
class Solution:
def longestCommonPrefix(self, strs):
"""
:type strs: List[str]
:rtype: str
"""
if not strs:
return ""
if len(strs) == 1:
return strs[0]
prefix = strs[0]
for i in range(1, len(strs)):
while not strs[i].startswith(prefix):
prefix = prefix[: len(prefix) - 1]
if not prefix:
return ""
return prefix
| 0 | 456 | 23 |
4cfa9bf59c00d3403e437d983b24edcae02cc734 | 466 | py | Python | Module 1/task1_2.py | bondss/python_scripts | e1fd96b15c22811e43f665ec354563f35522d2ad | [
"Apache-2.0"
] | null | null | null | Module 1/task1_2.py | bondss/python_scripts | e1fd96b15c22811e43f665ec354563f35522d2ad | [
"Apache-2.0"
] | null | null | null | Module 1/task1_2.py | bondss/python_scripts | e1fd96b15c22811e43f665ec354563f35522d2ad | [
"Apache-2.0"
] | null | null | null | # TASK:
# Вхідні дані: 3 дійсних числа -- аргументи командного рядка.
# Вихідні дані: результат обчислення формули
# SOLUTION:
# Importing modules to work with embedded functions
import sys
import math
# Assigning variables values of three cmd-line arguments
x = float(sys.argv[1])
mu = float(sys.argv[2])
sigma = float(sys.argv[3])
# Calculate given formula
result = (1/(sigma*math.sqrt(2*math.pi)))*math.exp(-((math.pow((x-mu), 2)/2*sigma**2)))
print(result)
| 24.526316 | 87 | 0.72103 | # TASK:
# Вхідні дані: 3 дійсних числа -- аргументи командного рядка.
# Вихідні дані: результат обчислення формули
# SOLUTION:
# Importing modules to work with embedded functions
import sys
import math
# Assigning variables values of three cmd-line arguments
x = float(sys.argv[1])
mu = float(sys.argv[2])
sigma = float(sys.argv[3])
# Calculate given formula
result = (1/(sigma*math.sqrt(2*math.pi)))*math.exp(-((math.pow((x-mu), 2)/2*sigma**2)))
print(result)
| 0 | 0 | 0 |
13a30cd02d37af150e969fe94bd9fdd6a3e486b5 | 3,562 | py | Python | GAScore/testbench/handler_wrapper.py | sharm294/shoal | db7dd08a70882585fb9740a39b57b4b7a48b3081 | [
"MIT"
] | 1 | 2021-04-12T06:41:33.000Z | 2021-04-12T06:41:33.000Z | GAScore/testbench/handler_wrapper.py | UofT-HPRC/shoal | db7dd08a70882585fb9740a39b57b4b7a48b3081 | [
"MIT"
] | null | null | null | GAScore/testbench/handler_wrapper.py | UofT-HPRC/shoal | db7dd08a70882585fb9740a39b57b4b7a48b3081 | [
"MIT"
] | null | null | null | import os
from sonar.testbench import Testbench, Module, TestVector, Thread
from sonar.interfaces import AXIS, SAXILite
from sonar_strToInt import strToInt
handler_wrapper = Testbench.default('handler_wrapper')
filepath = os.path.join(os.path.dirname(__file__), 'build/handler_wrapper/')
dut = Module.default("DUT")
dut.add_clock_port('clock', '20ns')
dut.add_reset_port('reset_n')
dut.add_port('address_offset', 'input', 16)
# dut.add_port('interrupt_0', 'output')
# dut.add_port('interrupt_1', 'output')
axis_handler = AXIS('axis_handler', 'slave', 'clock', c_struct='axis_word', c_stream='uaxis_n')
axis_handler.port.init_channels('default', 64, False)
dut.add_interface(axis_handler)
ctrl_bus_0 = SAXILite('s_axi_ctrl_bus_00', 'clock', 'reset_n')
ctrl_bus_0.add_register('config', 0x10)
ctrl_bus_0.add_register('arg', 0x18)
ctrl_bus_0.add_register('counter', 0x20)
ctrl_bus_0.add_register('barrier', 0x28)
ctrl_bus_0.add_register('memory', 0x30)
ctrl_bus_0.set_address('4K', 0) # address range is 4K at an offset of 0
ctrl_bus_0.port.init_channels(mode='default', dataWidth=32, addrWidth=6)
dut.add_interface(ctrl_bus_0)
ctrl_bus_1 = SAXILite('s_axi_ctrl_bus_01', 'clock', 'reset_n')
ctrl_bus_1.add_register('config', 0x10)
ctrl_bus_1.add_register('arg', 0x18)
ctrl_bus_1.add_register('counter', 0x20)
ctrl_bus_1.add_register('barrier', 0x28)
ctrl_bus_1.add_register('memory', 0x30)
ctrl_bus_1.set_address('4K', 0) # address range is 4K at an offset of 0
ctrl_bus_1.port.init_channels(mode='default', dataWidth=32, addrWidth=6)
dut.add_interface(ctrl_bus_1)
handler_wrapper.add_module(dut)
################################################################################
# Test Vectors
################################################################################
# Initialization thread (added to each test vector to reset everything)
initT = Thread()
initT.init_signals()
initT.wait_negedge('clock')
initT.add_delay('40ns')
initT.set_signal('reset_n', 1)
initT.set_signal('address_offset', 0)
#-------------------------------------------------------------------------------
#
#-------------------------------------------------------------------------------
short_message_A = TestVector()
short_message_A.add_thread(initT)
smA_t1 = Thread()
smA_t1.add_delay('100ns')
smA_t1.init_timer()
# ctrl_bus_0.write(smA_t1, 'counter_threshold', 1)
# ctrl_bus_0.write(smA_t1, 'config', 2)
# ctrl_bus_1.write(smA_t1, 'counter_threshold', 4)
# ctrl_bus_1.write(smA_t1, 'config', 4)
axis_handler.write(smA_t1, strToInt("{AMHeader,0xAA,0x1,0xC,2,0x5,1}"))
axis_handler.write(smA_t1, 5, tlast=1)
# axis_handler.write(smA_t1, strToInt("{AMHeader,0xAA,0x0,0xC,1,0x5,0}"), tlast=1)
smA_t1.set_flag(0)
short_message_A.add_thread(smA_t1)
smA_t2 = short_message_A.add_thread()
smA_t2.wait_flag(0)
smA_t2.add_delay('800ns')
ctrl_bus_1.read(smA_t2, "counter", 5)
# ctrl_bus_0.read(smA_t2, "memory", 1)
smA_t2.print_elapsed_time("short_message_A")
smA_t2.end_vector()
short_message_B = TestVector()
short_message_B.add_thread(initT)
smB_t1 = Thread()
smB_t1.add_delay('100ns')
smB_t1.init_timer()
axis_handler.write(smB_t1, strToInt("{AMHeader,0xAA,0x1,0xC,3,0x5,0}"), tlast=1)
smB_t1.set_flag(0)
short_message_B.add_thread(smB_t1)
smB_t2 = short_message_B.add_thread()
smB_t2.wait_flag(0)
smB_t2.add_delay('800ns')
ctrl_bus_1.read(smB_t2, "barrier", 1)
smB_t2.print_elapsed_time("short_message_B")
smB_t2.end_vector()
handler_wrapper.add_test_vector(short_message_A)
handler_wrapper.add_test_vector(short_message_B)
handler_wrapper.generateTB(filepath, 'sv')
| 33.92381 | 95 | 0.709994 | import os
from sonar.testbench import Testbench, Module, TestVector, Thread
from sonar.interfaces import AXIS, SAXILite
from sonar_strToInt import strToInt
handler_wrapper = Testbench.default('handler_wrapper')
filepath = os.path.join(os.path.dirname(__file__), 'build/handler_wrapper/')
dut = Module.default("DUT")
dut.add_clock_port('clock', '20ns')
dut.add_reset_port('reset_n')
dut.add_port('address_offset', 'input', 16)
# dut.add_port('interrupt_0', 'output')
# dut.add_port('interrupt_1', 'output')
axis_handler = AXIS('axis_handler', 'slave', 'clock', c_struct='axis_word', c_stream='uaxis_n')
axis_handler.port.init_channels('default', 64, False)
dut.add_interface(axis_handler)
ctrl_bus_0 = SAXILite('s_axi_ctrl_bus_00', 'clock', 'reset_n')
ctrl_bus_0.add_register('config', 0x10)
ctrl_bus_0.add_register('arg', 0x18)
ctrl_bus_0.add_register('counter', 0x20)
ctrl_bus_0.add_register('barrier', 0x28)
ctrl_bus_0.add_register('memory', 0x30)
ctrl_bus_0.set_address('4K', 0) # address range is 4K at an offset of 0
ctrl_bus_0.port.init_channels(mode='default', dataWidth=32, addrWidth=6)
dut.add_interface(ctrl_bus_0)
ctrl_bus_1 = SAXILite('s_axi_ctrl_bus_01', 'clock', 'reset_n')
ctrl_bus_1.add_register('config', 0x10)
ctrl_bus_1.add_register('arg', 0x18)
ctrl_bus_1.add_register('counter', 0x20)
ctrl_bus_1.add_register('barrier', 0x28)
ctrl_bus_1.add_register('memory', 0x30)
ctrl_bus_1.set_address('4K', 0) # address range is 4K at an offset of 0
ctrl_bus_1.port.init_channels(mode='default', dataWidth=32, addrWidth=6)
dut.add_interface(ctrl_bus_1)
handler_wrapper.add_module(dut)
################################################################################
# Test Vectors
################################################################################
# Initialization thread (added to each test vector to reset everything)
initT = Thread()
initT.init_signals()
initT.wait_negedge('clock')
initT.add_delay('40ns')
initT.set_signal('reset_n', 1)
initT.set_signal('address_offset', 0)
#-------------------------------------------------------------------------------
#
#-------------------------------------------------------------------------------
short_message_A = TestVector()
short_message_A.add_thread(initT)
smA_t1 = Thread()
smA_t1.add_delay('100ns')
smA_t1.init_timer()
# ctrl_bus_0.write(smA_t1, 'counter_threshold', 1)
# ctrl_bus_0.write(smA_t1, 'config', 2)
# ctrl_bus_1.write(smA_t1, 'counter_threshold', 4)
# ctrl_bus_1.write(smA_t1, 'config', 4)
axis_handler.write(smA_t1, strToInt("{AMHeader,0xAA,0x1,0xC,2,0x5,1}"))
axis_handler.write(smA_t1, 5, tlast=1)
# axis_handler.write(smA_t1, strToInt("{AMHeader,0xAA,0x0,0xC,1,0x5,0}"), tlast=1)
smA_t1.set_flag(0)
short_message_A.add_thread(smA_t1)
smA_t2 = short_message_A.add_thread()
smA_t2.wait_flag(0)
smA_t2.add_delay('800ns')
ctrl_bus_1.read(smA_t2, "counter", 5)
# ctrl_bus_0.read(smA_t2, "memory", 1)
smA_t2.print_elapsed_time("short_message_A")
smA_t2.end_vector()
short_message_B = TestVector()
short_message_B.add_thread(initT)
smB_t1 = Thread()
smB_t1.add_delay('100ns')
smB_t1.init_timer()
axis_handler.write(smB_t1, strToInt("{AMHeader,0xAA,0x1,0xC,3,0x5,0}"), tlast=1)
smB_t1.set_flag(0)
short_message_B.add_thread(smB_t1)
smB_t2 = short_message_B.add_thread()
smB_t2.wait_flag(0)
smB_t2.add_delay('800ns')
ctrl_bus_1.read(smB_t2, "barrier", 1)
smB_t2.print_elapsed_time("short_message_B")
smB_t2.end_vector()
handler_wrapper.add_test_vector(short_message_A)
handler_wrapper.add_test_vector(short_message_B)
handler_wrapper.generateTB(filepath, 'sv')
| 0 | 0 | 0 |
b9c1745c8dd2080b3f7d77015b4539d8932c318d | 4,139 | py | Python | maxwell/master.py | maxwell-dev/maxwell-client-python | 9d68ae57974c29d7454f4e95ff3c103e45ac48c7 | [
"Apache-2.0"
] | 1 | 2020-11-18T14:39:19.000Z | 2020-11-18T14:39:19.000Z | maxwell/master.py | maxwell-dev/maxwell-client-python | 9d68ae57974c29d7454f4e95ff3c103e45ac48c7 | [
"Apache-2.0"
] | null | null | null | maxwell/master.py | maxwell-dev/maxwell-client-python | 9d68ae57974c29d7454f4e95ff3c103e45ac48c7 | [
"Apache-2.0"
] | 1 | 2021-07-15T01:59:19.000Z | 2021-07-15T01:59:19.000Z | import asyncio
import pycommons.logger
import maxwell.protocol.maxwell_protocol_pb2 as protocol_types
from maxwell.connection import Code
from maxwell.connection import Event
logger = pycommons.logger.get_instance(__name__)
# ===========================================
# apis
# ===========================================
# ===========================================
# connector
# ===========================================
# ===========================================
# internal coroutines
# ===========================================
# ===========================================
# req builders
# ===========================================
# ===========================================
# urls
# ===========================================
| 34.206612 | 70 | 0.597487 | import asyncio
import pycommons.logger
import maxwell.protocol.maxwell_protocol_pb2 as protocol_types
from maxwell.connection import Code
from maxwell.connection import Event
logger = pycommons.logger.get_instance(__name__)
class Master(object):
    """Client for the maxwell master service.

    Keeps a connection to one of the configured master endpoints
    (rotating through them on connect failure) and resolves frontend
    and backend endpoints over it, caching resolved endpoints unless
    the caller opts out.
    """
    # ===========================================
    # apis
    # ===========================================
    def __init__(self, endpoints, connection_mgr, options, loop):
        """
        Args:
            endpoints: master endpoints to rotate through on failure.
            connection_mgr: pool used to fetch/release connections.
            options: client options (stored for later use).
            loop: asyncio event loop driving the client.
        """
        self.__endpoints = endpoints
        self.__connection_mgr = connection_mgr
        self.__options = options
        self.__loop = loop
        self.__endpoint_index = -1
        self.__frontend_endpoint = None
        self.__backend_endpoints = {}
        # NOTE(review): the explicit ``loop`` argument to asyncio.Event was
        # deprecated in Python 3.8 and removed in 3.10 -- confirm the
        # target runtime before upgrading.
        self.__open_event = asyncio.Event(loop=self.__loop)
        self.__connection = None
        self.__connect_to_master()

    def close(self):
        """Detach listeners and release the master connection."""
        self.__disconnect_from_master()

    async def resolve_frontend(self, cache=True):
        """Resolve the frontend endpoint.

        Args:
            cache: when True (default), reuse a previously resolved endpoint.
        """
        if cache:
            # Fix: identity comparison with None (was ``!= None``).
            if self.__frontend_endpoint is not None:
                return self.__frontend_endpoint
        return await self.__resolve_frontend()

    async def resolve_backend(self, topic, cache=True):
        """Resolve the backend endpoint serving ``topic``.

        Args:
            topic: topic whose backend endpoint is wanted.
            cache: when True (default), reuse a previously resolved endpoint.
        """
        if cache:
            endpoint = self.__backend_endpoints.get(topic)
            if endpoint is not None:
                return endpoint
        return await self.__resolve_backend(topic)

    # ===========================================
    # connector
    # ===========================================
    def __connect_to_master(self):
        # Fetch a connection to the next endpoint and subscribe to both
        # the success and the connect-failure events.
        self.__connection = self.__connection_mgr.fetch(
            self.__next_endpoint()
        )
        self.__connection.add_listener(
            Event.ON_CONNECTED,
            self.__on_connect_to_master_done
        )
        self.__connection.add_listener(
            (Event.ON_ERROR, Code.FAILED_TO_CONNECT),
            self.__on_connect_to_master_failed
        )

    def __disconnect_from_master(self):
        # Mirror of __connect_to_master: remove listeners, return the
        # connection to the pool.
        self.__connection.delete_listener(
            Event.ON_CONNECTED,
            self.__on_connect_to_master_done
        )
        self.__connection.delete_listener(
            (Event.ON_ERROR, Code.FAILED_TO_CONNECT),
            self.__on_connect_to_master_failed
        )
        self.__connection_mgr.release(self.__connection)
        self.__connection = None

    def __on_connect_to_master_done(self):
        # Unblock any coroutines waiting in __request().
        self.__open_event.set()

    def __on_connect_to_master_failed(self, _code):
        # Drop the failed connection and retry (next endpoint) in 1 second.
        self.__open_event.clear()
        self.__disconnect_from_master()
        self.__loop.call_later(1, self.__connect_to_master)

    # ===========================================
    # internal coroutines
    # ===========================================
    async def __resolve_frontend(self):
        resolve_frontend_rep = await self.__request(
            self.__build_resolve_frontend_req()
        )
        self.__frontend_endpoint = resolve_frontend_rep.endpoint
        return self.__frontend_endpoint

    async def __resolve_backend(self, topic):
        resolve_backend_rep = await self.__request(
            self.__build_resolve_backend_req(topic)
        )
        self.__backend_endpoints[topic] = resolve_backend_rep.endpoint
        return resolve_backend_rep.endpoint

    async def __request(self, action):
        # Wait until the connection is open before sending the request.
        await self.__open_event.wait()
        return await self.__connection.request(action)

    # ===========================================
    # req builders
    # ===========================================
    def __build_resolve_frontend_req(self):
        resolve_frontend_req = protocol_types.resolve_frontend_req_t()
        return resolve_frontend_req

    def __build_resolve_backend_req(self, topic):
        resolve_backend_req = protocol_types.resolve_backend_req_t()
        resolve_backend_req.topic = topic
        return resolve_backend_req

    # ===========================================
    # urls
    # ===========================================
    def __next_endpoint(self):
        # Round-robin over the configured endpoints.
        self.__endpoint_index += 1
        if self.__endpoint_index >= len(self.__endpoints):
            self.__endpoint_index = 0
        return self.__endpoints[self.__endpoint_index]
| 2,929 | 0 | 396 |
6ebae8b3507ad4205d902002a6f8493e3b1c9856 | 9,497 | py | Python | psychoblend/ui.py | johannesvollmer/psychopath | 21cd04ce95deea631ef2c7ce50e200215668a3c0 | [
"MIT"
] | null | null | null | psychoblend/ui.py | johannesvollmer/psychopath | 21cd04ce95deea631ef2c7ce50e200215668a3c0 | [
"MIT"
] | null | null | null | psychoblend/ui.py | johannesvollmer/psychopath | 21cd04ce95deea631ef2c7ce50e200215668a3c0 | [
"MIT"
] | null | null | null | import bpy
# Use some of the existing buttons.
from bl_ui import properties_render
properties_render.RENDER_PT_render.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
properties_render.RENDER_PT_dimensions.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
properties_render.RENDER_PT_output.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
del properties_render
from bl_ui import properties_data_camera
properties_data_camera.DATA_PT_lens.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
properties_data_camera.DATA_PT_camera.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
properties_data_camera.DATA_PT_camera_display.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
properties_data_camera.DATA_PT_custom_props_camera.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
del properties_data_camera
| 32.635739 | 116 | 0.679688 | import bpy
# Use some of the existing buttons.
from bl_ui import properties_render
properties_render.RENDER_PT_render.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
properties_render.RENDER_PT_dimensions.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
properties_render.RENDER_PT_output.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
del properties_render
from bl_ui import properties_data_camera
properties_data_camera.DATA_PT_lens.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
properties_data_camera.DATA_PT_camera.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
properties_data_camera.DATA_PT_camera_display.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
properties_data_camera.DATA_PT_custom_props_camera.COMPAT_ENGINES.add('PSYCHOPATH_RENDER')
del properties_data_camera
class PsychopathPanel():
    """Mixin shared by every Psychopath UI panel.

    Supplies the render-engine compatibility set and the common
    ``poll`` gate that hides the panel for other engines.
    """

    COMPAT_ENGINES = {'PSYCHOPATH_RENDER'}

    @classmethod
    def poll(cls, context):
        # Visible only when the game engine is off and the scene's render
        # engine is one of ours.
        render_settings = context.scene.render
        game_engine_off = render_settings.use_game_engine is False
        return game_engine_off and (render_settings.engine in cls.COMPAT_ENGINES)
class RENDER_PT_psychopath_render_settings(PsychopathPanel, bpy.types.Panel):
    """Render-properties panel exposing Psychopath's core render settings."""
    bl_label = "Render Settings"
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "render"

    def draw(self, context):
        props = context.scene.psychopath
        column = self.layout.column()
        # Each (heading, property names) pair is drawn as a labeled section,
        # in this exact order.
        sections = (
            ("Sampling", ("spp",)),
            ("Dicing", ("dicing_rate",)),
            ("Motion Blur", ("motion_blur_segments", "shutter_start", "shutter_end")),
            ("Performance", ("max_samples_per_bucket",)),
        )
        for heading, prop_names in sections:
            column.label(text=heading)
            for prop_name in prop_names:
                column.prop(props, prop_name)
class RENDER_PT_psychopath_export_settings(PsychopathPanel, bpy.types.Panel):
    """Render-properties panel for scene-export options."""
    bl_label = "Export Settings"
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "render"

    def draw(self, context):
        # Single setting: where the exported scene is written.
        self.layout.column().prop(context.scene.psychopath, "export_path")
class WORLD_PT_psychopath_background(PsychopathPanel, bpy.types.Panel):
    """World-properties panel exposing the background color."""
    bl_label = "Background"
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "world"
    @classmethod
    def poll(cls, context):
        # Only when a world datablock exists and the engine matches.
        return context.world and PsychopathPanel.poll(context)
    def draw(self, context):
        layout = self.layout
        world = context.world
        # Blender's horizon color is reused as the background color.
        layout.prop(world, "horizon_color", text="Color")
class DATA_PT_psychopath_camera_dof(PsychopathPanel, bpy.types.Panel):
    """Camera-data panel for depth-of-field settings."""
    bl_label = "Depth of Field"
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "data"
    @classmethod
    def poll(cls, context):
        # Fix: removed an unused local (``engine``) that was computed and
        # never read; visibility is fully decided by the shared poll.
        return context.camera and PsychopathPanel.poll(context)
    def draw(self, context):
        ob = context.active_object
        layout = self.layout
        col = layout.column()
        # Standard Blender DoF properties plus Psychopath's aperture radius.
        col.prop(ob.data, "dof_object")
        col.prop(ob.data, "dof_distance")
        col.prop(ob.data.psychopath, "aperture_radius")
class DATA_PT_psychopath_lamp(PsychopathPanel, bpy.types.Panel):
    """Lamp-data panel for lamp type, size, color and energy."""
    bl_label = "Lamp"
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "data"
    @classmethod
    def poll(cls, context):
        # Fix: removed an unused local (``engine``) that was computed and
        # never read; visibility is fully decided by the shared poll.
        return context.lamp and PsychopathPanel.poll(context)
    def draw(self, context):
        ob = context.active_object
        layout = self.layout
        col = layout.column()
        row = col.row()
        row.prop(ob.data, "type", expand=True)
        # HEMI has no size; AREA gets its size from the Area Shape panel.
        if ob.data.type != 'HEMI' and ob.data.type != 'AREA':
            col.prop(ob.data, "shadow_soft_size")
        col.prop(ob.data.psychopath, "color_type")
        # Rec709 colors are picked directly; blackbody-style colors are
        # driven by a temperature value instead.
        if ob.data.psychopath.color_type == 'Rec709':
            col.prop(ob.data, "color")
        elif ob.data.psychopath.color_type == 'Blackbody' or ob.data.psychopath.color_type == 'ColorTemperature':
            col.prop(ob.data.psychopath, "color_blackbody_temp")
        col.prop(ob.data, "energy")
class DATA_PT_psychopath_area_lamp(PsychopathPanel, bpy.types.Panel):
    """Lamp-data panel configuring the shape of AREA lamps."""
    bl_label = "Area Shape"
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "data"
    @classmethod
    def poll(cls, context):
        # Only for AREA lamps under a compatible render engine.
        lamp = context.lamp
        engine = context.scene.render.engine
        return (lamp and lamp.type == 'AREA') and (engine in cls.COMPAT_ENGINES)
    def draw(self, context):
        layout = self.layout
        lamp = context.lamp
        col = layout.column()
        col.row().prop(lamp, "shape", expand=True)
        sub = col.row(align=True)
        # SQUARE uses one size; RECTANGLE exposes independent X/Y sizes.
        if lamp.shape == 'SQUARE':
            sub.prop(lamp, "size")
        elif lamp.shape == 'RECTANGLE':
            sub.prop(lamp, "size", text="Size X")
            sub.prop(lamp, "size_y", text="Size Y")
class DATA_PT_psychopath_mesh(PsychopathPanel, bpy.types.Panel):
    """Mesh-data panel for Psychopath-specific mesh settings."""
    bl_label = "Psychopath Mesh Properties"
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "data"
    @classmethod
    def poll(cls, context):
        engine = context.scene.render.engine
        return context.mesh and (engine in cls.COMPAT_ENGINES)
    def draw(self, context):
        layout = self.layout
        mesh = context.mesh
        # Toggle whether the mesh is treated as a subdivision surface
        # (presumably at export time -- TODO confirm against the exporter).
        layout.row().prop(mesh.psychopath, "is_subdivision_surface")
class MATERIAL_PT_psychopath_context_material(PsychopathPanel, bpy.types.Panel):
    """Headerless material-context panel: slot list and datablock selector.

    Mirrors Blender's stock material-context UI (slot list, add/remove
    operators, edit-mode assign/select buttons, datablock template).
    """
    bl_label = ""
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_context = "material"
    bl_options = {'HIDE_HEADER'}
    @classmethod
    def poll(cls, context):
        # Needs either a material or an object to operate on.
        return (context.material or context.object) and PsychopathPanel.poll(context)
    def draw(self, context):
        layout = self.layout
        mat = context.material
        ob = context.object
        slot = context.material_slot
        space = context.space_data
        if ob:
            # Material slot list with add/remove/specials controls.
            row = layout.row()
            row.template_list("MATERIAL_UL_matslots", "", ob, "material_slots", ob, "active_material_index", rows=1)
            col = row.column(align=True)
            col.operator("object.material_slot_add", icon='ZOOMIN', text="")
            col.operator("object.material_slot_remove", icon='ZOOMOUT', text="")
            col.menu("MATERIAL_MT_specials", icon='DOWNARROW_HLT', text="")
            # In edit mode, allow assigning/selecting by slot.
            if ob.mode == 'EDIT':
                row = layout.row(align=True)
                row.operator("object.material_slot_assign", text="Assign")
                row.operator("object.material_slot_select", text="Select")
                row.operator("object.material_slot_deselect", text="Deselect")
        split = layout.split(percentage=0.65)
        if ob:
            # Active material datablock plus the slot's data/object link toggle.
            split.template_ID(ob, "active_material", new="material.new")
            row = split.row()
            if slot:
                row.prop(slot, "link", text="")
            else:
                row.label()
        elif mat:
            # Pinned material (no object context).
            split.template_ID(space, "pin_id")
            split.separator()
class MATERIAL_PT_psychopath_surface(PsychopathPanel, bpy.types.Panel):
    """Material panel for Psychopath's surface shader parameters."""
    bl_label = "Surface"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_context = "material"
    @classmethod
    def poll(cls, context):
        return context.material and PsychopathPanel.poll(context)
    def draw(self, context):
        layout = self.layout
        col = layout.column()
        mat = context.material
        col.prop(mat.psychopath, "surface_shader_type")
        col.prop(mat.psychopath, "color_type")
        # Rec709 colors are picked directly; blackbody-style colors are
        # driven by a temperature value instead.
        if mat.psychopath.color_type == 'Rec709':
            col.prop(mat.psychopath, "color")
        elif mat.psychopath.color_type == 'Blackbody' or mat.psychopath.color_type == 'ColorTemperature':
            col.prop(mat.psychopath, "color_blackbody_temp")
        # Shader-specific parameters: GTR adds tail shape, GGX does not.
        if mat.psychopath.surface_shader_type == 'GTR':
            layout.prop(mat.psychopath, "roughness")
            layout.prop(mat.psychopath, "tail_shape")
            layout.prop(mat.psychopath, "fresnel")
        if mat.psychopath.surface_shader_type == 'GGX':
            layout.prop(mat.psychopath, "roughness")
            layout.prop(mat.psychopath, "fresnel")
def register():
    """Register every Psychopath UI panel class with Blender."""
    # Order matches the original one-call-per-class sequence.
    panel_classes = (
        RENDER_PT_psychopath_render_settings,
        RENDER_PT_psychopath_export_settings,
        WORLD_PT_psychopath_background,
        DATA_PT_psychopath_camera_dof,
        DATA_PT_psychopath_mesh,
        DATA_PT_psychopath_lamp,
        DATA_PT_psychopath_area_lamp,
        MATERIAL_PT_psychopath_context_material,
        MATERIAL_PT_psychopath_surface,
    )
    for panel_class in panel_classes:
        bpy.utils.register_class(panel_class)
def unregister():
    """Unregister every Psychopath UI panel class from Blender."""
    bpy.utils.unregister_class(RENDER_PT_psychopath_render_settings)
    bpy.utils.unregister_class(RENDER_PT_psychopath_export_settings)
    bpy.utils.unregister_class(WORLD_PT_psychopath_background)
    bpy.utils.unregister_class(DATA_PT_psychopath_camera_dof)
    # Bug fix: this line previously called register_class, re-registering
    # the mesh panel during unregister instead of removing it.
    bpy.utils.unregister_class(DATA_PT_psychopath_mesh)
    bpy.utils.unregister_class(DATA_PT_psychopath_lamp)
    bpy.utils.unregister_class(DATA_PT_psychopath_area_lamp)
    bpy.utils.unregister_class(MATERIAL_PT_psychopath_context_material)
    bpy.utils.unregister_class(MATERIAL_PT_psychopath_surface)
| 6,289 | 2,187 | 276 |
5aabcd4d51b7a5f6cec8a178ccd6918f0ddb1af7 | 293 | py | Python | IsItAPrivate192Address.py | steveirwincyber/Scripts | 4352804e23b5def283c6b0600701896be56aa910 | [
"Unlicense"
] | 1 | 2020-10-19T17:22:55.000Z | 2020-10-19T17:22:55.000Z | IsItAPrivate192Address.py | steveirwincyber/Scripts | 4352804e23b5def283c6b0600701896be56aa910 | [
"Unlicense"
] | null | null | null | IsItAPrivate192Address.py | steveirwincyber/Scripts | 4352804e23b5def283c6b0600701896be56aa910 | [
"Unlicense"
] | null | null | null | #used to determine if its a private ip address in 192.168.0.0/16
def isprivate(ip):
    """Return True if *ip* is a private IPv4 address in 192.168.0.0/16.

    Fixes over the original: malformed input (wrong number of octets,
    non-numeric parts, or octet values outside 0-255) now returns False
    instead of raising IndexError/ValueError, and the unreachable
    trailing ``pass`` was removed.
    """
    try:
        octets = [int(part) for part in ip.split('.')]
    except ValueError:
        # Non-numeric octet, e.g. "192.168.a.b".
        return False
    if len(octets) != 4:
        return False
    if any(octet < 0 or octet > 255 for octet in octets):
        return False
    return octets[0] == 192 and octets[1] == 168
#used to determine if it's a private IP address in 192.168.0.0/16
| 206 | 0 | 22 |
22ad9e929211ef81079ae89356a6d499c45ec457 | 3,621 | py | Python | attachments/migrations/0001_initial.py | liddiard/appletini | fa006e1b71ac60738bb5353e949676755530c781 | [
"MIT"
] | 1 | 2015-03-17T04:13:47.000Z | 2015-03-17T04:13:47.000Z | attachments/migrations/0001_initial.py | liddiard/appletini | fa006e1b71ac60738bb5353e949676755530c781 | [
"MIT"
] | 27 | 2015-04-14T20:18:24.000Z | 2015-06-16T20:01:06.000Z | attachments/migrations/0001_initial.py | liddiard/appletini | fa006e1b71ac60738bb5353e949676755530c781 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
| 43.107143 | 143 | 0.554543 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('authors', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Audio',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('caption', models.TextField(blank=True)),
('title', models.CharField(max_length=128)),
('file', models.FileField(upload_to=b'attachments/audio/%Y/%m/%d')),
('credit', models.ManyToManyField(related_name='news_audio', to='authors.Author', blank=True)),
],
options={
'verbose_name_plural': 'Audio',
},
),
migrations.CreateModel(
name='Image',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('caption', models.TextField(blank=True)),
('file', models.ImageField(upload_to=b'attachments/image/%Y/%m/%d')),
('request_id', models.PositiveIntegerField(null=True, blank=True)),
('credit', models.ManyToManyField(related_name='news_image', to='authors.Author', blank=True)),
('request_type', models.ForeignKey(blank=True, to='contenttypes.ContentType', null=True)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Poll',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('question', models.CharField(max_length=128)),
('is_open', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='PollChoice',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('text', models.CharField(max_length=128)),
('votes', models.PositiveIntegerField(default=0)),
('question', models.ForeignKey(to='attachments.Poll')),
],
),
migrations.CreateModel(
name='Review',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('item', models.CharField(max_length=64)),
('line_1', models.CharField(max_length=128, blank=True)),
('line_2', models.CharField(max_length=128, blank=True)),
('rating', models.PositiveSmallIntegerField(blank=True, null=True, validators=[django.core.validators.MaxValueValidator(10)])),
],
),
migrations.CreateModel(
name='Video',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('caption', models.TextField(blank=True)),
('title', models.CharField(max_length=128)),
('youtube_id', models.CharField(max_length=16)),
('credit', models.ManyToManyField(related_name='news_video', to='authors.Author', blank=True)),
],
options={
'abstract': False,
},
),
]
| 0 | 3,461 | 23 |
8a01d5ebf3645cc2775e942cf410251a913020cd | 5,531 | py | Python | paddlevideo/modeling/heads/base.py | ppppeng/cvsport22-figure-skating | c94f9a668f53c8ae34db5a917b5ad1deda8da0c5 | [
"Apache-2.0"
] | null | null | null | paddlevideo/modeling/heads/base.py | ppppeng/cvsport22-figure-skating | c94f9a668f53c8ae34db5a917b5ad1deda8da0c5 | [
"Apache-2.0"
] | null | null | null | paddlevideo/modeling/heads/base.py | ppppeng/cvsport22-figure-skating | c94f9a668f53c8ae34db5a917b5ad1deda8da0c5 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from abc import abstractmethod
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from ..builder import build_loss
from paddlevideo.utils import get_logger, get_dist_info
logger = get_logger("paddlevideo")
class BaseHead(nn.Layer):
    """Base class for head part.
    All head should subclass it.
    All subclass should overwrite:
    - Methods: ```init_weights```, initializing weights.
    - Methods: ```forward```, forward function.
    Args:
        num_classes (int): The number of classes to be classified.
        in_channels (int): The number of channels in input feature.
        loss_cfg (dict): Config for building loss. Default: dict(type='CrossEntropyLoss').
        ls_eps (float): label smoothing epsilon. Default: 0. .
    """
    # NOTE(review): this copy is a truncated extraction -- __init__ and the
    # helpers referenced below (label_smooth_loss, loss_func, get_acc,
    # ls_eps) are defined in the full class elsewhere.
    @abstractmethod
    def forward(self, x):
        """Define how the head is going to run.
        """
        # NOTE(review): ``raise NotImplemented`` raises TypeError at runtime;
        # should be NotImplementedError.
        raise NotImplemented
    def loss(self, scores, labels, valid_mode=False, **kwargs):
        """Calculate the loss according to the model output ```scores```,
        and the target ```labels```.
        Args:
            scores (paddle.Tensor): The output of the model.
            labels (paddle.Tensor): The target output of the model.
        Returns:
            losses (dict): A dict containing field 'loss'(mandatory) and 'top1_acc', 'top5_acc'(optional).
        """
        if len(labels) == 1: # commonly case
            labels = labels[0]
            losses = dict()
            if self.ls_eps != 0. and not valid_mode: # label_smooth
                loss = self.label_smooth_loss(scores, labels, **kwargs)
            else:
                loss = self.loss_func(scores, labels, **kwargs)
            top1, top5 = self.get_acc(scores, labels, valid_mode)
            losses['top1'] = top1
            losses['top5'] = top5
            losses['loss'] = loss
            return losses
        elif len(labels) == 3: # mix_up
            labels_a, labels_b, lam = labels
            lam = lam[0] # get lam value
            losses = dict()
            if self.ls_eps != 0:
                loss_a = self.label_smooth_loss(scores, labels_a, **kwargs)
                loss_b = self.label_smooth_loss(scores, labels_b, **kwargs)
            else:
                loss_a = self.loss_func(scores, labels_a, **kwargs)
                loss_b = self.loss_func(scores, labels_b, **kwargs)
            # Mix-up: blend the two losses/accuracies by lam.
            loss = lam * loss_a + (1 - lam) * loss_b
            # loss = self.my_loss_filter(loss) # todo mycode
            top1a, top5a = self.get_acc(scores, labels_a, valid_mode)
            top1b, top5b = self.get_acc(scores, labels_b, valid_mode)
            top1 = lam * top1a + (1 - lam) * top1b
            top5 = lam * top5a + (1 - lam) * top5b
            losses['top1'] = top1
            losses['top5'] = top5
            losses['loss'] = loss
            return losses
        else:
            # NOTE(review): should be NotImplementedError (see forward()).
            raise NotImplemented
| 37.120805 | 107 | 0.607304 | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from abc import abstractmethod
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from ..builder import build_loss
from paddlevideo.utils import get_logger, get_dist_info
logger = get_logger("paddlevideo")
class BaseHead(nn.Layer):
    """Base class for head part.
    All head should subclass it.
    All subclass should overwrite:
    - Methods: ```init_weights```, initializing weights.
    - Methods: ```forward```, forward function.
    Args:
        num_classes (int): The number of classes to be classified.
        in_channels (int): The number of channels in input feature.
        loss_cfg (dict): Config for building loss. Default: dict(type='CrossEntropyLoss').
        ls_eps (float): label smoothing epsilon. Default: 0. .
    """
    def __init__(
        self,
        num_classes,
        in_channels,
        loss_cfg=dict(
            name="CrossEntropyLoss"
        ),  # TODO(shipping): only pass a name or standard build cfg format.
        # multi_class=False, NOTE(shipping): not supported now.
        ls_eps=0.):
        super().__init__()
        self.num_classes = num_classes
        self.in_channels = in_channels
        self.loss_func = build_loss(loss_cfg)
        # self.multi_class = multi_class NOTE(shipping): not supported now
        self.ls_eps = ls_eps

    @abstractmethod
    def forward(self, x):
        """Define how the head is going to run.
        """
        # Fix: ``raise NotImplemented`` raised TypeError (NotImplemented is
        # not an exception); NotImplementedError is the correct sentinel.
        raise NotImplementedError

    def my_loss_filter(self, loss):
        """Dampen per-sample loss outliers above mean + 3*var, then reduce to mean.

        NOTE(review): mutates ``loss`` in place; comment says the input is
        shaped (64, 1) -- confirm against callers.
        """
        # loss:(64:1)
        mu = loss.mean()
        sigma = loss.var()
        threshold = mu + 3 * sigma
        select = loss > threshold
        reduce = 1 + (loss - threshold) / (sigma+0.001) * 1.5
        loss[select] = loss[select] / reduce[select]
        # loss[select]=0
        return loss.mean()

    def loss(self, scores, labels, valid_mode=False, **kwargs):
        """Calculate the loss according to the model output ```scores```,
        and the target ```labels```.
        Args:
            scores (paddle.Tensor): The output of the model.
            labels (paddle.Tensor): The target output of the model.
        Returns:
            losses (dict): A dict containing field 'loss'(mandatory) and 'top1_acc', 'top5_acc'(optional).
        """
        if len(labels) == 1: # commonly case
            labels = labels[0]
            losses = dict()
            # Label smoothing is applied only during training.
            if self.ls_eps != 0. and not valid_mode: # label_smooth
                loss = self.label_smooth_loss(scores, labels, **kwargs)
            else:
                loss = self.loss_func(scores, labels, **kwargs)
            top1, top5 = self.get_acc(scores, labels, valid_mode)
            losses['top1'] = top1
            losses['top5'] = top5
            losses['loss'] = loss
            return losses
        elif len(labels) == 3: # mix_up
            labels_a, labels_b, lam = labels
            lam = lam[0] # get lam value
            losses = dict()
            if self.ls_eps != 0:
                loss_a = self.label_smooth_loss(scores, labels_a, **kwargs)
                loss_b = self.label_smooth_loss(scores, labels_b, **kwargs)
            else:
                loss_a = self.loss_func(scores, labels_a, **kwargs)
                loss_b = self.loss_func(scores, labels_b, **kwargs)
            # Mix-up: blend the two losses/accuracies by lam.
            loss = lam * loss_a + (1 - lam) * loss_b
            # loss = self.my_loss_filter(loss) # todo mycode
            top1a, top5a = self.get_acc(scores, labels_a, valid_mode)
            top1b, top5b = self.get_acc(scores, labels_b, valid_mode)
            top1 = lam * top1a + (1 - lam) * top1b
            top5 = lam * top5a + (1 - lam) * top5b
            losses['top1'] = top1
            losses['top5'] = top5
            losses['loss'] = loss
            return losses
        else:
            # Fix: was ``raise NotImplemented`` (a TypeError at runtime).
            raise NotImplementedError

    def label_smooth_loss(self, scores, labels, **kwargs):
        """Cross-entropy against one-hot labels smoothed by ``ls_eps``."""
        labels = F.one_hot(labels, self.num_classes)
        labels = F.label_smooth(labels, epsilon=self.ls_eps)
        labels = paddle.squeeze(labels, axis=1)
        loss = self.loss_func(scores, labels, soft_label=True, **kwargs)
        # loss = self.loss_func(scores, labels, reduction='none', soft_label=True, **kwargs) # todo mycode
        return loss

    def get_acc(self, scores, labels, valid_mode):
        """Top-1/top-5 accuracy, averaged across workers during validation."""
        top1 = paddle.metric.accuracy(input=scores, label=labels, k=1)
        top5 = paddle.metric.accuracy(input=scores, label=labels, k=5)
        _, world_size = get_dist_info()
        # NOTE(shipping): deal with multi cards validate
        if world_size > 1 and valid_mode: # reduce sum when valid
            top1 = paddle.distributed.all_reduce(
                top1, op=paddle.distributed.ReduceOp.SUM) / world_size
            top5 = paddle.distributed.all_reduce(
                top5, op=paddle.distributed.ReduceOp.SUM) / world_size
        return top1, top5
| 1,872 | 0 | 108 |
4067e2254c7fff7016edbb7c7552de11a6ab99a3 | 4,617 | py | Python | measure/performance/MSGo.py | isabella232/media-delivery | 663d0a02295aa79c7bdf5338798e409d3476f708 | [
"MIT"
] | 21 | 2020-08-04T04:27:50.000Z | 2022-03-31T22:03:43.000Z | measure/performance/MSGo.py | intel/media-delivery | 7494115ce70788cc2fa29343a8f2cf0c16419b3a | [
"MIT"
] | 24 | 2020-07-03T00:49:23.000Z | 2021-12-02T22:50:58.000Z | measure/performance/MSGo.py | isabella232/media-delivery | 663d0a02295aa79c7bdf5338798e409d3476f708 | [
"MIT"
] | 12 | 2020-07-03T01:47:28.000Z | 2022-03-23T07:40:29.000Z | #! /usr/bin/python3
##################################################################################
# Copyright (c) 2020 Intel Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
##################################################################################
########### James.Iwan@intel.com Measure Perf ####################################
########### Scott.Rowe@intel.com Measure Perf ####################################
import subprocess, sys, os, re, argparse, time, statistics, signal
# All intermediate files (mms.sh, top/gem summaries, traces) live here.
temp_path = "/tmp/perf/"
###################################################################
# This shell script is currently not being Run/Execute on this automation.
# Creating the file only for debug purposes.
###################################################################
shell_script_mms = temp_path + "mms.sh"
# Parse mms.sh: the ``echo`` line carries the clip/session/iteration tag,
# every other line is a media command to dispatch.
d = open(shell_script_mms, 'r')
mediacmd_temp = []
clip_session_iter_tag = ""
for dispatch_cmdline in d:
    if re.search("echo ", dispatch_cmdline):
        clip_session_iter_tag = re.sub(r'echo ', "", dispatch_cmdline.rstrip())
        continue
    else:
        mediacmd_temp.append(dispatch_cmdline)
d.close()
#Execute Media MultiStreams
# Launch every media command concurrently through the shell.
processes = [subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE) for cmd in mediacmd_temp]
# TOP monitors for specified period. Later we filter TOP output by PID to avoid
# any conflicts with data for other processes.
cpu_mem_monitor_cmd = "top -b -d 0.01 -i > " + temp_path + clip_session_iter_tag + "_TopSummary.txt &"
top_cpu_mem_process = subprocess.Popen(cpu_mem_monitor_cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
#Monitor GPU_MEM Utilization
# Samples the i915 GEM object debugfs node every 10ms via watch(1).
gpu_mem_monitor_cmd = "watch -n 0.01 -t -c 'sudo cat /sys/kernel/debug/dri/0/i915_gem_objects >> " + temp_path + clip_session_iter_tag + "_GemObjectSummary.txt 2>&1' &"
gem_gpu_mem_process = subprocess.Popen(gpu_mem_monitor_cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
gem_gpu_mem_process_pid = gem_gpu_mem_process.pid
# Wait for every media workload to finish before stopping the monitors.
for p in processes:
    p.wait()
os.system("killall top") # Kill Top Application.
# Propagate the first non-zero workload exit code.
for p in processes:
    if p.returncode != 0:
        exit(p.returncode)
top_cpu_mem_process.wait()
gem_gpu_mem_process.kill() # stop the watch command everytime multistreams process has finished
os.system("killall watch") # kill all watch command , workaround.
############################################################################################
# Top CPU MEM filtered by applications
############################################################################################
top_cpu_mem_grep_cmd = "grep -E '(sample|ffmpeg)' " + temp_path + clip_session_iter_tag + "_TopSummary.txt > " + temp_path + clip_session_iter_tag + "_cpumem_trace.txt"
top_cpu_mem_grep_process = subprocess.Popen(top_cpu_mem_grep_cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
top_cpu_mem_grep_process.wait()
############################################################################################
# GemObject GPU MEM filtered by applications
############################################################################################
gemobject_gpu_mem_trace_grep_cmd = "grep -E '(sample_multi|ffmpeg|sample_decode)' " + temp_path + clip_session_iter_tag + "_GemObjectSummary.txt | grep -v '0 active' > " + temp_path + clip_session_iter_tag + "_gpumem_trace.txt"
gemobject_gpu_mem_trace_grep_process = subprocess.Popen(gemobject_gpu_mem_trace_grep_cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
gemobject_gpu_mem_trace_grep_process.wait()
| 51.876404 | 234 | 0.639593 | #! /usr/bin/python3
##################################################################################
# Copyright (c) 2020 Intel Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
##################################################################################
########### James.Iwan@intel.com Measure Perf ####################################
########### Scott.Rowe@intel.com Measure Perf ####################################
import subprocess, sys, os, re, argparse, time, statistics, signal

# Working directory shared with the dispatcher that wrote mms.sh and where
# all monitor trace files are produced.
temp_path = "/tmp/perf/"
###################################################################
# This shell script is currently not being Run/Execute on this automation.
# Creating the file only for debug purposes.
###################################################################
shell_script_mms = temp_path + "mms.sh"
# Parse the dispatch script: a line of the form "echo <tag>" carries the
# clip/session/iteration tag used to name the trace files below; every other
# line is a media workload command to be launched.
mediacmd_temp = []
clip_session_iter_tag = ""
# Context manager guarantees the handle is closed even if parsing fails
# (the original open()/close() pair leaked the handle on an exception).
with open(shell_script_mms, 'r') as d:
    for dispatch_cmdline in d:
        if re.search("echo ", dispatch_cmdline):
            clip_session_iter_tag = re.sub(r'echo ', "", dispatch_cmdline.rstrip())
        else:
            mediacmd_temp.append(dispatch_cmdline)
# Execute Media MultiStreams: launch every workload concurrently.
processes = [subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE) for cmd in mediacmd_temp]
# TOP monitors for specified period. Later we filter TOP output by PID to avoid
# any conflicts with data for other processes.
cpu_mem_monitor_cmd = "top -b -d 0.01 -i > " + temp_path + clip_session_iter_tag + "_TopSummary.txt &"
top_cpu_mem_process = subprocess.Popen(cpu_mem_monitor_cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
# Monitor GPU_MEM utilization by periodically sampling the i915 gem-objects
# debugfs node into a per-run summary file.
gpu_mem_monitor_cmd = "watch -n 0.01 -t -c 'sudo cat /sys/kernel/debug/dri/0/i915_gem_objects >> " + temp_path + clip_session_iter_tag + "_GemObjectSummary.txt 2>&1' &"
gem_gpu_mem_process = subprocess.Popen(gpu_mem_monitor_cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
gem_gpu_mem_process_pid = gem_gpu_mem_process.pid
# Block until every media workload has finished.
for p in processes:
    p.wait()
os.system("killall top")  # Kill Top Application.
# Propagate the first non-zero workload exit code to the caller.
for p in processes:
    if p.returncode != 0:
        sys.exit(p.returncode)
top_cpu_mem_process.wait()
gem_gpu_mem_process.kill()  # stop the watch command everytime multistreams process has finished
os.system("killall watch")  # kill all watch command , workaround.
############################################################################################
# Top CPU MEM filtered by applications
############################################################################################
top_cpu_mem_grep_cmd = "grep -E '(sample|ffmpeg)' " + temp_path + clip_session_iter_tag + "_TopSummary.txt > " + temp_path + clip_session_iter_tag + "_cpumem_trace.txt"
top_cpu_mem_grep_process = subprocess.Popen(top_cpu_mem_grep_cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
top_cpu_mem_grep_process.wait()
############################################################################################
# GemObject GPU MEM filtered by applications
############################################################################################
gemobject_gpu_mem_trace_grep_cmd = "grep -E '(sample_multi|ffmpeg|sample_decode)' " + temp_path + clip_session_iter_tag + "_GemObjectSummary.txt | grep -v '0 active' > " + temp_path + clip_session_iter_tag + "_gpumem_trace.txt"
gemobject_gpu_mem_trace_grep_process = subprocess.Popen(gemobject_gpu_mem_trace_grep_cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
gemobject_gpu_mem_trace_grep_process.wait()
| 0 | 0 | 0 |
f40b7eb8a68e2df24b5da0b5c9bffaa6102f02b6 | 1,712 | py | Python | plaso/events/text_events.py | cvandeplas/plaso | b625a2c267ed09505cfac84c9593d8c0922852b1 | [
"Apache-2.0"
] | 3 | 2016-03-11T02:47:08.000Z | 2016-12-24T03:19:27.000Z | plaso/events/text_events.py | cvandeplas/plaso | b625a2c267ed09505cfac84c9593d8c0922852b1 | [
"Apache-2.0"
] | null | null | null | plaso/events/text_events.py | cvandeplas/plaso | b625a2c267ed09505cfac84c9593d8c0922852b1 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains the text format specific event object classes."""
from plaso.events import time_events
from plaso.lib import eventdata
class TextEvent(time_events.TimestampEvent):
  """Convenience class for a text format-based event."""

  DATA_TYPE = 'text:entry'

  def __init__(self, timestamp, offset, attributes):
    """Initializes a text event object.

    Args:
      timestamp: The timestamp time value. The timestamp contains the
                 number of microseconds since Jan 1, 1970 00:00:00 UTC.
      offset: The offset of the attributes.
      attributes: A dict that contains the events attributes.
    """
    super(TextEvent, self).__init__(
        timestamp, eventdata.EventTimestamp.WRITTEN_TIME)
    self.offset = offset
    # Copy every attribute onto the event object, except empty strings.
    # TODO: Revisit this constraints and see if we can implement
    # it using a more sane solution.
    for attribute_name, attribute_value in attributes.iteritems():
      is_empty_string = (
          isinstance(attribute_value, basestring) and not attribute_value)
      if not is_empty_string:
        setattr(self, attribute_name, attribute_value)
| 34.938776 | 74 | 0.721379 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains the text format specific event object classes."""
from plaso.events import time_events
from plaso.lib import eventdata
class TextEvent(time_events.TimestampEvent):
  """Convenience class for a text format-based event."""

  DATA_TYPE = 'text:entry'

  def __init__(self, timestamp, offset, attributes):
    """Initializes a text event object.

    Args:
      timestamp: The timestamp time value. The timestamp contains the
                 number of microseconds since Jan 1, 1970 00:00:00 UTC.
      offset: The offset of the attributes.
      attributes: A dict that contains the events attributes.
    """
    super(TextEvent, self).__init__(
        timestamp, eventdata.EventTimestamp.WRITTEN_TIME)
    self.offset = offset
    # Copy every attribute onto the event object, except empty strings.
    # TODO: Revisit this constraints and see if we can implement
    # it using a more sane solution.
    for attribute_name, attribute_value in attributes.iteritems():
      is_empty_string = (
          isinstance(attribute_value, basestring) and not attribute_value)
      if not is_empty_string:
        setattr(self, attribute_name, attribute_value)
| 0 | 0 | 0 |
614ff5b172842cc55b6f1c0f4657babdd9574e43 | 1,145 | py | Python | openstack_dashboard/dashboards/idm/dashboard.py | agaldemas/horizon | 38c4fb7ea1f1d5a00a8e1fae0f7a864a7d09f1ab | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/idm/dashboard.py | agaldemas/horizon | 38c4fb7ea1f1d5a00a8e1fae0f7a864a7d09f1ab | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/idm/dashboard.py | agaldemas/horizon | 38c4fb7ea1f1d5a00a8e1fae0f7a864a7d09f1ab | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2014 Universidad Politecnica de Madrid
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import horizon
# NOTE(review): `Idm` is not defined anywhere above this line in this copy of
# the file, so this call would raise NameError at import time — the dashboard
# class definition appears to be missing. TODO confirm against the full file.
horizon.register(Idm)
| 30.945946 | 75 | 0.674236 | # Copyright (C) 2014 Universidad Politecnica de Madrid
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import horizon
class Idm(horizon.Dashboard):
    """Identity Manager dashboard registration for Horizon."""

    name = ("Identity Manager")
    name_sm = ("IdM")
    slug = "idm"
    panels = ('home', 'home_orgs', 'organizations',
              'members', 'myApplications', 'users')
    default_panel = 'home'

    def nav(self, context):
        """Always shows the dashboard in navigation.

        Returns True for both branches; the branch only distinguishes
        whether the request's organization is the user's default project.
        """
        request = context['request']
        if request.organization.id != request.user.default_project_id:
            # NOTE(review): this assigns a *local* name, not
            # self.default_panel, so it has no effect — TODO confirm intent.
            default_panel = 'home_orgs'
        else:
            default_panel = 'home'
        return True


horizon.register(Idm)
| 247 | 233 | 23 |
ce7842459573a9a9d59aaf86fabfbefc1bab3549 | 1,297 | py | Python | download_dataset.py | eltonfss/TMDAutoML | cf51d9dc4b7acc4c69a83e248f107bca9d008eac | [
"MIT"
] | 4 | 2019-01-20T19:13:18.000Z | 2022-02-22T22:10:02.000Z | download_dataset.py | eltonfss/TMDAutoML | cf51d9dc4b7acc4c69a83e248f107bca9d008eac | [
"MIT"
] | 1 | 2021-06-18T07:26:56.000Z | 2021-06-29T15:38:42.000Z | download_dataset.py | eltonfss/TMDAutoML | cf51d9dc4b7acc4c69a83e248f107bca9d008eac | [
"MIT"
] | null | null | null | import urllib
# BUGFIX: `import urllib` alone does not import the `urllib.request`
# submodule in Python 3, so `urllib.request.urlopen` below would raise
# AttributeError; import the submodule explicitly.
import urllib.request
import os

# Constant: destination folders for the Transportation Mode Detection data.
dataset_dir = './TransportationData'
datasetBalanced = dataset_dir + '/datasetBalanced'
rawOriginaldata = dataset_dir + '/_RawDataOriginal'

# Pre-windowed dataset CSVs published by the us-tm2017 project.
url_list = ['http://cs.unibo.it/projects/us-tm2017/static/dataset/extension/5second/dataset_5secondWindow.csv',
            'http://cs.unibo.it/projects/us-tm2017/static/dataset/extension/halfsecond/dataset_halfSecondWindow.csv']
#'http://cs.unibo.it/projects/us-tm2017/static/dataset/raw_data/raw_data.tar.gz']
dataset5second = 'dataset_5secondWindow.csv'
datasethalfsecond = 'dataset_halfSecondWindow.csv'

#if __name__ == "__main__":
# create folders (exist_ok avoids the check-then-create race)
os.makedirs(dataset_dir, exist_ok=True)
os.makedirs(datasetBalanced, exist_ok=True)
os.makedirs(rawOriginaldata, exist_ok=True)

print("DOWNLOAD........")
for url in url_list:
    response = urllib.request.urlopen(url)
    csv = response.read()
    # Pick the local file name based on which dataset URL this is.
    if url == 'http://cs.unibo.it/projects/us-tm2017/static/dataset/extension/5second/dataset_5secondWindow.csv':
        outfile = datasetBalanced + '/' + dataset5second
    else:
        outfile = datasetBalanced + '/' + datasethalfsecond
    with open(outfile, 'wb') as f:
        f.write(csv)
print ("DOWNLOAD ENDED.") | 34.131579 | 117 | 0.728604 | import urllib
# BUGFIX: `import urllib` alone does not import the `urllib.request`
# submodule in Python 3, so `urllib.request.urlopen` below would raise
# AttributeError; import the submodule explicitly.
import urllib.request
import os

# Constant: destination folders for the Transportation Mode Detection data.
dataset_dir = './TransportationData'
datasetBalanced = dataset_dir + '/datasetBalanced'
rawOriginaldata = dataset_dir + '/_RawDataOriginal'

# Pre-windowed dataset CSVs published by the us-tm2017 project.
url_list = ['http://cs.unibo.it/projects/us-tm2017/static/dataset/extension/5second/dataset_5secondWindow.csv',
            'http://cs.unibo.it/projects/us-tm2017/static/dataset/extension/halfsecond/dataset_halfSecondWindow.csv']
#'http://cs.unibo.it/projects/us-tm2017/static/dataset/raw_data/raw_data.tar.gz']
dataset5second = 'dataset_5secondWindow.csv'
datasethalfsecond = 'dataset_halfSecondWindow.csv'

#if __name__ == "__main__":
# create folders (exist_ok avoids the check-then-create race)
os.makedirs(dataset_dir, exist_ok=True)
os.makedirs(datasetBalanced, exist_ok=True)
os.makedirs(rawOriginaldata, exist_ok=True)

print("DOWNLOAD........")
for url in url_list:
    response = urllib.request.urlopen(url)
    csv = response.read()
    # Pick the local file name based on which dataset URL this is.
    if url == 'http://cs.unibo.it/projects/us-tm2017/static/dataset/extension/5second/dataset_5secondWindow.csv':
        outfile = datasetBalanced + '/' + dataset5second
    else:
        outfile = datasetBalanced + '/' + datasethalfsecond
    with open(outfile, 'wb') as f:
        f.write(csv)
print("DOWNLOAD ENDED.")