max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
css/executor/stitcher.py | desh2608/css | 7 | 12759951 | import torch
class Stitcher:
    """Stitches per-window source-separation outputs into full-utterance masks.

    Separation is run on overlapping evaluation windows; consecutive windows may
    swap the order of the two estimated speakers.  ``get_stitch`` resolves the
    speaker permutation between adjacent windows and ``get_connect`` applies the
    permutations and overlap-averages the masks into one (F x T) mask per output.
    """
    def __init__(self, stitching_config, sr=16000):
        # eval_win / eval_hop: evaluation window length and hop, in seconds.
        self.eval_win = stitching_config["eval_win"]
        self.eval_hop = stitching_config["eval_hop"]
        # fft_hop: STFT hop size in samples.
        self.fft_hop = stitching_config["hop_size"]
        self.sr = sr
        # Overlap between adjacent windows (eval_win - eval_hop seconds),
        # converted to STFT frames.  The *10 / 10 trick avoids float rounding
        # error for values on a 0.1 s grid.
        self.stitch_margin = int(
            (self.eval_win * 10 - self.eval_hop * 10) / 10 * self.sr / self.fft_hop
        )  # e.g. a 16 ms STFT hop
    def get_stitch(self, x, masks):
        """
        this method use mask as stitching rule
        x: original magnitude spectrogram features corresponding to each window (expects single channel)
        masks: mask for each window (2 sources + 1 noise), each assumed F x T x 3
        Returns PERM: for each adjacent window pair, [0, 1] (keep order) or
        [1, 0] (swap the two speaker channels).
        """
        PERM = []
        for n in range(len(masks) - 1):
            # first find the permutations for each segments
            # Drop the noise channel (last of 3) and move channels first.
            past = masks[n][:, :, :-1].permute(2, 0, 1)  # 2 x F x T
            now = masks[n + 1][:, :, :-1].permute(2, 0, 1)  # 2 x F x T
            # Masked magnitude spectrograms (energy) for each speaker channel.
            E_prev = past * torch.abs(x[n])  # 2 x F x T
            E_now = now * torch.abs(x[n + 1])
            # Calculate a similarity matrix between the trailing margin of the
            # previous window and the leading margin of the next window.
            similarity_matrix = torch.zeros((2, 2))
            for i in range(2):
                for j in range(2):
                    d = (
                        E_prev[j, :, -self.stitch_margin :]
                        - E_now[i, :, : self.stitch_margin]
                    )
                    # Negative sqrt-L1 distance: larger (closer to 0) = more similar.
                    similarity_matrix[i, j] = -torch.sum(
                        torch.pow(torch.abs(d), 0.5)
                    )  # 0.5
            # Compare keeping channel order vs. swapping; pick the better match.
            sim0 = similarity_matrix[0, 0] + similarity_matrix[1, 1]
            sim1 = similarity_matrix[0, 1] + similarity_matrix[1, 0]
            if sim0 >= sim1:
                perm = [0, 1]
            else:
                perm = [1, 0]
            PERM.append(perm)
        return PERM
    def get_connect(self, PERM, mask):
        """Apply pairwise permutations and overlap-average the window masks.

        PERM: output of get_stitch (one [0,1]/[1,0] per adjacent window pair).
        mask: list of per-window masks, each F x T x 3 (src1, src2, noise).
        Returns a tuple (mask_src1, mask_src2, mask_noise), each F x all_L.
        """
        # state toggles every time a swap occurs, so N_M1[i] is the cumulative
        # channel assignment for window i relative to window 0.
        state = 0
        N_M1 = [0]
        for i, item in enumerate(PERM):
            if item[0] == 1:
                state = 1 - state
            N_M1.append(state)
        res1 = []
        res2 = []
        noise = []
        # perm: route each window's two speaker channels per the cumulative state
        for i in range(len(N_M1)):
            if N_M1[i] == 0:
                res1.append(mask[i][:, :, 0])
                res2.append(mask[i][:, :, 1])
            else:
                res1.append(mask[i][:, :, 1])
                res2.append(mask[i][:, :, 0])
            noise.append(mask[i][:, :, 2])
        # winner-take-all: per TF bin, keep only the largest of the 3 masks and
        # floor the losers at 1e-4.
        for i, (r1, r2, n) in enumerate(zip(res1, res2, noise)):
            m = torch.stack((r1, r2, n), dim=2)  # F x T x 3
            m_max = torch.amax(m, dim=2, keepdim=True)
            m = torch.where(m == m_max, m, torch.tensor(1e-4, dtype=torch.float32))
            res1[i] = m[:, :, 0]
            res2[i] = m[:, :, 1]
            noise[i] = m[:, :, 2]
        # Average the masks of the overlapping region.
        hop = int(self.eval_hop * self.sr / self.fft_hop)  # window hop in frames
        F, win = res1[0].shape
        all_L = int(hop * (len(mask) - 1) + win)  # total stitched length in frames
        res_1 = torch.zeros((F, all_L))
        res_2 = torch.zeros((F, all_L))
        res_noise = torch.zeros((F, all_L))
        # indicator counts how many windows contributed to each frame.
        indicator = torch.zeros((1, all_L))
        for i in range(len(mask)):
            wav = mask[i]
            st = hop * i
            # Last window may be shorter than a full window.
            if wav.shape[1] < win:
                en = st + wav.shape[1]
            else:
                en = st + win
            # need to normalize it
            res_1[:, st:en] += res1[i]
            res_2[:, st:en] += res2[i]
            res_noise[:, st:en] += noise[i]
            indicator[:, st:en] += 1
        # Guard against divide-by-zero for frames no window covered.
        indicator[indicator == 0] = 1
        return (res_1 / indicator, res_2 / indicator, res_noise / indicator)
| 2.328125 | 2 |
models/modules/AffineCoupling.py | Euiyeon-Kim/Glow-Pytorch | 2 | 12759952 | # This file contains content licensed by https://github.com/chaiyujin/glow-pytorch/blob/master/LICENSE
import torch
import torch.nn as nn
from models.modules import thops
from models.modules.layers import Conv2d, Conv2dZeros
class AffineCoupling(nn.Module):
    """Glow-style affine coupling layer.

    Splits the input channel-wise into (a, b); `a` passes through unchanged and
    parameterizes an affine transform of `b` via a small conv net.  The Jacobian
    log-determinant is sum(log(scale)).
    """
    def __init__(self, in_channels, out_channels, hidden_channels):
        super().__init__()
        self.NN = nn.Sequential(
            Conv2d(in_channels, hidden_channels),  # ActNorm
            nn.ReLU(inplace=False),
            Conv2d(hidden_channels, hidden_channels, kernel_size=[1, 1]),  # ActNorm
            nn.ReLU(inplace=False),
            Conv2dZeros(hidden_channels, out_channels)  # w/o ActNorm
        )
    def forward(self, inp, logdet=None, reverse=False):
        """Apply (or invert, when reverse=True) the coupling transform.

        Returns (z, logdet).  When `logdet` is None the layer's own
        log-determinant is returned (previously this raised a TypeError
        because `None + d_logdet` was evaluated).
        """
        a, b = thops.split_feature(inp, "split")
        h = self.NN(a)
        shift, scale = thops.split_feature(h, "cross")
        # sigmoid(scale + 2) keeps scale in (0, 1) and near 1 at init.
        scale = torch.sigmoid(scale + 2.)
        if not reverse:  # Normal flow: b -> (b + shift) * scale
            # Out-of-place on purpose: the original `b += shift; b *= scale`
            # mutated the split view and thus the caller's input tensor.
            b = (b + shift) * scale
            d_logdet = thops.sum(torch.log(scale), dim=[1, 2, 3])
        else:  # Inverse flow: b -> b / scale - shift
            b = b / scale - shift
            d_logdet = -thops.sum(torch.log(scale), dim=[1, 2, 3])
        # Accumulate onto the running logdet if one was supplied.
        logdet = d_logdet if logdet is None else logdet + d_logdet
        z = thops.cat_feature(a, b)
        return z, logdet
| 2.1875 | 2 |
02_DSA/homework/yoonghee/1week/sort/merge_sort.py | dudaji/dcc | 0 | 12759953 | '''
merge_sort는 길이 n이 1이 될 때까지 2로 나누어주는 BST와 마찬가지로 divide하여
merge하므로 BST와 같은원리로 (logn)만큼 divide 해주고
merge하면서 값을 비교하여 정렬 하므로 최대 n번 정도의 복잡도로 동작하므로
안좋아도 nlogn의 성능을 갖게 됩니다.
quick_sort pivot값 잘 골랐을 때와 같은 성능을 나타내고
임시로 저장할 공간을 못쓸 때는 quick_sort를 사용합니다.
time complexity : O(nlogn)
백준에서 compile했을 때 에러남
'''
import sys
from sys import stdin
sys.setrecursionlimit(1500)
def merge_sort(arr):
    """Recursively merge-sort *arr* and return a new sorted list.

    O(n log n) time, O(n) extra space; the input list is not modified.
    Lists of length <= 1 are returned as-is.
    """
    if len(arr) <= 1:
        return arr
    # Split point; the original computed this as `pivot` but never used it.
    mid = len(arr) // 2
    left = merge_sort(arr[:mid])
    right = merge_sort(arr[mid:])
    return merge(left, right)
def merge(left, right):
    """Merge two sorted lists into one sorted list.

    Uses <= on ties so equal elements keep their relative order (stable).
    """
    l_idx, r_idx, temp = 0, 0, []
    while l_idx < len(left) and r_idx < len(right):
        if left[l_idx] <= right[r_idx]:
            temp.append(left[l_idx])
            l_idx += 1
        else:
            temp.append(right[r_idx])
            r_idx += 1
    # One side is exhausted; append the remainder of the other (one is empty).
    temp += left[l_idx:]
    temp += right[r_idx:]
    return temp
def sort(arr):
    """Entry point: return *arr* sorted ascending via merge sort."""
    return merge_sort(arr)
'''
arr = []
N = int(stdin.readline())
for x in range(N):
arr.append(int(stdin.readline()))
for y in sort(arr):
print(y)
''' | 3.171875 | 3 |
f4pga/common_modules/io_rename.py | antmicro/f4pga-docs | 10 | 12759954 | <gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2022 F4PGA Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
"""
Rename (ie. change) dependencies and values of a module. This module wraps another,
module whoose name is specified in `params.module` and changes the names of the
dependencies and values it relies on. The parmeters for the wrapped module can be
specified through `params.params`. dict. There are three mapping for the names:
* `params.rename_takes` - mapping for inputs ("takes")
* `params.rename_produces` - mapping for outputs ("products")
* `params.rename_values` - mapping for values
Keys represent the names visible to the wrpped module and values represent the
names visible to the modules outside.
Not specifying a mapping for a given entry will leave it with its original name.
---------------
Accepted module parameters:
* `module` (string, required)
* `params` (dict[string -> any], optional)
* `rename_takes` (dict[string -> string], optional)
* `rename_produces` (dict[string -> string], optional)
* `rename_values` (dict[string -> string], optional)
"""
from f4pga.common import *
from f4pga.module import Module, ModuleContext
from f4pga.module_runner import get_module
def _switch_keys(d: 'dict[str, ]', renames: 'dict[str, str]') -> 'dict[str, ]':
newd = {}
for k, v in d.items():
r = renames.get(k)
if r is not None:
newd[r] = v
else:
newd[k] = v
return newd
def _switchback_attrs(d: Namespace, renames: 'dict[str, str]') -> SimpleNamespace:
newn = SimpleNamespace()
for k, v in vars(d).items():
setattr(newn, k, v)
for k, r in renames.items():
if hasattr(newn, r):
v = getattr(newn, r)
delattr(newn, r)
setattr(newn, k, v)
return newn
def _switch_entries(l: 'list[str]', renames: 'dict[str, str]') -> 'list[str]':
    """Return a copy of *l* with entries renamed per *renames*.

    A renamed entry keeps the qualifier of the original dependency name;
    entries absent from *renames* are passed through unchanged.
    """
    out = []
    for entry in l:
        renamed = renames.get(entry)
        if renamed is None:
            out.append(entry)
        else:
            _, qualifier = decompose_depname(entry)
            out.append(with_qualifier(renamed, qualifier))
    return out
def _or_empty_dict(d: 'dict | None'):
return d if d is not None else {}
class IORenameModule(Module):
    """Wraps another module and renames its takes, products and values.

    The wrapped module (params["module"]) sees the inside-names; the outside
    world sees the renamed versions per params["rename_takes"],
    params["rename_produces"] and params["rename_values"].  Unmapped names are
    left unchanged.
    """
    module: Module
    rename_takes: 'dict[str, str]'
    rename_produces: 'dict[str, str]'
    rename_values: 'dict[str, str]'
    def map_io(self, ctx: ModuleContext):
        """Map I/O through the wrapped module, translating names both ways."""
        newctx = ctx.shallow_copy()
        newctx.takes = _switchback_attrs(ctx.takes, self.rename_takes)
        newctx.values = _switchback_attrs(ctx.values, self.rename_values)
        r = self.module.map_io(newctx)
        return _switch_keys(r, self.rename_produces)
    def execute(self, ctx: ModuleContext):
        """Execute the wrapped module with inside-names restored on the context."""
        newctx = ctx.shallow_copy()
        newctx.takes = _switchback_attrs(ctx.takes, self.rename_takes)
        newctx.values = _switchback_attrs(ctx.values, self.rename_values)
        newctx.outputs = _switchback_attrs(ctx.produces, self.rename_produces)
        # (Removed a leftover debug `print(newctx.takes)` that wrote to stdout
        # on every execution.)
        return self.module.execute(newctx)
    def __init__(self, params):
        # Instantiate the wrapped module from its module string.
        mod_path = resolve_modstr(params["module"])
        module_class = get_module(mod_path)
        module: Module = module_class(params.get("params"))
        # Missing rename maps default to identity (empty dict).
        self.rename_takes = _or_empty_dict(params.get("rename_takes"))
        self.rename_produces = _or_empty_dict(params.get("rename_produces"))
        self.rename_values = _or_empty_dict(params.get("rename_values"))
        self.module = module
        self.name = f'{module.name}-io_renamed'
        self.no_of_phases = module.no_of_phases
        # Advertise the renamed (outside) names.
        self.takes = _switch_entries(module.takes, self.rename_takes)
        self.produces = _switch_entries(module.produces, self.rename_produces)
        self.values = _switch_entries(module.values, self.rename_values)
        if hasattr(module, 'prod_meta'):
            self.prod_meta = _switch_keys(module.prod_meta, self.rename_produces)
ModuleClass = IORenameModule | 1.8125 | 2 |
mtrain.py | trevor-wieland/MTrainAI | 0 | 12759955 | <reponame>trevor-wieland/MTrainAI<filename>mtrain.py
import random
import treeclasses
import playerclasses
import dominoclasses
import pandas as pd
import numpy as np
import copy
def generate_players(num_players, modes, domino_size, filename):
    """
    Creates player objects based on modes passed to this script
    Returns a list of the player objects in order of creation
    """
    roster = []
    for idx in range(num_players):
        mode = modes[idx]
        if mode == "Greedy":
            roster.append(playerclasses.GreedyPlayer(idx))
        elif mode == "Random":
            roster.append(playerclasses.RandomPlayer(idx))
        elif mode == "Probability":
            roster.append(playerclasses.ProbabilityPlayer(idx, domino_size))
        elif mode == "Neural":
            # Neural players additionally need the training-data file and
            # the player count.
            roster.append(
                playerclasses.NeuralPlayer(idx, domino_size, filename, num_players)
            )
    return roster
def create_one_hot(domino_size, dominos):
    """
    Creates a list of 0s and 1s where each 0 or 1 corresponds to having a specific domino
    Used for collecting data on what dominos are in a hand, in a train, etc.
    This is later decoded when training the neural net

    Each domino (a, b) with 0 <= min <= max <= domino_size maps to a unique
    slot via a triangular-number index; returns a plain Python list of floats.
    """
    # Number of dominos in a double-`domino_size` set: (n+1) singles plus
    # (n+1)*n/2 mixed pairs.
    domino_count = int(domino_size + 1 + ((domino_size + 1) * domino_size) / 2.0)
    one_hot = np.zeros(domino_count)
    for domino in dominos:
        try:
            location = int(max(domino) * (domino_size + 1 - (max(domino) + 1) / 2.0) + min(domino))
        except TypeError as err:
            # A malformed entry (e.g. a bare int instead of a pair) makes
            # max()/min() fail; surface the offending value instead of the
            # original debug prints.  Callers still see a TypeError.
            raise TypeError(
                f"malformed domino {domino!r} in {dominos!r}"
            ) from err
        one_hot[location] = 1
    return one_hot.tolist()
def strip_potential_plays(potential_plays, domino_size):
    """
    Filters through potential_plays (A matrix of plays) and creates a list of all dominos potentially to be played
    Returns a one_hot of the dominos that can potentially be played

    Only the last domino of each candidate play is considered.
    """
    candidates = [
        play[-1]
        for potentials in potential_plays
        for play in potentials
    ]
    return create_one_hot(domino_size, candidates)
def mexicantrain(num_players=2, domino_size=12, data_collection=False, debug=True,
                 modes=["Greedy", "Random"], data_index=0, file_name="PlayData/data2_12_100"):
    """
    A function that runs a single game of mexican train from start to finish. A full guide of the
    rules can be found in the README.MD file.
    Default parameters support a head-to-head match, but passing in additional players and changing the
    parameters accordingly is supported as well.
    When not using a Neural player, file_name does not matter
    Returns the scores, the index of the winning player, and the data collected if in data_collection mode

    NOTE(review): `modes=[...]` is a mutable default argument; it is only read,
    never mutated, so this is currently harmless — but a tuple would be safer.
    """
    #Check player number
    if not num_players in range(2, 9):
        raise ValueError("Number of players must be between 2 and 8, inclusive")
    #Set up DataFrame for data collection mode
    column_names = ["round_number", "turn_number", "player_number", "play",
                    "t_num", "hand", "unknown", "potential_plays", "points"]
    for num in range(0, num_players + 1):
        column_names.append("train_" + str(num))
        column_names.append("marker_" + str(num))
    data = pd.DataFrame(dtype=object, columns=column_names)
    #Hand size rule
    hand_sizes = [16, 16, 15, 14, 12, 10, 9]
    hand_size = hand_sizes[num_players - 2]
    scores = []
    for ind in range(0, num_players):
        scores.append(0)
    #Generate the players for the game
    players = generate_players(num_players, modes, domino_size, file_name)
    #Start game: one round per double, counting down from the max double to 0.
    for round_number in range(domino_size, -1, -1):
        if debug: print("Round start: " + str(round_number))
        #Create Shuffled Deck
        if debug: print("Creating Deck")
        deck = dominoclasses.Deck(domino_size)
        # One train per player plus the communal ("mexican") train at the end.
        trains = []
        for playernum in range(0, num_players + 1):
            trains.append(dominoclasses.Train())
        trains[num_players].set_marker(True)
        #Generate Random Hands for each player
        if debug: print("Creating Hands")
        hands = []
        for playernum in range(0, num_players):
            dominos = deck.draw(hand_size)
            hands.append(dominoclasses.Hand(dominos))
        #Check who has the current target double, if no one has it, everyone draws one domino
        if debug: print("Checking for player with needed domino:")
        start_player = -1
        draw_again = True
        while draw_again:
            for playernum in range(0, num_players):
                if hands[playernum].check_double(round_number) == 1:
                    draw_again = False
                    start_player = playernum
                    hands[start_player].remove_domino((round_number, round_number))
                    break
            if draw_again:
                for playernum in range(0, num_players):
                    domino = deck.draw(1)
                    hands[playernum].add_dominos(domino)
            else:
                break
        if debug: print("Domino found, round beginning")
        #Start round
        round_over = False
        current_player = start_player
        # double_up = (active?, train index of the uncovered double)
        double_up = (False, -1)
        # doom_counter counts consecutive failed turns; aborts a stuck round.
        doom_counter = 0
        turn_number = 1
        while not round_over:
            if debug: print("Player " + str(current_player) + " is now playing")
            end_turn = False
            active_player = players[current_player]
            #If another player has played a double, the double must be covered
            if double_up[0]:
                #Get potential play
                if debug: print("Forced to play on double on train " + str(double_up[1]))
                if debug: print("Last domino on train " + str(double_up[1]) + " is " + str(trains[double_up[1]].get_last()))
                if debug: print("Current player's hand is: " + str(hands[current_player].dominos))
                play, play_data = active_player.play_forced_double(hands[current_player].dominos, trains[double_up[1]])
                #If no play exists, try again
                if len(play) == 0:
                    if debug: print("No play available, drawing again")
                    hands[current_player].add_dominos(deck.draw(1))
                    if debug: print("Current player's hand is: " + str(hands[current_player].dominos))
                    play, play_data = active_player.play_forced_double(hands[current_player].dominos, trains[double_up[1]])
                    if len(play) == 0:
                        if debug: print("No play available, continuing to next player")
                        trains[current_player].set_marker(True)
                        end_turn = True
                        doom_counter += 1
                #Play play if it exists, and return double state to normal
                if not end_turn:
                    #Collect data on the play if necessary
                    if data_collection:
                        data.loc[data_index, "round_number"] = round_number
                        data.loc[data_index, "turn_number"] = turn_number / num_players
                        data.loc[data_index, "player_number"] = current_player
                        data.loc[data_index, "play"] = create_one_hot(domino_size, play)
                        data.loc[data_index, "t_num"] = double_up[1]
                        data.loc[data_index, "hand"] = create_one_hot(domino_size, hands[current_player].dominos)
                        # "unknown" = dominos this player cannot see: the deck
                        # plus every other player's hand.
                        unknown = copy.deepcopy(deck.dominos)
                        for x in range(0, len(hands)):
                            if x == current_player:
                                continue
                            else:
                                unknown += hands[x].dominos
                        data.loc[data_index, "unknown"] = create_one_hot(domino_size, unknown)
                        data.loc[data_index, "potential_plays"] = create_one_hot(domino_size, play_data)
                        for x in range(0, len(trains)):
                            data.loc[data_index, "train_" + str(x)] = create_one_hot(domino_size, trains[x].train_list)
                            if trains[x].marker_up:
                                data.loc[data_index, "marker_" + str(x)] = 1
                            else:
                                data.loc[data_index, "marker_" + str(x)] = 0
                        data_index += 1
                    #Play the play onto the target train and remove from hand
                    trains[double_up[1]].add_domino(play[0])
                    for pl in play:
                        hands[current_player].remove_domino(pl)
                    #End the double_up mode and reset the doom_counter
                    double_up = (False, -1)
                    doom_counter = 0
            #If no double is needed to be covered and the player hasn't played their train yet,
            #they play their train now
            elif trains[current_player].empty():
                #Get the train play from the player class
                play = active_player.play_train(hands[current_player].dominos, round_number)
                #Verify an actual train is being played, if not draw and try again
                if len(play) == 0:
                    hands[current_player].add_dominos(deck.draw(1))
                    play = active_player.play_train(hands[current_player].dominos, round_number)
                    if len(play) == 0:
                        trains[current_player].set_marker(True)
                        doom_counter += 1
                        end_turn = True
                #Play the train the player came up with
                if not end_turn:
                    doom_counter = 0
                    trains[current_player].add_train(play)
                    for pl in play:
                        hands[current_player].remove_domino(pl)
                    #Check if the final domino played is a double, if so deal with that case
                    if(play[-1][0] == play[-1][1]):
                        hands[current_player].add_dominos(deck.draw(1))
                        # NOTE(review): double_up is (False, -1) here, so
                        # trains[double_up[1]] indexes trains[-1] (the communal
                        # train) rather than the train the double was played on
                        # (trains[current_player]).  Also: play_forced_double
                        # returns two values elsewhere but is unpacked into a
                        # single name here, and a non-empty result is never
                        # actually placed on any train.  Confirm intended
                        # behavior before relying on this branch.
                        play = active_player.play_forced_double(hands[current_player].dominos, trains[double_up[1]])
                        if len(play) == 0:
                            trains[current_player].set_marker(True)
                            end_turn = True
                            doom_counter += 1
                    #End the turn afterwards
                    end_turn = True
            #In most cases during the game, this last else will occur, which causes players to play normally
            else:
                #Get a standard play from the current player
                t_num, play, play_data = active_player.play_normally(hands[current_player].dominos, trains, round_number, turn_number)
                #If the play doesn't exist, try again and process this
                if len(play) == 0:
                    hands[current_player].add_dominos(deck.draw(1))
                    t_num, play, play_data = active_player.play_normally(hands[current_player].dominos, trains, round_number, turn_number)
                    if len(play) == 0:
                        trains[current_player].set_marker(True)
                        doom_counter += 1
                        end_turn = True
                #If the play does exist, deal with the three possibilities the play could look like
                if not end_turn:
                    doom_counter = 0
                    #Single play that isn't a double, the easiest case to deal with
                    if len(play) == 1 and not (play[0][0] == play[0][1]):
                        #Collect data as necessary
                        if data_collection:
                            data.loc[data_index, "round_number"] = round_number
                            data.loc[data_index, "turn_number"] = turn_number / num_players
                            data.loc[data_index, "player_number"] = current_player
                            data.loc[data_index, "play"] = create_one_hot(domino_size, play)
                            data.loc[data_index, "t_num"] = t_num
                            data.loc[data_index, "hand"] = create_one_hot(domino_size, hands[current_player].dominos)
                            unknown = copy.deepcopy(deck.dominos)
                            for x in range(0, len(hands)):
                                if x == current_player:
                                    continue
                                else:
                                    unknown += hands[x].dominos
                            data.loc[data_index, "unknown"] = create_one_hot(domino_size, unknown)
                            data.loc[data_index, "potential_plays"] = strip_potential_plays(play_data, domino_size)
                            for x in range(0, len(trains)):
                                data.loc[data_index, "train_" + str(x)] = create_one_hot(domino_size, trains[x].train_list)
                                if trains[x].marker_up:
                                    data.loc[data_index, "marker_" + str(x)] = 1
                                else:
                                    data.loc[data_index, "marker_" + str(x)] = 0
                            data_index += 1
                        #Play domino on train and remove it from the player's hand
                        trains[t_num].add_domino(play[0])
                        for pl in play:
                            hands[current_player].remove_domino(pl)
                    #Single play that is a double, signally cover double mode
                    elif len(play) == 1 and (play[0][0] == play[0][1]):
                        #Collect data as necessary
                        if data_collection:
                            data.loc[data_index, "round_number"] = round_number
                            data.loc[data_index, "turn_number"] = turn_number / num_players
                            data.loc[data_index, "player_number"] = current_player
                            data.loc[data_index, "play"] = create_one_hot(domino_size, play)
                            data.loc[data_index, "t_num"] = t_num
                            data.loc[data_index, "hand"] = create_one_hot(domino_size, hands[current_player].dominos)
                            unknown = copy.deepcopy(deck.dominos)
                            for x in range(0, len(hands)):
                                if x == current_player:
                                    continue
                                else:
                                    unknown += hands[x].dominos
                            data.loc[data_index, "unknown"] = create_one_hot(domino_size, unknown)
                            data.loc[data_index, "potential_plays"] = strip_potential_plays(play_data, domino_size)
                            for x in range(0, len(trains)):
                                data.loc[data_index, "train_" + str(x)] = create_one_hot(domino_size, trains[x].train_list)
                                if trains[x].marker_up:
                                    data.loc[data_index, "marker_" + str(x)] = 1
                                else:
                                    data.loc[data_index, "marker_" + str(x)] = 0
                            data_index += 1
                        #Play domino and remove from hand of player
                        trains[t_num].add_domino(play[0])
                        for pl in play:
                            hands[current_player].remove_domino(pl)
                        #Draw to attempt to cover the domino
                        hands[current_player].add_dominos(deck.draw(1))
                        play_2, play_data_2 = active_player.play_forced_double(hands[current_player].dominos, trains[t_num])
                        #If no play is available, start double_up mode
                        if len(play_2) == 0:
                            double_up = (True, t_num)
                            trains[current_player].set_marker(True)
                            end_turn = True
                        #If a play is available, play it and collect data
                        if not end_turn:
                            #Collect Data as neccesary
                            if data_collection:
                                data.loc[data_index, "round_number"] = round_number
                                data.loc[data_index, "turn_number"] = turn_number / num_players
                                data.loc[data_index, "player_number"] = current_player
                                data.loc[data_index, "play"] = create_one_hot(domino_size, play_2)
                                data.loc[data_index, "t_num"] = t_num
                                data.loc[data_index, "hand"] = create_one_hot(domino_size, hands[current_player].dominos)
                                unknown = copy.deepcopy(deck.dominos)
                                for x in range(0, len(hands)):
                                    if x == current_player:
                                        continue
                                    else:
                                        unknown += hands[x].dominos
                                data.loc[data_index, "unknown"] = create_one_hot(domino_size, unknown)
                                data.loc[data_index, "potential_plays"] = create_one_hot(domino_size, play_data_2)
                                for x in range(0, len(trains)):
                                    data.loc[data_index, "train_" + str(x)] = create_one_hot(domino_size, trains[x].train_list)
                                    if trains[x].marker_up:
                                        data.loc[data_index, "marker_" + str(x)] = 1
                                    else:
                                        data.loc[data_index, "marker_" + str(x)] = 0
                                data_index += 1
                            #Play domino drawn to train and remove from hand
                            trains[t_num].add_domino(play_2[0])
                            for pl in play_2:
                                hands[current_player].remove_domino(pl)
                    #Case when a double and a followup are played
                    else:
                        #Collect Data as needed
                        if data_collection:
                            data.loc[data_index, "round_number"] = round_number
                            data.loc[data_index, "turn_number"] = turn_number / num_players
                            data.loc[data_index, "player_number"] = current_player
                            data.loc[data_index, "play"] = create_one_hot(domino_size, play)
                            data.loc[data_index, "t_num"] = t_num
                            data.loc[data_index, "hand"] = create_one_hot(domino_size, hands[current_player].dominos)
                            unknown = copy.deepcopy(deck.dominos)
                            for x in range(0, len(hands)):
                                if x == current_player:
                                    continue
                                else:
                                    unknown += hands[x].dominos
                            data.loc[data_index, "unknown"] = create_one_hot(domino_size, unknown)
                            data.loc[data_index, "potential_plays"] = strip_potential_plays(play_data, domino_size)
                            for x in range(0, len(trains)):
                                data.loc[data_index, "train_" + str(x)] = create_one_hot(domino_size, trains[x].train_list)
                                if trains[x].marker_up:
                                    data.loc[data_index, "marker_" + str(x)] = 1
                                else:
                                    data.loc[data_index, "marker_" + str(x)] = 0
                            data_index += 1
                        #Play both the dominos to the train and remove from the player's hand
                        trains[t_num].add_domino(play[0])
                        trains[t_num].add_domino(play[1])
                        for pl in play:
                            hands[current_player].remove_domino(pl)
                        end_turn = True
            #Check if the current player is out of dominos, or if doom counter has been exceeded
            if hands[current_player].winning():
                if debug: print("Player " + str(current_player) + " ran out of dominos!")
                round_over = True
            elif doom_counter > num_players * 5:
                if debug:
                    print("Doom counter exceeded maximum!")
                    for hand in hands:
                        print(hand.dominos)
                round_over = True
            #Continue on to the next player and increase the turn_number
            current_player += 1
            if current_player > num_players - 1:
                current_player = 0
            turn_number += 1
            # NOTE(review): print() does not do %-formatting; this emits
            # "Turn %s now <n>" rather than "Turn <n> now".
            if debug and turn_number % 5 == 0: print("Turn %s now", turn_number)
        #Once the round is over, calculate the scores of each player for that round
        round_scores = []
        for playernum in range(0, num_players):
            scores[playernum] += hands[playernum].score
            round_scores.append(hands[playernum].score)
        if debug:
            print("Round " + str(round_number) + "over")
            print("Round Scores: ")
            print(round_scores)
            print("Total Scores: ")
            print(scores)
        #Collect data as needed
        if data_collection:
            for ind in data.index:
                data.loc[ind, "points"] = hands[int(data.loc[ind, "player_number"])].score
    #Find overall winner (lowest cumulative score)
    lowest_score = 1000000
    index = -1
    for playernum in range(0, num_players):
        if scores[playernum] < lowest_score:
            lowest_score = scores[playernum]
            index = playernum
    #Return results of game
    if debug: print("Game over, player" + str(index) + " won")
return scores, index, data | 3.109375 | 3 |
v1/pipe/src/data_pipe_utils.py | elsdes3/bikeshare-dash | 0 | 12759956 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Data pipeline utilities."""
# pylint: disable=invalid-name,dangerous-default-value
# pylint: disable=too-many-arguments
from typing import Dict, List, Tuple
import pandas as pd
import pandera as pa
import prefect
from prefect import task
import src.aggregate_data as ad
import src.city_neighbourhoods as cn
import src.city_pub_data as cpd
import src.trips as bt
from src.process_trips import process_trips_data
from src.stations_metadata import get_stations_metadata, transform_metadata
from src.utils import export_df_to_multiple_csv_files
@task
def get_bikeshare_stations_metadata(
    open_tor_data_url: str,
    stations_params: Dict[str, str],
    stations_cols_wanted: List[str],
) -> pd.DataFrame:
    """Fetch bikeshare station metadata and apply the standard column transforms."""
    raw = get_stations_metadata(open_tor_data_url, stations_params)
    df = transform_metadata(raw, stations_cols_wanted)
    logger = prefect.context.get("logger")
    logger.info(f"Retrieved {len(df):,} rows of bikeshare station metadata.")
    return df
@task
def get_bikeshare_trips_data(
    trips_data_glob_str: str,
    trips_nan_cols: List[str],
    trips_duplicated_cols: List[str],
) -> pd.DataFrame:
    """Load raw bikeshare trips files and drop NaN/duplicate rows."""
    logger = prefect.context.get("logger")
    df = process_trips_data(
        bt.load_trips_data(trips_data_glob_str),
        trips_nan_cols,
        trips_duplicated_cols,
    )
    logger.info(f"Retrieved {len(df):,} rows of bikeshare trips data.")
    return df
@task
def get_city_cultural_hotspots_data(
    open_tor_data_url: str, ch_params: Dict[str, str]
) -> pd.DataFrame:
    """Download the city's cultural hotspots open dataset."""
    df = cpd.get_cultural_hotspots(open_tor_data_url, ch_params)
    logger = prefect.context.get("logger")
    logger.info(
        f"Retrieved {len(df):,} rows of citywide cultural hotspot data."
    )
    return df
@task
def get_city_points_of_interest_data(
    open_tor_data_url: str, poi_params: Dict[str, str]
) -> pd.DataFrame:
    """Download the city's points-of-interest open dataset."""
    df = cpd.get_poi_data(open_tor_data_url, poi_params)
    logger = prefect.context.get("logger")
    logger.info(
        f"Retrieved {len(df):,} rows of citywide points-of-interest data."
    )
    return df
@task
def get_city_neighbourhood_boundary_data(
    open_tor_data_url: str,
    neigh_boundary_params: Dict[str, str],
    neigh_cols_to_show: List[str],
) -> pd.DataFrame:
    """Download neighbourhood boundary polygons (with land area) for the city."""
    gdf = cpd.get_neighbourhood_boundary_land_area_data(
        open_tor_data_url, neigh_boundary_params, neigh_cols_to_show
    )
    logger = prefect.context.get("logger")
    logger.info(
        f"Retrieved {len(gdf):,} rows of city neighbourhood boundary data."
    )
    return gdf
@task
def get_city_public_transit_locations_data(
    open_tor_data_url: str, pt_params: Dict[str, str]
) -> pd.DataFrame:
    """Download the city's public transit stop locations open dataset."""
    df = cpd.get_public_transit_locations(open_tor_data_url, pt_params)
    logger = prefect.context.get("logger")
    logger.info(
        f"Retrieved {len(df):,} rows of city public transit location data."
    )
    return df
@task
def get_city_college_university_locations_data() -> pd.DataFrame:
    """Retrieve college and university locations for the city (no parameters)."""
    df = cpd.get_coll_univ_locations()
    logger = prefect.context.get("logger")
    logger.info(
        f"Retrieved {len(df):,} rows of city college-univ location data."
    )
    return df
@task
def get_neighbourhood_profile_data(
    open_tor_data_url: str, neigh_profile_params: Dict[str, str]
) -> pd.DataFrame:
    """Download the city's neighbourhood (demographic) profiles open dataset."""
    df = cn.get_neighbourhood_profile_data(
        open_tor_data_url, neigh_profile_params
    )
    logger = prefect.context.get("logger")
    logger.info(
        f"Retrieved {len(df):,} rows of city neighbourhood profile data."
    )
    return df
@task(nout=6)
def aggregate_data(
    gdf: pd.DataFrame,
    df_poi: pd.DataFrame,
    dfch_essentials: pd.DataFrame,
    df_coll_univ: pd.DataFrame,
    df_pt_slice: pd.DataFrame,
    df_neigh_demog: pd.DataFrame,
    df_stations: pd.DataFrame,
) -> Tuple[pd.DataFrame]:
    """Combine neighbourhood stats and hourly bikeshare trips.

    Spatially joins each point dataset (POIs, cultural hotspots, colleges,
    transit stops, bikeshare stations) against the neighbourhood boundary
    polygons in *gdf*, aggregates per-neighbourhood statistics, and attaches
    them to the station metadata.  Each join is schema-validated with pandera.

    NOTE(review): annotated as returning a Tuple but actually returns a list
    of six DataFrames (prefect nout=6 unpacks it either way) — confirm.
    """
    # Columns needed from the boundary GeoDataFrame for the spatial joins.
    geo_cols = ["AREA_NAME", "geometry", "Shape__Area"]
    logger = prefect.context.get("logger")
    # Add neighbourhood to points-of-interest data
    df_poi_new = pa.check_io(out=ad.poi_new_schema)(
        cn.get_data_with_neighbourhood
    )(
        gdf[geo_cols],
        df_poi.rename(
            columns={
                "POI_LATITUDE": "lat",
                "POI_LONGITUDE": "lon",
            }
        )[["ID", "NAME", "lat", "lon"]],
        "lat",
        "lon",
        "ID",
        use_prefect=True,
    )
    logger.info("Added neighbourhood to city points-of-interest data")
    # Add neighbourhood to cultural hotspots data
    dfch_essentials_new = pa.check_output(ad.ch_essentials_new_schema)(
        cn.get_data_with_neighbourhood
    )(
        gdf[geo_cols],
        dfch_essentials.rename(
            columns={
                "POI_LATITUDE": "lat",
                "POI_LONGITUDE": "lon",
            }
        )[["ID", "NAME", "lat", "lon"]],
        "lat",
        "lon",
        "ID",
        use_prefect=True,
    )
    logger.info("Added neighbourhood to city cultural hotspots data")
    # Add neighbourhood to college and university location data
    df_coll_univ_new = pa.check_output(ad.coll_univ_schema_new)(
        cn.get_data_with_neighbourhood
    )(
        gdf[geo_cols],
        df_coll_univ,
        "lat",
        "lon",
        "institution_id",
        use_prefect=True,
    )
    logger.info(
        "Added neighbourhood to city college and university locations data"
    )
    # Add neighbourhood to public transit locations data
    df_pt_slice_new = pa.check_output(ad.pub_trans_locations_schema_new)(
        cn.get_data_with_neighbourhood
    )(
        gdf[geo_cols],
        df_pt_slice,
        "lat",
        "lon",
        "stop_id",
        use_prefect=True,
    )
    logger.info("Added neighbourhood to city public transit locations data")
    # Aggregate above neighbourhood stats and combine with demographics
    df_neigh_stats = ad.combine_neigh_stats(
        gdf,
        df_pt_slice_new,
        df_coll_univ_new,
        dfch_essentials_new,
        df_poi_new,
        df_neigh_demog,
    )
    logger.info("Aggregated statistics per city neighbourhood")
    # Add neighbourhood to stations locations
    df_stations_new = pa.check_output(ad.stations_schema_merged)(
        cn.get_data_with_neighbourhood
    )(
        gdf[geo_cols],
        df_stations,
        "lat",
        "lon",
        "station_id",
        use_prefect=True,
    )
    logger.info("Added stations to bikeshare station metadata")
    # Add stations to combined+aggregated neighbourhood stats
    df_stations_new = ad.combine_stations_metadata_neighbourhood(
        df_stations_new, df_neigh_stats
    )
    logger.info(
        "Combined stats and bikeshare station metadata per neighbourhood"
    )
    return [
        df_poi_new,
        dfch_essentials_new,
        df_coll_univ_new,
        df_pt_slice_new,
        df_neigh_stats,
        df_stations_new,
    ]
@task
def combine_trips_neighbourhood_data(
    df: pd.DataFrame, cols: List[str], df_stations_new: pd.DataFrame
) -> pd.DataFrame:
    """Join hourly per-station ridership onto aggregated neighbourhood stats."""
    logger = prefect.context.get("logger")
    merged = ad.combine_hourly_trips_per_station(df, cols, df_stations_new)
    logger.info(
        "Created aggregation of hourly trips per station with "
        "neighbourhood stats"
    )
    return merged
@task
def export_aggregated_data_multiple_csvs(
    df: pd.DataFrame,
    cols_to_export: List[str],
    nrows_per_staged_csv_file: int,
) -> None:
    """Split a single DataFrame into multiple CSV files under local_stage."""
    # Validate the frame against the merged hourly-trips schema before export.
    validated_export = pa.check_input(ad.hourly_trips_by_station_merged_schema)(
        export_df_to_multiple_csv_files
    )
    validated_export(
        df,
        cols_to_export,
        "local_stage",
        nrows_per_staged_csv_file,
        use_prefect=True,
    )
| 2.390625 | 2 |
src/process_bill_cycle.py | sekka1/kubernetes-cost-attribution | 4 | 12759957 | <filename>src/process_bill_cycle.py
from src import logger_config
from src import prometheus_data
from src import find
from src import cost_assumptions
from src import calculate_cost
from time import sleep
logger = logger_config.logger
def process(promehtheus_info_dict, start_time, end_time):
    """Run one billing cycle over the window [start_time, end_time].

    Fetches pod/node metrics from Prometheus (retrying each query until it
    returns a non-empty result), loads the machine cost assumptions, and
    prices every pod that is assigned to a node during this cycle.

    Returns a list of dicts, one per priced pod, containing the inputs used
    and the resulting per-minute cost breakdown (total / memory / cpu).
    """
    # Dict holding the current billing cycles information
    current_billing_cycle_list = []
    # Dict holding the current period's billing information
    kube_pod_info_dict = {}
    kube_node_labels_dict = {}
    kube_pod_container_resource_limits_cpu_cores_dict = {}
    kube_pod_container_resource_limits_memory_bytes_dict = {}
    # Getting the values
    # NOTE(review): each retry loop compares against [] even though the
    # variables are named *_dict -- presumably the prometheus_data helpers
    # return a list of result rows; confirm against prometheus_data.
    kube_pod_info_dict = prometheus_data.get_kube_pod_info_dict(promehtheus_info_dict, start_time, end_time)
    while kube_pod_info_dict == []:
        sleep(1)
        kube_pod_info_dict = prometheus_data.get_kube_pod_info_dict(promehtheus_info_dict, start_time, end_time)
    sleep(1)
    kube_node_labels_dict = prometheus_data.get_kube_node_labels_dict(promehtheus_info_dict, start_time, end_time)
    while kube_node_labels_dict == []:
        sleep(1)
        kube_node_labels_dict = prometheus_data.get_kube_node_labels_dict(promehtheus_info_dict, start_time, end_time)
    sleep(0.5)
    kube_pod_container_resource_limits_cpu_cores_dict = prometheus_data.get_kube_pod_container_resource_limits_cpu_cores_dict(promehtheus_info_dict, start_time, end_time)
    while kube_pod_container_resource_limits_cpu_cores_dict == []:
        sleep(1)
        kube_pod_container_resource_limits_cpu_cores_dict = prometheus_data.get_kube_pod_container_resource_limits_cpu_cores_dict(promehtheus_info_dict, start_time, end_time)
    sleep(0.5)
    kube_pod_container_resource_limits_memory_bytes_dict = prometheus_data.get_kube_pod_container_resource_limits_memory_bytes_dict(promehtheus_info_dict, start_time, end_time)
    while kube_pod_container_resource_limits_memory_bytes_dict == []:
        sleep(1)
        kube_pod_container_resource_limits_memory_bytes_dict = prometheus_data.get_kube_pod_container_resource_limits_memory_bytes_dict(promehtheus_info_dict, start_time, end_time)
    # Get cost assumption file(s)
    cost_assumptions_dict = cost_assumptions.get()
    #print(cost_assumptions_dict)
    #
    # Loop through the list of pods and calculate how much each pod cost per min
    #
    # Everytime we touch anything in this loop we have to verify the numbers match up to
    # what is calculated in the purple top right section of this spread sheet:
    # https://docs.google.com/spreadsheets/d/1r05JBmegiQ9LiFy9nHixmd2PdFSKp6Bi_a6O-xfRcRw/edit#gid=0
    #
    for pod_row in kube_pod_info_dict:
        # NOTE(review): debugging prints left in; consider logger.debug instead.
        print("xxxxxxxxxxxxxxxxxxxx")
        print(pod_row)
        print("xxxxxxxxxxxxxxxxxxxxx")
        # Only price rows that carry all three identifying labels.
        if 'node' in pod_row['metric'] and 'exported_namespace' in pod_row['metric'] and 'pod' in pod_row['metric']:
            exported_namespace = pod_row['metric']['exported_namespace']
            node = pod_row['metric']['node']
            pod = pod_row['metric']['pod']
            # Skip pods not yet scheduled onto a node (empty node label).
            if (node != ''):
                logger.info("exported_namespace - "+exported_namespace)
                logger.info("node - "+node)
                logger.info("pod - "+pod)
                # Get cpu core limit
                cpu_core_limit = find.pods_resource_limits_cpu_cores(kube_pod_container_resource_limits_cpu_cores_dict,
                                                                     exported_namespace,
                                                                     node,
                                                                     pod)
                # Get memory limit bytes
                memory_bytes_limit = find.pods_resource_limits_memory_bytes(kube_pod_container_resource_limits_memory_bytes_dict,
                                                                            exported_namespace,
                                                                            node,
                                                                            pod)
                # Get machine info dict
                machine_info_dict = find.machine_info_by_hostname(kube_node_labels_dict, node)
                print("============")
                print(node)
                print(kube_node_labels_dict)
                print(machine_info_dict)
                print("============")
                # Fixed 50/50 split of machine cost between memory and cpu,
                # with no markup applied.
                cost_assumptions_memory_percentage = 0.5
                cost_assumptions_cpu_percentage = 0.5
                markup = 0
                if machine_info_dict != None:
                    # Choose the pricing column based on the node's spot label.
                    machine_spot_or_on_demand = None
                    if machine_info_dict['isSpot'] == "true":
                        machine_spot_or_on_demand = 'spot'
                    else:
                        machine_spot_or_on_demand = 'on_demand'
                    logger.info("cpu core limit: "+str(cpu_core_limit))
                    logger.info("memory bytes limit: "+str(memory_bytes_limit))
                    logger.info("machine_spot_or_on_demand: "+machine_spot_or_on_demand)
                    logger.info("machine type: "+machine_info_dict['instance_type'])
                    logger.info("machine hourly cost: "+str(cost_assumptions_dict['ec2_info'][machine_info_dict['instance_type']]['hourly_cost'][machine_spot_or_on_demand]))
                    logger.info("cost_assumptions_dict memory_percentage: "+str(cost_assumptions_memory_percentage))
                    logger.info("cost_assumptions_dict cpu percentage: "+str(cost_assumptions_cpu_percentage))
                    logger.info("machine mark up: "+str(markup))
                    logger.info("ec2 Machine total memory: "+str(cost_assumptions_dict['ec2_info'][machine_info_dict['instance_type']]['memory']))
                    logger.info("ec2 Machine total cpu: "+str(cost_assumptions_dict['ec2_info'][machine_info_dict['instance_type']]['cpu']))
                    # Snapshot of every input that goes into the cost calculation,
                    # kept alongside the result for auditing.
                    current_pod_info = {
                        'namespace': exported_namespace,
                        'start_time': start_time,
                        'end_time': end_time,
                        'node': node,
                        'pod': pod,
                        'memory_bytes_limit': memory_bytes_limit,
                        'cpu_core_limit': cpu_core_limit,
                        'machine_spot_or_on_demand': machine_spot_or_on_demand,
                        'instance_type': machine_info_dict['instance_type'],
                        'instance_hourly_cost': cost_assumptions_dict['ec2_info'][machine_info_dict['instance_type']]['hourly_cost'][machine_spot_or_on_demand],
                        'cost_assumptions_memory_percentage': cost_assumptions_memory_percentage,
                        'cost_assumptions_cpu_percentage': cost_assumptions_cpu_percentage,
                        'instance_markup': markup,
                        'instance_total_memory': cost_assumptions_dict['ec2_info'][machine_info_dict['instance_type']]['memory'],
                        'instance_total_cpu': cost_assumptions_dict['ec2_info'][machine_info_dict['instance_type']]['cpu']
                    }
                    cost_per_min_dict = calculate_cost.get_cost_per_min(
                        current_pod_info['cost_assumptions_memory_percentage'],
                        current_pod_info['cost_assumptions_cpu_percentage'],
                        current_pod_info['instance_hourly_cost'],
                        current_pod_info['instance_markup'],
                        current_pod_info['instance_total_memory'],
                        current_pod_info['instance_total_cpu'],
                        current_pod_info['memory_bytes_limit'],
                        current_pod_info['cpu_core_limit']
                    )
                    logger.info("cost_per_min_dict - total: "+str(cost_per_min_dict['total']))
                    logger.info("cost_per_min_dict - memory: "+str(cost_per_min_dict['memory']))
                    logger.info("cost_per_min_dict - cpu: "+str(cost_per_min_dict['cpu']))
                    # Adding the calculated cost into the dict
                    current_pod_info['cost_per_min_total'] = cost_per_min_dict['total']
                    current_pod_info['cost_per_min_memory'] = cost_per_min_dict['memory']
                    current_pod_info['cost_per_min_cpu'] = cost_per_min_dict['cpu']
                    current_billing_cycle_list.append(current_pod_info)
                    logger.info(current_pod_info)
                    logger.info("###################################################################")
                else:
                    logger.warning("Did not find the node in "+node+" in find.machine_info_by_hostname")
        else:
            logger.warning("Did not find node, exported_namespace, or pod in the dict: pod_row")
    return current_billing_cycle_list
| 2.65625 | 3 |
Recursion/Aditya_Verma/Hypothesis_Method/Sort_A_Stack.py | prash-kr-meena/GoogleR | 0 | 12759958 | # For using stack in python
# https://www.geeksforgeeks.org/stack-in-python/
# https://www.youtube.com/watch?v=zwb3GmNAtFk&ab_channel=codebasics << NOTE good resource
from Abstract_Data_Type.StackADT import StackADT
def insert_into_sorted_stack(stack, element) -> None:
    """Insert ``element`` into an ascending-sorted stack (largest on top).

    Raises ValueError if ``stack`` is None.
    """
    if stack is None:
        raise ValueError("Invalid Stack")
    # The element belongs on top once everything larger has been lifted off.
    if stack.size() == 0 or stack.peek() <= element:
        stack.push(element)
        return
    # Lift one larger item, place the element recursively, then restore it.
    larger = stack.pop()
    insert_into_sorted_stack(stack, element)
    stack.push(larger)
def sort_stack(stack) -> None:
    """Recursively sort ``stack`` in place so the largest element ends on top.

    Raises ValueError if ``stack`` is None.
    """
    if stack is None:
        raise ValueError("Invalid Stack")
    # Zero or one element is already sorted.
    if stack.size() == 0 or stack.size() == 1:
        return
    # Hold the top aside, sort the remainder, then re-insert in order.
    held = stack.pop()
    sort_stack(stack)
    insert_into_sorted_stack(stack, held)
if __name__ == "__main__":
    # Demo: build a small stack, sort it, and print before/after states.
    s = StackADT([5, 1, 0, 2])
    s.print()  # unsorted stack
    sort_stack(s)
    s.print()  # sorted stack
| 4.21875 | 4 |
gface/learning/learn_gface.py | scpepper69/ml | 0 | 12759959 | <filename>gface/learning/learn_gface.py
import os
os.environ["KERAS_BACKEND"] = "tensorflow"
kerasBKED = os.environ["KERAS_BACKEND"]
print(kerasBKED)
import numpy as np
import scipy.misc
import tensorflow as tf
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import graph_io
from tensorflow.python import keras
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import initializers
from tensorflow.python.keras.layers import Conv2D, Convolution2D, MaxPooling2D
from tensorflow.python.keras.layers import Dense, Dropout, Activation, Flatten
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.preprocessing.image import ImageDataGenerator
from tensorflow.python.keras.utils import np_utils
from sklearn.model_selection import train_test_split
# Number of face classes to distinguish.
nb_classes = 3 #class count
# input image dimensions
img_rows, img_cols = 128, 128 # image size
# img_rows, img_cols = 127, 128
# Load the packed dataset as 64x64 RGB samples.
# NOTE(review): dividing by 15 assumes raw pixel values in [0, 15] -- confirm
# against how gface.npz was generated.
ary = np.load("gface.npz")['arr_0'].reshape([-1, 64, 64, 3]).astype(np.float32) / 15
X_train = np.zeros([nb_classes * 20, img_rows, img_cols, 3], dtype=np.float32)
# Upscale each 64x64 sample to the network's 128x128 input size.
# NOTE(review): scipy.misc.imresize was removed in SciPy 1.3; this requires an
# old SciPy (or porting to PIL/skimage).
for i in range(nb_classes * 20):
    X_train[i] = scipy.misc.imresize(ary[i], (img_rows, img_cols, 3), mode=None)
    # X_train[i] = ary[i]
# Labels: the dataset stores 20 consecutive samples per class.
Y_train = np.repeat(np.arange(nb_classes), 20)
# Hold out 20% of the samples for validation.
X_train, X_test, Y_train, Y_test = train_test_split(X_train, Y_train, test_size=0.2)
X_train = X_train.reshape(X_train.shape[0], img_rows, img_cols, 3)
X_test = X_test.reshape(X_test.shape[0], img_rows, img_cols, 3)
input_shape = (img_rows, img_cols, 3)
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(Y_train, nb_classes)
Y_test = np_utils.to_categorical(Y_test, nb_classes)
# Light augmentation: small rotations and zooms.
datagen = ImageDataGenerator(rotation_range=15, zoom_range=0.20)
datagen.fit(X_train)
model = Sequential()
def my_init(shape, dtype=None):
    """Kernel initializer: truncated-normal values with stddev 0.1.

    The original body referenced an undefined ``name`` and passed ``shape``
    positionally to ``TruncatedNormal`` (where it would be interpreted as
    ``mean``) with an invalid ``scale`` keyword, so any call raised.
    Build the initializer with the intended stddev and apply it to the
    requested shape instead, matching the Keras initializer-callable API.
    """
    return initializers.TruncatedNormal(stddev=0.1)(shape, dtype=dtype)
# Best val_loss: 0.0205 - val_acc: 0.9978 (just tried only once)
# 30 minutes on Amazon EC2 g2.2xlarge (NVIDIA GRID K520)
def m6_1():
    """Append the 6-layer conv architecture to the module-level ``model``.

    Two conv/conv/pool/dropout stages (32 then 64 filters) followed by a
    256-unit dense layer and a softmax head over ``nb_classes``.
    """
    def kinit():
        # A fresh truncated-normal kernel initializer (stddev 0.1) per layer.
        return initializers.TruncatedNormal(stddev=0.1)

    model.add(Conv2D(32, (3, 3), kernel_initializer=kinit(), input_shape=input_shape))
    model.add(Activation('relu'))
    model.add(Conv2D(32, (3, 3), kernel_initializer=kinit()))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.5))
    model.add(Conv2D(64, (3, 3), kernel_initializer=kinit()))
    model.add(Activation('relu'))
    model.add(Conv2D(64, (3, 3), kernel_initializer=kinit()))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.5))
    model.add(Flatten())
    model.add(Dense(256, kernel_initializer=kinit()))
    model.add(Activation('relu'))
    model.add(Dropout(0.5))
    model.add(Dense(nb_classes))
    model.add(Activation('softmax'))
def classic_neural():
    """Append a simple fully-connected baseline to the module-level ``model``."""
    for layer in (
        Flatten(input_shape=input_shape),
        Dense(256),
        Activation('relu'),
        Dropout(0.5),
        Dense(nb_classes),
        Activation('softmax'),
    ):
        model.add(layer)
# Build the conv net (swap for classic_neural() to use the dense baseline).
m6_1()
# classic_neural()
model.summary()
model.compile(loss='categorical_crossentropy', optimizer='adadelta', metrics=['accuracy'])
model.fit_generator(datagen.flow(X_train, Y_train, batch_size=16), steps_per_epoch=X_train.shape[0],epochs=20, validation_data=(X_test, Y_test))
#epochs=400
# additional
# NOTE(review): this evaluates on the TRAINING set but the labels below say
# "Test" -- confirm whether X_test/Y_test was intended.
score = model.evaluate(X_train, Y_train, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
# Freeze the trained graph to a protobuf for deployment.
# NOTE(review): K.get_session()/graph_util are TF1-era APIs; this script
# requires TensorFlow 1.x.
ksess = K.get_session()
print(ksess)
K.set_learning_phase(0)
graph = ksess.graph
kgraph = graph.as_graph_def()
print(kgraph)
# define output node
num_output = 1
prefix = "output"
pred = [None]*num_output
outputName = [None]*num_output
for i in range(num_output):
    outputName[i] = prefix + str(i)
    pred[i] = tf.identity(model.get_output_at(i), name=outputName[i])
print('output name: ', outputName)
# convert variables in the model graph to constants
constant_graph = graph_util.convert_variables_to_constants(ksess, ksess.graph.as_graph_def(), outputName)
output_dir = "./"
output_graph_name = "gface.pb"
output_text_name = "gface.txt"
# Write both binary and text versions of the frozen graph.
graph_io.write_graph(constant_graph, output_dir, output_graph_name, as_text=False)
graph_io.write_graph(constant_graph, output_dir, output_text_name, as_text=True)
print('saved graph .pb at: {0}\nsaved graph .txt at: {1}'.format(
    os.path.join(output_dir, output_graph_name),
    os.path.join(output_dir, output_text_name)))
| 2.484375 | 2 |
applications/GeoMechanicsApplication/python_scripts/gap_closure_interface_activation_process.py | lkusch/Kratos | 778 | 12759960 | import KratosMultiphysics
import KratosMultiphysics.GeoMechanicsApplication as KratosGeo
def Factory(settings, Model):
    """Kratos entry point: build the process from a Parameters settings block."""
    # isinstance is the correct check here; `type(x) != T` rejects subclasses
    # of Parameters that are otherwise perfectly valid.
    if not isinstance(settings, KratosMultiphysics.Parameters):
        raise Exception("expected input shall be a Parameters object, encapsulating a json string")
    return GapClosureInterfaceActivationProcess(Model, settings["Parameters"])
## All the python processes should be derived from "python_process"
class GapClosureInterfaceActivationProcess(KratosMultiphysics.Process):
    """Thin Python wrapper around KratosGeo's GapClosureInterfaceProcess.

    Extracts the relevant settings and forwards every solution-step hook to
    the underlying C++ process object.
    """

    def __init__(self, Model, settings ):
        KratosMultiphysics.Process.__init__(self)

        model_part = Model[settings["model_part_name"].GetString()]

        # Forward only the settings the C++ process consumes.
        params = KratosMultiphysics.Parameters("{}")
        params.AddValue("model_part_name",settings["model_part_name"])
        params.AddValue("gap_width_threshold",settings["gap_width_threshold"])
        params.AddValue("consider_gap_closure",settings["consider_gap_closure"])
        self.process = KratosGeo.GapClosureInterfaceProcess(model_part, params)

    def ExecuteInitialize(self):
        # Delegate to the C++ implementation.
        self.process.ExecuteInitialize()

    def ExecuteInitializeSolutionStep(self):
        self.process.ExecuteInitializeSolutionStep()

    def ExecuteFinalizeSolutionStep(self):
        self.process.ExecuteFinalizeSolutionStep()

    def ExecuteFinalize(self):
        self.process.ExecuteFinalize()
| 2.265625 | 2 |
examples/robodk/simulation.py | StrayRobots/stray | 1 | 12759961 | import numpy as np
import robodk
import time
import queue
from scipy.spatial.transform import Rotation, Slerp
from PIL import Image
from robolink import *
from matplotlib import pyplot as plt
import multiprocessing
from constants import BELT_VELOCITY
BOX_RANDOM_ANGLE = np.pi / 8.0
BOX_X_RANDOM = 50.0
GRAVITY = -9.81
class SimulationLoop:
    """Background loop that animates a box along a RoboDK conveyor belt.

    Commands arrive on ``self.queue`` as tuples of (method_name, *args) and
    are dispatched by name in ``_read_queue``; all writes to the RoboDK
    station are serialized through ``self.write_lock``.
    """

    # Y position (station units) past which the box is respawned at the start.
    CONVEYOR_BELT_END = 100.0

    def __init__(self, queue, lock):
        # Target ~60 simulation steps per second.
        self.sleep_for = 1.0 / 60.0
        self.link = Robolink()
        # Belt moves in -Y at BELT_VELOCITY (meters/second -- see the *1000.0
        # mm conversion in _step_simulation; confirm units against constants).
        self.box_velocity = np.array([0.0, -BELT_VELOCITY, 0.0])
        self.paused = False
        self.done = False
        self.previous_sim_time = None
        self.queue = queue
        self.box = self.link.Item('Box')
        self.write_lock = lock

    def run(self):
        """Main loop: process commands and step the simulation until closed."""
        self.link.setSimulationSpeed(1.0)
        self.previous_sim_time = self.link.SimulationTime()
        while not self.done:
            self._read_queue()
            if self.paused:
                time.sleep(0.05)
                continue
            self._step_simulation()
            time.sleep(self.sleep_for)

    def _read_queue(self):
        """Pop one pending command (if any) and dispatch it by method name."""
        try:
            msg = self.queue.get(False)
            try:
                self.write_lock.acquire()
                # msg[0] names a method on this object; msg[1:] are its args.
                getattr(self, msg[0])(*msg[1:])
            finally:
                self.write_lock.release()
        except queue.Empty:
            pass

    def _step_simulation(self):
        """Advance the box by the elapsed simulation time since last step."""
        current_time = self.link.SimulationTime()
        diff = current_time - self.previous_sim_time
        try:
            self.write_lock.acquire()
            self.previous_sim_time = current_time
            if self.box.Parent().Name() != 'picking_setup':
                # Box is in the robot's hand. Don't do anything.
                return
            current_pose = np.array(self.box.Pose().Rows())
            # Past the end of the belt: respawn at the start.
            if current_pose[1, 3] < self.CONVEYOR_BELT_END:
                self.reset_box()
                return
            if self.box.Parent().Name() == "picking_setup":
                # On conveyor belt. Let's move it.
                current_pose[:3, 3] += diff * self.box_velocity * 1000.0  # Pose is in millimeters.
                # If lifted above the belt surface, fall under gravity but
                # never below z = 0.
                if current_pose[2, 3] > 5.0:
                    z = current_pose[2, 3]
                    current_pose[2, 3] = max(0.0, z + diff * GRAVITY * 1000.0)
                self.box.setPose(robodk.Mat(current_pose.tolist()))
        finally:
            self.write_lock.release()

    def reset_box(self):
        """Delete the current box and spawn a randomized copy at the belt start."""
        gripper = self.link.Item('Gripper')
        gripper.DetachAll()
        try:
            box = self.link.Item('Box')
            if box.Name() == "Box":
                box.Delete()
        except Exception as e:
            # Best effort: the box may not exist yet on the first reset.
            print(e)
        box_template = self.link.Item('BoxTemplate')
        box_template.Copy()
        self.box = self.link.Paste(self.link.Item('picking_setup'))
        self.box.setName("Box")
        self.box.setParent(self.link.Item('picking_setup'))
        # Random yaw around -90 degrees and a random X offset at the belt start.
        box_pose = np.array(self.box.Pose().Rows())
        box_pose[:3, :3] = Rotation.from_rotvec([0.0, 0.0,
            -np.pi / 2.0 + np.random.uniform(-BOX_RANDOM_ANGLE, BOX_RANDOM_ANGLE)
        ]).as_matrix()
        box_pose[0, 3] = 200.0 + np.random.uniform(-BOX_X_RANDOM, BOX_X_RANDOM)
        box_pose[1, 3] = 1800.0
        box_pose[2, 3] = 0.0
        self.box.setPose(robodk.Mat(box_pose.tolist()))
        # Random box size between 70%/70%/10% and full template scale.
        self.box.Scale(np.random.uniform(np.array([0.7, 0.7, 0.1]), np.ones(3)).tolist())

    def pause(self, value):
        """Pause (True) or resume (False) stepping; resets the step clock on resume."""
        self.paused = value
        if not self.paused:
            self.previous_sim_time = self.link.SimulationTime()

    def close(self):
        """Ask the run() loop to terminate after its current iteration."""
        self.done = True
def simulation_loop(queue, lock):
    """Process entry point: run a SimulationLoop until it is told to close.

    The original bound run()'s return value (always None) to an unused
    local; simply invoke the loop instead.
    """
    SimulationLoop(queue, lock).run()
class Simulation:
    """Main-process handle that drives a SimulationLoop in a daemon subprocess.

    Commands are posted over a multiprocessing.Queue as tuples whose first
    element names the SimulationLoop method to invoke (see
    SimulationLoop._read_queue for the dispatch).
    """

    def __init__(self):
        self.queue = multiprocessing.Queue()
        # Lock shared with the subprocess to serialize station writes.
        self.write_lock = multiprocessing.Lock()
        self.background_thread = multiprocessing.Process(target=simulation_loop, args=(self.queue, self.write_lock), daemon=True)
        self.background_thread.start()

    def reset_box(self):
        # Ask the loop to respawn the box at the start of the belt.
        self.queue.put(('reset_box',))

    def pause(self, value):
        # value: True pauses stepping, False resumes it.
        self.queue.put(('pause', value))

    def close(self):
        # Request the loop's run() to terminate.
        self.queue.put(('close',))
| 2.5625 | 3 |
Square/Hotfire.py | kevin200617/U-Design-Summer-Camp-Code | 0 | 12759962 | <filename>Square/Hotfire.py
#!/usr/bin/env python3
from ev3dev2.sound import Sound
# Speaker interface for the EV3 brick.
sound = Sound()
# Play a 200Hz tone for 2 seconds and then wait 0.4 seconds
# before playing the next tone, 800Hz for 1 second
# followed by a 3 second delay
# (each tuple is (frequency_hz, duration_ms, delay_ms)).
sound.tone([(200, 2000, 400),(800, 1000, 3000)])
mod_pbxproj/__init__.py | crankycoder/mod_pbxproj | 13 | 12759963 | from mod_pbxproj import XcodeProject
| 1 | 1 |
src/process-data/tsdbquery/__init__.py | clamm/spark-location-history | 15 | 12759964 | __author__ = 'cindylamm'
| 0.902344 | 1 |
other/nested_brackets.py | tg12/Python | 0 | 12759965 | '''THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE
DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER LIABILITY,
WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.'''
# Bitcoin Cash (BCH) qpz32c4lg7x7lnk9jg6qg7s4uavdce89myax5v5nuk
# Ether (ETH) - 0x843d3DEC2A4705BD4f45F674F641cE2D0022c9FB
# Litecoin (LTC) - Lfk5y4F7KZa9oRxpazETwjQnHszEPvqPvu
# Bitcoin (BTC) - 34L8qWiQyKr8k4TnHDacfjbaSqQASbBtTd
# contact :- <EMAIL>
"""
The nested brackets problem is a problem that determines if a sequence of
brackets are properly nested. A sequence of brackets s is considered properly nested
if any of the following conditions are true:
- s is empty
- s has the form (U) or [U] or {U} where U is a properly nested string
- s has the form VW where V and W are properly nested strings
For example, the string "()()[()]" is properly nested but "[(()]" is not.
The function called is_balanced takes as input a string S which is a sequence of brackets and
returns true if S is nested and false otherwise.
"""
def is_balanced(S):
    """Return True if the bracket sequence S is properly nested.

    Every opening bracket must be closed by its matching closing bracket in
    the correct (LIFO) order. Characters that are not brackets are ignored.
    An empty string is balanced.
    """
    # Map each opener to its required closer; the keys and values double as
    # the membership sets, so no separate literals are needed.
    open_to_closed = {"{": "}", "[": "]", "(": ")"}
    closed_brackets = set(open_to_closed.values())
    stack = []
    # Iterate characters directly instead of indexing via range(len(S)).
    for ch in S:
        if ch in open_to_closed:
            stack.append(ch)
        elif ch in closed_brackets:
            # A closer must match the most recent unmatched opener.
            if not stack or open_to_closed[stack.pop()] != ch:
                return False
    # Balanced iff every opener was consumed.
    return len(stack) == 0
def main():
    """Read a bracket sequence from stdin and report whether it is balanced."""
    S = input("Enter sequence of brackets: ")
    # The original passed a tuple to print(), producing output like
    # ('()', 'is balanced'); print the pieces as separate arguments instead.
    if is_balanced(S):
        print(S, "is balanced")
    else:
        print(S, "is not balanced")


if __name__ == "__main__":
    main()
| 2.859375 | 3 |
yaml_output_converter.py | ThePhar/AsyncTools | 0 | 12759966 | <gh_stars>0
"""This file was originally made as a quick & dirty helper tool, not meant for public eyes.
If you like good code, avert your eyes."""
import csv
import string
import yaml
import itertools
import sys
# Load the mystery-result YAML given on the command line.
# NOTE(review): yaml.unsafe_load can execute arbitrary constructors -- only
# run this on trusted files.
with open(sys.argv[1], 'r') as f:
    mystery = yaml.unsafe_load(f.read())

# Dungeon-item shuffles that fold into the combined "keysanity" column,
# mapped to the single letter used for each.
shuffles = {"bigkey_shuffle": "b", "compass_shuffle": "c", "map_shuffle": "m"}
ignored = {"open_pyramid"}
always = {"game", "accessibility", "progression_balancing"}
rows = []
players = list(mystery["name"].values())
player_games = list(mystery["game"].values())
START = 1
# Find the first auto-named player ("Player<digits>"); explicitly named
# players precede the generic ones.
for START, player in enumerate(players, 1):
    if player.startswith("Player") and all(c in string.digits for c in player.lstrip("Player")):
        print("First generic player:", player, START)
        break
    print(player)
END = len(players) + 1
print(START, END)
print("unnamed players follow")
# Build one option row per generic player slot.
for p in range(START, END):
    row = {"keysanity": ""}
    # Only dict-valued top-level entries hold per-player option data.
    for key, value in [(k, v) for k, v in mystery.items() if isinstance(v, dict)]:
        if not key in ignored:
            if p not in value:
                row[key] = None
                continue
            pvalue = value[p]
            try:
                # Options may be rich objects exposing current_key, or plain values.
                pkey = getattr(pvalue, "current_key", pvalue)
            except:
                # NOTE(review): bare except silently swallows everything and may
                # leave pkey unset from a prior iteration -- confirm intent.
                pass
            if key == "smallkey_shuffle":
                if pkey == "universal":
                    row["keysanity"] += "u"
                elif pkey != "original_dungeon":
                    row["keysanity"] += "s"
            elif key in shuffles:
                if pkey != "original_dungeon":
                    row["keysanity"] += shuffles[key]
            else:
                row[key] = pvalue
    # Normalize the letter order so equal shuffle sets compare equal.
    row["keysanity"] = "".join(sorted(row["keysanity"]))
    rows.append(row)
def get_option_name(option):
    """Return the option's display value.

    Uses option.get_current_option_name() when the attribute exists and is
    truthy; otherwise returns the option itself unchanged.
    """
    name_getter = getattr(option, "get_current_option_name", None)
    return name_getter() if name_getter else option
def get_option_header(option, key):
    """Return the column header for an option.

    Uses option.display_name when present and truthy; otherwise falls back
    to the raw dictionary key.
    """
    display = getattr(option, "display_name", None)
    return display if display else key
# filter identical options out
games = set(row["game"] for row in rows)
print("Games", games)
for game in games:
    remove = set()
    game_rows = [row for row in rows if row["game"] == game]
    # Any option whose value is identical across all rows of this game
    # carries no information -- drop it (but always keep "game").
    for key, value in next(iter(game_rows)).items():
        if all(row[key] == value for row in game_rows):
            remove.add(key)
    remove -= {"game"}
    for key in remove:
        for row in game_rows:
            del row[key]
with open('mystery_result.csv', 'w') as mysterycsv:
    # Convert option objects to display headers/values for the CSV.
    rows = [{get_option_header(data, key): get_option_name(data) for key, data in row.items()} for row in rows]
    # Collect every header that has at least one non-empty value.
    fieldnames = set(itertools.chain.from_iterable(((key for key, value in dictionary.items() if value not in {"", None})
                                                    for dictionary in rows)))
    fieldnames -= {"game"}
    fieldnames = sorted(fieldnames)
    # "game" always leads; the rest are alphabetical.
    fieldnames = ["game"] + fieldnames
    writer = csv.DictWriter(mysterycsv, fieldnames=fieldnames, lineterminator='\n')
    writer.writeheader()
    for row in rows:
        writer.writerow({key: value for key, value in row.items() if key in fieldnames})
with open("mystery_players.csv", 'w') as mysterycsv:
    # Companion file mapping slot numbers to player names and games.
    writer = csv.DictWriter(mysterycsv, fieldnames=["Slot", "Name", "Game"], lineterminator='\n')
    writer.writeheader()
    for i, name in enumerate(players, 1):
        writer.writerow({"Slot": i, "Name": name, "Game": player_games[i-1]})
print("Done")
test/functional/tl_vesting.py | sinetek/BlockPo-to-Tradelayer | 0 | 12759967 | #!/usr/bin/env python3
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Vesting tokens."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import os
import json
import math
import http.client
import urllib.parse
class VestingBasicsTest (BitcoinTestFramework):
def set_test_params(self):
    """Configure the harness: one node on a fresh regtest chain with -txindex
    so full transaction lookups are available to the RPC calls below."""
    self.num_nodes = 1
    self.setup_clean_chain = True
    self.extra_args = [["-txindex=1"]]
def setup_chain(self):
    """Set up the chain, then append RPC credentials to the node's config file.

    NOTE(review): the rpcpassword value contains "<PASSWORD>" placeholders,
    which look like a redaction/anonymization artifact -- confirm the intended
    credential before relying on RPC auth in this test.
    """
    super().setup_chain()
    #Append rpcauth to bitcoin.conf before initialization
    rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
    rpcuser = "rpcuser=rpcuser💻"
    rpcpassword = "<PASSWORD>password=<PASSWORD>🔑"
    # Only rpcauth is actually written; rpcuser/rpcpassword above are unused.
    with open(os.path.join(self.options.tmpdir+"/node0", "litecoin.conf"), 'a', encoding='utf8') as f:
        f.write(rpcauth+"\n")
def run_test(self):
self.log.info("Preparing the workspace...")
# mining 1000 blocks, total budget: 14949.77187643 LTC
for i in range(0,2):
self.nodes[0].generate(500)
blocks = 500*(i+1)
self.log.info(str(blocks)+" blocks mined...")
################################################################################
# Checking RPC tl_sendvesting and related (starting in block 1000) #
################################################################################
url = urllib.parse.urlparse(self.nodes[0].url)
#Old authpair
authpair = url.username + ':' + url.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
addresses = []
accounts = ["john", "doe", "another", "mark", "tango"]
# for graphs for addresses[1]
vested = []
unvested = []
volume_ltc = []
#vested ALL for addresses[4]
bvested = []
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
self.log.info("watching LTC general balance")
params = str([""]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "getbalance",params)
self.log.info(out)
assert_equal(out['error'], None)
adminAddress = 'QgKxFUBgR8y4xFy3s9ybpbDvYNKr4HTKPb'
privkey = '<KEY>'
self.log.info("importing admin address")
params = str([privkey]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "importprivkey",params)
# self.log.info(out)
assert_equal(out['error'], None)
self.log.info("watching private key of admin address")
params = str([adminAddress]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "dumpprivkey",params)
# self.log.info(out)
assert_equal(out['error'], None)
self.log.info("Creating addresses")
addresses = tradelayer_createAddresses(accounts, conn, headers)
addresses.append(adminAddress)
# self.log.info(addresses)
self.log.info("Funding addresses with LTC")
amount = 5
tradelayer_fundingAddresses(addresses, amount, conn, headers)
self.nodes[0].generate(1)
self.log.info("Checking the LTC balance in every account")
tradelayer_checkingBalance(accounts, amount, conn, headers)
self.log.info("Funding addresses[3] with 12000 LTC")
amount = 2000
params = str([addresses[3], amount]).replace("'",'"')
for i in range(0,6):
out = tradelayer_HTTP(conn, headers, False, "sendtoaddress",params)
# self.log.info(out)
assert_equal(out['error'], None)
self.nodes[0].generate(1)
self.log.info("Creating new tokens (sendissuancefixed)")
array = [0]
params = str([addresses[2],2,0,"lihki","","","90000000",array]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_sendissuancefixed",params)
# self.log.info(out)
self.log.info("Self Attestation for addresses")
tradelayer_selfAttestation(addresses,conn, headers)
self.log.info("Checking attestations")
out = tradelayer_HTTP(conn, headers, False, "tl_list_attestation")
# self.log.info(out)
result = []
registers = out['result']
for addr in addresses:
for i in registers:
if i['att sender'] == addr and i['att receiver'] == addr and i['kyc_id'] == 0:
result.append(True)
assert_equal(result, [True, True, True, True, True, True])
self.log.info("Checking vesting tokens property")
params = str([3])
out = tradelayer_HTTP(conn, headers, True, "tl_getproperty",params)
# self.log.info(out)
assert_equal(out['result']['propertyid'],3)
assert_equal(out['result']['name'],'Vesting Tokens')
assert_equal(out['result']['data'],'Divisible Tokens')
assert_equal(out['result']['url'],'www.tradelayer.org')
assert_equal(out['result']['divisible'],True)
assert_equal(out['result']['totaltokens'],'1500000.00000000')
self.log.info("Checking the property")
params = str([4])
out = tradelayer_HTTP(conn, headers, True, "tl_getproperty",params)
assert_equal(out['error'], None)
# self.log.info(out)
assert_equal(out['result']['propertyid'],4)
assert_equal(out['result']['name'],'lihki')
assert_equal(out['result']['data'],'')
assert_equal(out['result']['url'],'')
assert_equal(out['result']['divisible'],True)
assert_equal(out['result']['totaltokens'],'90000000.00000000')
self.log.info("sendvesting from adminAddress to first address")
params = str([adminAddress, addresses[0], "2000"]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_sendvesting",params)
# self.log.info(out)
assert_equal(out['error'], None)
self.nodes[0].generate(1)
self.log.info("Checking tokens in receiver address")
params = str([addresses[0], 3]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['balance'],'2000.00000000')
assert_equal(out['result']['reserve'],'0.00000000')
self.log.info("Checking unvested ALLs ")
params = str([addresses[0]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getunvested",params)
# self.log.info(out)
assert_equal(out['result']['unvested'],'2000.00000000')
self.log.info("Checking the time lock of one year")
self.log.info("sendvesting from first to second address")
params = str([addresses[0], addresses[1], "1000"]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_sendvesting",params)
# self.log.info(out)
assert_equal(out['error'], None)
self.nodes[0].generate(1)
self.log.info("Checking tokens in receiver address")
params = str([addresses[1], 3]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['balance'],'0.00000000')
assert_equal(out['result']['reserve'],'0.00000000')
self.log.info("Checking unvested ALLs ")
params = str([addresses[1]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getunvested",params)
# self.log.info(out)
assert_equal(out['result']['unvested'],'0.00000000')
out = tradelayer_HTTP(conn, headers, True, "tl_getinfo")
block = out['result']['block']
self.log.info("block height :"+str(block))
self.log.info("Waiting for one year")
for i in range(20):
self.nodes[0].generate(1)
self.log.info("sendvesting from first to second address, again")
params = str([addresses[0], addresses[1], "1000"]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_sendvesting",params)
# self.log.info(out)
assert_equal(out['error'], None)
self.nodes[0].generate(1)
self.log.info("sendvesting from first to 5th addresses")
params = str([addresses[0], addresses[4], "500"]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_sendvesting",params)
# self.log.info(out)
assert_equal(out['error'], None)
self.nodes[0].generate(1)
self.log.info("Restarting for the node, in order to test persistence")
self.restart_node(0) #stop and start
url = urllib.parse.urlparse(self.nodes[0].url)
#New authpair
authpair = url.username + ':' + url.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
self.log.info("Checking tokens in receiver addresses")
params = str([addresses[1], 3]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['balance'],'1000.00000000')
assert_equal(out['result']['reserve'],'0.00000000')
params = str([addresses[4], 3]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['balance'],'500.00000000')
assert_equal(out['result']['reserve'],'0.00000000')
self.log.info("Checking unvested ALLs ")
params = str([addresses[1]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getunvested",params)
# self.log.info(out)
assert_equal(out['result']['unvested'],'1000.00000000')
params = str([addresses[4]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getunvested",params)
# self.log.info(out)
assert_equal(out['result']['unvested'],'500.00000000')
# 200 LTC implies release 7.5% of ALLs from unvested to balance
# NOTE: In regtest 200 LTC volume is equivalent to 20000 (x100) LTCs in testnet or mainnet
self.log.info("Creating LTC volume in DEx")
self.log.info("Sending a DEx sell tokens offer")
params = str([addresses[2], 4, "1000", "200", 250, "0.00001", "2", 1]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_senddexoffer",params)
assert_equal(out['error'], None)
# self.log.info(out)
self.nodes[0].generate(1)
self.log.info("Checking the offer in DEx")
params = str([addresses[2]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_getactivedexsells",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result'][0]['propertyid'], 4)
assert_equal(out['result'][0]['action'], 2)
assert_equal(out['result'][0]['seller'], addresses[2])
assert_equal(out['result'][0]['ltcsdesired'], '200.00000000')
assert_equal(out['result'][0]['amountavailable'], '1000.00000000')
assert_equal(out['result'][0]['unitprice'], '0.20000000')
assert_equal(out['result'][0]['minimumfee'], '0.00001000')
self.log.info("Accepting the full offer")
params = str([addresses[3], addresses[2], 4, "1000"]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_senddexaccept",params)
assert_equal(out['error'], None)
# self.log.info(out)
self.nodes[0].generate(1)
self.log.info("Checking the offer status")
params = str([addresses[2]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_getactivedexsells",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result'][0]['propertyid'], 4)
assert_equal(out['result'][0]['action'], 2)
assert_equal(out['result'][0]['seller'], addresses[2])
assert_equal(out['result'][0]['ltcsdesired'], '0.00000000')
assert_equal(out['result'][0]['amountavailable'], '0.00000000')
assert_equal(out['result'][0]['unitprice'], '0.20000000')
assert_equal(out['result'][0]['minimumfee'], '0.00001000')
assert_equal(out['result'][0]['accepts'][0]['buyer'], addresses[3])
assert_equal(out['result'][0]['accepts'][0]['amountdesired'], '1000.00000000')
assert_equal(out['result'][0]['accepts'][0]['ltcstopay'], '200.00000000')
self.log.info("Paying the tokens")
params = str([addresses[3], addresses[2], "200"]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_send_dex_payment",params)
# self.log.info(out)
self.nodes[0].generate(1)
self.log.info("Checking token balance in buyer address")
params = str([addresses[3], 4]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['balance'], '1000.00000000')
assert_equal(out['result']['reserve'],'0.00000000')
self.log.info("Checking LTC Volume")
params = str([4, 1, 3000]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_get_ltcvolume",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['volume'], '200.00000000')
volume0 = float(out['result']['volume'])
self.nodes[0].generate(1)
self.log.info("Checking vesting in related address")
params = str([addresses[1], 1]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['balance'],'75.25749000') # 7.5% of vesting (NOTE: check the round up)
assert_equal(out['result']['reserve'],'0.00000000')
vested0 = float(out['result']['balance'])
params = str([addresses[4], 1]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
# assert_equal(out['result']['balance'],'75.25749000') # 7.5% of vesting (NOTE: check the round up)
assert_equal(out['result']['reserve'],'0.00000000')
vested1 = float(out['result']['balance'])
bvested.append(vested1)
self.log.info("Checking unvested ALLs ")
params = str([addresses[1]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getunvested",params)
# self.log.info(out)
assert_equal(out['result']['unvested'],'924.74251000')
unvested0 = float(out['result']['unvested'])
volume_ltc.append(volume0)
vested.append(vested0)
unvested.append(unvested0)
self.log.info("Checking vesting info")
out = tradelayer_HTTP(conn, headers, False, "tl_getvesting_info",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['propertyid'], 3)
assert_equal(out['result']['name'], 'Vesting Tokens')
assert_equal(out['result']['data'], 'Divisible Tokens')
assert_equal(out['result']['url'], 'www.tradelayer.org')
assert_equal(out['result']['divisible'], True)
assert_equal(out['result']['issuer'], 'QgKxFUBgR8y4xFy3s9ybpbDvYNKr4HTKPb')
assert_equal(out['result']['activation block'], 100)
assert_equal(out['result']['litecoin volume'], '200.00000000')
assert_equal(out['result']['vested percentage'], '7.52574900')
assert_equal(out['result']['last vesting block'], 1037)
assert_equal(out['result']['total vested'], '150.51498000')
assert_equal(out['result']['owners'], 3)
assert_equal(out['result']['total tokens'], '1500000.00000000')
assert_equal(out['result']['kyc_ids allowed'], '[]')
# 400 LTC implies release 15.05% of ALLs from unvested to balance
# Remember: 400 LTCs in regtest is 40000 (x100) LTCs in testnet/mainnet
self.log.info("Sending a DEx sell tokens offer")
params = str([addresses[2], 4, "1000000", "200000", 250, "0.00001", "2", 1]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_senddexoffer",params)
assert_equal(out['error'], None)
# self.log.info(out)
self.nodes[0].generate(1)
self.log.info("Checking the offer in DEx")
params = str([addresses[2]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_getactivedexsells",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result'][0]['propertyid'], 4)
assert_equal(out['result'][0]['action'], 2)
assert_equal(out['result'][0]['seller'], addresses[2])
assert_equal(out['result'][0]['ltcsdesired'], '200000.00000000')
assert_equal(out['result'][0]['amountavailable'], '1000000.00000000')
assert_equal(out['result'][0]['unitprice'], '0.20000000')
assert_equal(out['result'][0]['minimumfee'], '0.00001000')
self.log.info("Accepting the part of the offer")
params = str([addresses[3], addresses[2], 4, "1000"]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_senddexaccept",params)
assert_equal(out['error'], None)
# self.log.info(out)
self.nodes[0].generate(1)
self.log.info("Checking the offer status")
params = str([addresses[2]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_getactivedexsells",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result'][0]['accepts'][0]['buyer'], addresses[3])
assert_equal(out['result'][0]['accepts'][0]['amountdesired'], '1000.00000000')
assert_equal(out['result'][0]['accepts'][0]['ltcstopay'], '200.00000000')
self.log.info("Paying the tokens")
params = str([addresses[3], addresses[2], "200"]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_send_dex_payment",params)
# self.log.info(out)
self.nodes[0].generate(1)
self.log.info("Checking token balance in buyer address")
params = str([addresses[3], 4]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['balance'], '2000.00000000')
assert_equal(out['result']['reserve'],'0.00000000')
self.log.info("Checking LTC Volume")
params = str([4, 1, 99999]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_get_ltcvolume",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['volume'], '400.00000000')
volume1 = float(out['result']['volume'])
self.nodes[0].generate(2)
self.log.info("Checking vesting in related addresses")
params = str([addresses[1], 1]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['balance'],'150.51498000') # 15.05% of vesting (NOTE: check the round up)
assert_equal(out['result']['reserve'],'0.00000000')
vested1 = float(out['result']['balance'])
params = str([addresses[4], 1]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
# assert_equal(out['result']['balance'],'150.51498000') # 15.05% of vesting (NOTE: check the round up)
assert_equal(out['result']['reserve'],'0.00000000')
vested2 = float(out['result']['balance'])
bvested.append(vested2)
self.log.info("Checking unvested ALLs ")
params = str([addresses[1]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getunvested",params)
# self.log.info(out)
assert_equal(out['result']['unvested'],'849.48502000')
unvested1 = float(out['result']['unvested'])
volume_ltc.append(volume1)
vested.append(vested1)
unvested.append(unvested1)
self.log.info("Checking vesting info")
out = tradelayer_HTTP(conn, headers, False, "tl_getvesting_info",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['propertyid'], 3)
assert_equal(out['result']['name'], 'Vesting Tokens')
assert_equal(out['result']['data'], 'Divisible Tokens')
assert_equal(out['result']['url'], 'www.tradelayer.org')
assert_equal(out['result']['divisible'], True)
assert_equal(out['result']['issuer'], 'QgKxFUBgR8y4xFy3s9ybpbDvYNKr4HTKPb')
assert_equal(out['result']['activation block'], 100)
assert_equal(out['result']['litecoin volume'], '400.00000000')
assert_equal(out['result']['vested percentage'], '15.05149900')
assert_equal(out['result']['last vesting block'], 1041)
assert_equal(out['result']['total vested'], '301.02996000')
assert_equal(out['result']['owners'], 3)
assert_equal(out['result']['total tokens'], '1500000.00000000')
assert_equal(out['result']['kyc_ids allowed'], '[]')
# Adding 200 LTCs in each step
for i in range(0,20):
self.log.info("Loop number:"+str(i))
# self.log.info("Checking the offer in DEx")
# params = str([addresses[2]]).replace("'",'"')
# out = tradelayer_HTTP(conn, headers, True, "tl_getactivedexsells",params)
# self.log.info(out)
self.log.info("Accepting the part of the offer")
params = str([addresses[3], addresses[2], 4, "1000"]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_senddexaccept",params)
assert_equal(out['error'], None)
# self.log.info(out)
self.nodes[0].generate(1)
self.log.info("Checking the offer status")
params = str([addresses[2]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_getactivedexsells",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result'][0]['accepts'][0]['buyer'], addresses[3])
assert_equal(out['result'][0]['accepts'][0]['amountdesired'], '1000.00000000')
assert_equal(out['result'][0]['accepts'][0]['ltcstopay'], '200.00000000')
self.log.info("Paying the tokens")
params = str([addresses[3], addresses[2], "200"]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_send_dex_payment",params)
# self.log.info(out)
self.nodes[0].generate(1)
time.sleep(0.35)
self.log.info("Checking token balance in buyer address")
params = str([addresses[3], 4]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
nresult = 2000 + 1000 * (i + 1)
sresult = str(nresult)+'.00000000'
assert_equal(out['result']['balance'], sresult)
assert_equal(out['result']['reserve'],'0.00000000')
self.log.info("Checking LTC Volume")
params = str([4, 1, 99999]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_get_ltcvolume",params)
# self.log.info(out)
assert_equal(out['error'], None)
nvolume = 400 + 200 * (i + 1)
svolume = str(nvolume)+'.00000000'
assert_equal(out['result']['volume'], svolume)
volume1 = float(out['result']['volume'])
self.nodes[0].generate(1)
self.log.info("Checking vesting in in addresses[1]")
params = str([addresses[1], 1]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['reserve'],'0.00000000')
vested1 = float(out['result']['balance'])
self.log.info("Checking unvested ALLs in addresses[1]")
params = str([addresses[1]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getunvested",params)
# self.log.info(out)
unvested1 = float(out['result']['unvested'])
assert_equal(unvested1 + vested1, 1000)
volume_ltc.append(volume1)
vested.append(vested1)
unvested.append(unvested1)
self.log.info("Checking vesting in addresses[4]")
params = str([addresses[4], 1]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['reserve'],'0.00000000')
vested2 = float(out['result']['balance'])
bvested.append(vested2)
self.log.info("Checking unvested ALLs in addresses[4]")
params = str([addresses[4]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getunvested",params)
# self.log.info(out)
unvested2 = float(out['result']['unvested'])
assert_equal(unvested2 + vested2, 500)
time.sleep(0.2)
self.log.info("Checking LTC Volume")
params = str([4, 1, 99999]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, True, "tl_get_ltcvolume",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['volume'], '4400.00000000')
# At this volume the vesting must be 41.08 %
self.log.info("Checking final vesting in addresses[1]")
params = str([addresses[1], 1]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['balance'],'410.86307000')
self.log.info("Checking final unvested ALLs in addresses[1]")
params = str([addresses[1]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getunvested",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['unvested'], '589.13693000')
self.log.info("Checking final vesting in addresses[4]")
params = str([addresses[4], 1]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getbalance",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['balance'],'205.43153500')
self.log.info("Checking final unvested ALLs in addresses[4]")
params = str([addresses[4]]).replace("'",'"')
out = tradelayer_HTTP(conn, headers, False, "tl_getunvested",params)
# self.log.info(out)
assert_equal(out['error'], None)
assert_equal(out['result']['unvested'], '294.56846500')
# pl.plot(volume_ltc, vested,'-b', label='vested amount for addresses[1]')
# pl.plot(volume_ltc, bvested,'-r', label='vested amount for addresses[3]')
# pl.legend(loc='upper left')
# pl.show()
conn.close()
self.stop_nodes()
if __name__ == '__main__':
    # Script entry point: run the vesting integration test suite.
    VestingBasicsTest ().main ()
| 2.140625 | 2 |
tests/integration/s2n_handshake_test_old_s_client.py | bryce-shang/s2n-tls | 4,256 | 12759968 | #
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
#
"""
Handshake tests using Openssl 0.9.8 s_client against s2nd
"""
import argparse
import os
import sys
import subprocess
import itertools
import multiprocessing
import threading
import uuid
import re
import string
from os import environ
from multiprocessing.pool import ThreadPool
from s2n_test_constants import *
from time import sleep
S_CLIENT_NEGOTIATED_CIPHER_PREFIX="Cipher : "
PROTO_VERS_TO_S_CLIENT_ARG = {
S2N_TLS10 : "-tls1",
S2N_TLS11 : "-tls1_1",
S2N_TLS12 : "-tls1_2",
}
use_corked_io=False
def cleanup_processes(*processes):
    """Forcibly kill each child process, then reap it to avoid zombies."""
    for proc in processes:
        proc.kill()
        proc.wait()
def validate_version(expected_version, output):
    """Return 0 if s2nd's output reports the expected protocol version, else -1.

    When no version was requested, TLS1.0 is the version we expect to see
    (this harness drives Openssl 0.9.8 clients).
    """
    wanted = ACTUAL_VERSION_STR.format(expected_version or S2N_TLS10)
    for line in output.splitlines():
        if wanted in line:
            return 0
    return -1
def validate_data_transfer(expected_data, s_client_out, s2nd_out):
    """
    Verify that the application data written between s_client and s2nd is encrypted and decrypted successfuly.

    Returns 0 when *expected_data* appears in both outputs, -1 otherwise.
    """
    # Check the server side first (matches the original diagnostic order).
    for label, transcript in (("s2nd", s2nd_out), ("s_client", s_client_out)):
        if not any(expected_data in line for line in transcript.splitlines()):
            print("Did not find " + expected_data + " in output from " + label)
            return -1
    return 0
def find_expected_cipher(expected_cipher, s_client_out):
    """
    Make sure s_client and s2nd negotiate the cipher suite we expect.

    Returns 0 if s_client's "Cipher : <name>" line matches, -1 otherwise.
    """
    # Dead code removed: the old body computed an unused len() and had an
    # unreachable `break` after `return 0`.
    full_expected_string = S_CLIENT_NEGOTIATED_CIPHER_PREFIX + expected_cipher
    for line in s_client_out.splitlines():
        if full_expected_string in line:
            return 0
    print("Failed to find " + expected_cipher + " in s_client output")
    return -1
def read_process_output_until(process, marker):
    """
    Accumulate decoded lines from *process* stdout until a line containing
    *marker* is seen (inclusive), or until EOF.

    Bug fix: previously this looped forever if the child exited without ever
    printing the marker, because readline() returns b"" (not blocking) at EOF
    and the `while True` loop never terminated.
    """
    output = ""
    while True:
        line = process.stdout.readline().decode("utf-8")
        if not line:
            # EOF: child closed stdout without emitting the marker.
            break
        output += line
        if marker in line:
            break
    return output
def try_handshake(endpoint, port, cipher, ssl_version, server_name=None, strict_hostname=False, server_cert=None, server_key=None,
                  server_cert_key_list=None, expected_server_cert=None, server_cipher_pref=None, ocsp=None, sig_algs=None, curves=None, resume=False, no_ticket=False,
                  prefer_low_latency=False, enter_fips_mode=False, client_auth=None, client_cert=DEFAULT_CLIENT_CERT_PATH,
                  client_key=DEFAULT_CLIENT_KEY_PATH, expected_cipher=None, expected_extensions=None):
    """
    Attempt to handshake against s2nd listening on `endpoint` and `port` using Openssl s_client

    :param int endpoint: endpoint for s2nd to listen on
    :param int port: port for s2nd to listen on
    :param str cipher: ciphers for Openssl s_client to offer. See https://www.openssl.org/docs/man1.0.2/apps/ciphers.html
    :param int ssl_version: SSL version for s_client to use
    :param str server_name: server_name value for s_client to send
    :param bool strict_hostname: whether s_client should strictly check to see if server certificate matches the server_name
    :param str server_cert: path to certificate for s2nd to use
    :param str server_key: path to private key for s2nd to use
    :param list server_cert_key_list: a list of (cert_path, key_path) tuples for multicert tests.
    :param str expected_server_cert: Path to the expected server certificate should be sent to s_client.
    :param str server_cipher_pref: cipher preference name for s2nd to use instead of the default
    :param str ocsp: path to OCSP response file for stapling
    :param str sig_algs: Signature algorithms for s_client to offer
    :param str curves: Elliptic curves for s_client to offer
    :param bool resume: True if s_client should try to reconnect to s2nd and reuse the same TLS session. False for normal negotiation.
    :param bool no_ticket: True if s2n server should not use session ticket to resume the same TLS session.
    :param bool prefer_low_latency: True if s2nd should use 1500 for max outgoing record size. False for default max.
    :param bool enter_fips_mode: True if s2nd should enter libcrypto's FIPS mode. Libcrypto must be built with a FIPS module to enter FIPS mode.
    :param bool client_auth: True if the test should try and use client authentication
    :param str client_cert: Path to the client's cert file
    :param str client_key: Path to the client's private key file
    :param str expected_cipher: the cipher we expect to negotiate
    :param list expected_extensions: list of expected extensions that s_client should receive.
    :return: 0 on successfully negotiation(s), -1 on failure

    NOTE(review): server_name, sig_algs, curves and client_key are accepted but
    never used in this (old s_client) variant of the harness.
    """
    # Override certificate for ECDSA if unspecified. We can remove this when we
    # support multiple certificates
    if server_cert is None and server_cert_key_list is None and "ECDSA" in cipher:
        server_cert = TEST_ECDSA_CERT
        server_key = TEST_ECDSA_KEY
    # Fire up s2nd
    s2nd_cmd = ["../../bin/s2nd"]
    if server_cert is not None:
        s2nd_cmd.extend(["--cert", server_cert])
    if server_key is not None:
        s2nd_cmd.extend(["--key", server_key])
    if server_cert_key_list is not None:
        # Multicert mode: pass every (cert, key) pair on the command line.
        for cert_key_path in server_cert_key_list:
            cert_path = cert_key_path[0]
            key_path = cert_key_path[1]
            s2nd_cmd.extend(["--cert", cert_path])
            s2nd_cmd.extend(["--key", key_path])
    if ocsp is not None:
        s2nd_cmd.extend(["--ocsp", ocsp])
    if prefer_low_latency == True:
        s2nd_cmd.append("--prefer-low-latency")
    if client_auth is not None:
        # -m requires mutual auth; -t trusts the given client cert.
        s2nd_cmd.append("-m")
        s2nd_cmd.extend(["-t", client_cert])
    if use_corked_io:
        # Module-level flag set by main(); -C enables corked IO in s2nd.
        s2nd_cmd.append("-C")
    s2nd_cmd.extend([str(endpoint), str(port)])
    # Choose the server cipher preference: explicit override > FIPS > default.
    s2nd_ciphers = "test_all_tls12"
    if server_cipher_pref is not None:
        s2nd_ciphers = server_cipher_pref
    if enter_fips_mode == True:
        s2nd_ciphers = "test_all_fips"
        s2nd_cmd.append("--enter-fips-mode")
    s2nd_cmd.append("-c")
    s2nd_cmd.append(s2nd_ciphers)
    if no_ticket:
        s2nd_cmd.append("-T")
    s2nd = subprocess.Popen(s2nd_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    # Make sure s2nd has started (it prints one line once it is listening).
    s2nd.stdout.readline()
    s_client_cmd = ["openssl", "s_client", "-connect", str(endpoint) + ":" + str(port)]
    if ssl_version is not None:
        s_client_cmd.append(PROTO_VERS_TO_S_CLIENT_ARG[ssl_version])
    if cipher is not None:
        s_client_cmd.extend(["-cipher", cipher])
    # For verifying extensions that s2nd sends expected extensions
    s_client_cmd.append("-tlsextdebug")
    # Fire up s_client (stderr folded into stdout so markers are all in one stream)
    s_client = subprocess.Popen(s_client_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    s_client_out = ""
    s2nd_out = ""
    openssl_connect_marker = "CONNECTED"
    openssl_reconnect_marker = "drop connection and then reconnect"
    end_of_msg_marker = "__end_of_msg__"
    # Wait until openssl and s2n have finished the handshake and are connected to each other
    s_client_out += read_process_output_until(s_client, openssl_connect_marker)
    s2nd_out += read_process_output_until(s2nd, openssl_connect_marker)
    if resume == True:
        for i in range(0,5):
            # Wait for openssl to resume connection 5 times in a row, and verify resumption works.
            s_client_out += read_process_output_until(s_client, openssl_reconnect_marker)
            s2nd_out += read_process_output_until(s2nd, openssl_connect_marker)
    # Unique payload so we can prove this exact connection moved data.
    data_to_validate = cipher + " " + str(uuid.uuid4())
    # Write the data to openssl towards s2n server
    msg = (data_to_validate + "\n" + end_of_msg_marker + "\n\n").encode("utf-8")
    s_client.stdin.write(msg)
    s_client.stdin.flush()
    # Write the data to s2n towards openssl client
    s2nd.stdin.write(msg)
    s2nd.stdin.flush()
    # Wait for the Data transfer to complete between OpenSSL and s2n
    s_client_out += read_process_output_until(s_client, end_of_msg_marker)
    s2nd_out += read_process_output_until(s2nd, end_of_msg_marker)
    cleanup_processes(s2nd, s_client)
    # Post-mortem validation on the captured transcripts.
    if validate_data_transfer(data_to_validate, s_client_out, s2nd_out) != 0:
        return -1
    if validate_version(ssl_version, s2nd_out) != 0:
        return -1
    if resume is True:
        if validate_resume(s2nd_out) != 0:
            return -1
    if ocsp is not None:
        if validate_ocsp(s_client_out) != 0:
            return -1
    if expected_cipher is not None:
        if find_expected_cipher(expected_cipher, s_client_out) != 0:
            return -1
    if strict_hostname is True:
        if validate_hostname(s_client_out) != 0:
            return -1
    if expected_server_cert is not None:
        if validate_selected_certificate(s_client_out, expected_server_cert) != 0:
            return -1
    if expected_extensions is not None:
        for extension in expected_extensions:
            if extension.s_client_validate(s_client_out) != 0:
                return -1
    return 0
def cert_path_to_str(cert_path):
    """Convert a cert path into a short display label for test output.

    Example: "./test_certs/rsa_2048_sha256_client_cert.pem" => "RSA-2048-SHA256"
    """
    basename = cert_path[cert_path.rfind('/') + 1:]
    return '-'.join(basename.split('_')[:3]).upper()
def print_result(result_prefix, return_code):
    """Print a PASSED/FAILED line, coloured when stdout is a terminal."""
    passed = (return_code == 0)
    if sys.stdout.isatty():
        # ANSI green for pass, red for fail.
        suffix = "\033[32;1mPASSED\033[0m" if passed else "\033[31;1mFAILED\033[0m"
    else:
        suffix = "PASSED" if passed else "FAILED"
    print(result_prefix + suffix)
def create_thread_pool():
    """Create a ThreadPool sized at 4x the CPU count.

    The multiplier increases parallelization between integration tests, which
    spend most of their time blocked on child processes.
    """
    size = multiprocessing.cpu_count() * 4
    print("\tCreating ThreadPool of size: " + str(size))
    return ThreadPool(processes=size)
def run_handshake_test(host, port, ssl_version, cipher, fips_mode, no_ticket, use_client_auth, client_cert_path, client_key_path):
    """Run one handshake for a (cipher, version) pair and print its result.

    Returns 0 on success or skip, -1 on handshake failure.
    """
    # Skip the cipher if openssl can't test it. 3DES/RC4 are disabled by default in 1.1.1
    if not cipher.openssl_1_1_1_compatible:
        return 0
    openssl_name = cipher.openssl_name
    min_vers = cipher.min_tls_vers
    # Skip combinations where the requested protocol predates the cipher.
    if ssl_version and ssl_version < min_vers:
        return 0
    if (use_client_auth is not None) and (client_cert_path is not None):
        cert_label = cert_path_to_str(client_cert_path)
    else:
        cert_label = str(use_client_auth)
    ret = try_handshake(host, port, openssl_name, ssl_version, no_ticket=no_ticket, enter_fips_mode=fips_mode, client_auth=use_client_auth, client_cert=client_cert_path, client_key=client_key_path)
    prefix = "Cipher: %-30s ClientCert: %-16s Vers: %-8s ... " % (openssl_name, cert_label, S2N_PROTO_VERS_TO_STR[ssl_version])
    print_result(prefix, ret)
    return ret
def handshake_test(host, port, test_ciphers, fips_mode, no_ticket=False, use_client_auth=None, use_client_cert=None, use_client_key=None):
    """
    Basic handshake tests using all valid combinations of supported cipher suites and TLS versions.
    """
    print("\n\tRunning handshake tests:")
    failed = 0
    for ssl_version in [S2N_TLS10, None]:
        print("\n\tTesting ciphers using client version: " + S2N_PROTO_VERS_TO_STR[ssl_version])
        # Only test non ECC ciphers, openssl 0.9.8 has trouble with ECDHE.
        # Only test 1.0/SSLv3 ciphers since 0.9.8 only supports those.
        eligible = [c for c in test_ciphers
                    if "ECDHE" not in c.openssl_name and c.min_tls_vers < S2N_TLS11]
        results = []
        for offset, cipher in enumerate(eligible):
            results.append(run_handshake_test(host, port + offset, ssl_version, cipher, fips_mode, no_ticket, use_client_auth, use_client_cert, use_client_key))
        if any(r != 0 for r in results):
            failed = 1
    return failed
def main():
    """Parse arguments and run the handshake tests.

    Returns the number of failed test groups (0 on success), which the
    __main__ guard passes to sys.exit().
    """
    # Bug fix: without `global`, the assignment below created a function-local
    # that shadowed the module-level use_corked_io flag read by try_handshake(),
    # so --use_corked_io silently never took effect.
    global use_corked_io
    parser = argparse.ArgumentParser(description='Runs TLS server integration tests against s2nd using Openssl s_client')
    parser.add_argument('host', help='The host for s2nd to bind to')
    parser.add_argument('port', type=int, help='The port for s2nd to bind to')
    parser.add_argument('--use_corked_io', action='store_true', help='Turn corked IO on/off')
    parser.add_argument('--libcrypto', default='openssl-1.1.1', choices=S2N_LIBCRYPTO_CHOICES,
                        help="""The Libcrypto that s2n was built with. s2n supports different cipher suites depending on
libcrypto version. Defaults to openssl-1.1.1.""")
    args = parser.parse_args()
    use_corked_io = args.use_corked_io
    # Retrieve the test ciphers to use based on the libcrypto version s2n was built with
    test_ciphers = S2N_LIBCRYPTO_TO_TEST_CIPHERS[args.libcrypto]
    host = args.host
    port = args.port
    fips_mode = False
    if environ.get("S2N_TEST_IN_FIPS_MODE") is not None:
        fips_mode = True
        print("\nRunning s2nd in FIPS mode.")
    print("\nRunning tests with: " + os.popen('openssl version').read())
    if use_corked_io == True:
        print("Corked IO is on")
    failed = 0
    failed += handshake_test(host, port, test_ciphers, fips_mode)
    return failed
if __name__ == "__main__":
sys.exit(main())
| 2.0625 | 2 |
python_pubsub/cgitb.py | kragen/mod_pubsub | 1 | 12759969 | <gh_stars>1-10
# cgitb.py -- a traceback printer for CGI scripts
# We got this code from http://lfw.org/python/ -- thanks ?!ng
# FIXME: Does not escape HTML markup in exceptions.
# This exposes browsers to inter-domain security compromises and spoofing.
import sys, os, types, string, keyword, linecache, tokenize, inspect, pydoc
def breaker():
    """Return HTML that resets the page colour and closes open tables.

    Emitted before the traceback so the report is not swallowed by any
    half-open markup the failing script already produced.
    """
    body_reset = '<body bgcolor="#f0f0ff">'
    spacer = '<font color="#f0f0ff" size="-5"> > </font> '
    return body_reset + spacer + '</table>' * 5
def html(context=5):
    """Return an HTML-formatted report for the exception being handled.

    *context* is the number of source lines of context to show around each
    frame in the traceback.  NOTE: this is Python 2 code — it relies on
    sys.exc_type/sys.exc_value, the string module, and dict.has_key.
    """
    etype, evalue = sys.exc_type, sys.exc_value
    if type(etype) is types.ClassType:
        # Old-style exception classes: display the class name, not the repr.
        etype = etype.__name__
    pyver = 'Python ' + string.split(sys.version)[0] + '<br>' + sys.executable
    head = pydoc.html.heading(
        '<big><big><strong>%s</strong></big></big>' % str(etype),
        '#ffffff', '#aa55cc', pyver)
    head = head + ('<p>A problem occurred while running a Python script. '
                   'Here is the sequence of function calls leading up to '
                   'the error, with the most recent (innermost) call last.')
    indent = '<tt><small>%s</small> </tt>' % (' ' * 5)
    traceback = []
    # One iteration per frame between the handler and the raise site.
    for frame, file, lnum, func, lines, index in inspect.trace(context):
        file = str(file) # HACK: file might not be filename, but we don't care
        file = os.path.abspath(file)
        link = '<a href="file:%s">%s</a>' % (file, pydoc.html.escape(file))
        args, varargs, varkw, locals = inspect.getargvalues(frame)
        if func == '?':
            call = ''
        else:
            call = 'in <strong>%s</strong>' % func + inspect.formatargvalues(
                args, varargs, varkw, locals,
                formatvalue=lambda value: '=' + pydoc.html.repr(value))
        # Collect the names used on the current source line by tokenizing it;
        # raising IndexError from the callback aborts after the first NEWLINE.
        names = []
        def tokeneater(type, token, start, end, line, names=names):
            if type == tokenize.NAME and token not in keyword.kwlist:
                if token not in names:
                    names.append(token)
            if type == tokenize.NEWLINE: raise IndexError
        def linereader(file=file, lnum=[lnum]):
            line = linecache.getline(file, lnum[0])
            lnum[0] = lnum[0] + 1
            return line
        try:
            tokenize.tokenize(linereader, tokeneater)
        except IndexError: pass
        # Render "name = value" for each referenced local or global.
        lvals = []
        for name in names:
            if name in frame.f_code.co_varnames:
                if locals.has_key(name):
                    value = pydoc.html.repr(locals[name])
                else:
                    value = '<em>undefined</em>'
                name = '<strong>%s</strong>' % name
            else:
                if frame.f_globals.has_key(name):
                    value = pydoc.html.repr(frame.f_globals[name])
                else:
                    value = '<em>undefined</em>'
                name = '<em>global</em> <strong>%s</strong>' % name
            lvals.append('%s = %s' % (name, value))
        if lvals:
            lvals = string.join(lvals, ', ')
            lvals = indent + '''
<small><font color="#909090">%s</font></small><br>''' % lvals
        else:
            lvals = ''
        level = '''
<table width="100%%" bgcolor="#d8bbff" cellspacing=0 cellpadding=2 border=0>
<tr><td>%s %s</td></tr></table>''' % (link, call)
        # Build the source excerpt, highlighting the failing line.
        excerpt = []
        i = lnum - index
        for line in lines:
            number = ' ' * (5-len(str(i))) + str(i)
            number = '<small><font color="#909090">%s</font></small>' % number
            line = '<tt>%s %s</tt>' % (number, pydoc.html.preformat(line))
            if i == lnum:
                line = '''
<table width="100%%" bgcolor="#ffccee" cellspacing=0 cellpadding=0 border=0>
<tr><td>%s</td></tr></table>''' % line
            excerpt.append('\n' + line)
            if i == lnum:
                excerpt.append(lvals)
            i = i + 1
        traceback.append('<p>' + level + string.join(excerpt, '\n'))
    exception = '<p><strong>%s</strong>: %s' % (str(etype), str(evalue))
    # For old-style exception instances, dump their attributes too.
    attribs = []
    if type(evalue) is types.InstanceType:
        for name in dir(evalue):
            value = pydoc.html.repr(getattr(evalue, name))
            attribs.append('<br>%s%s = %s' % (indent, name, value))
    return head + string.join(traceback) + exception + string.join(attribs)
def handler():
    # CGI exception hook: emit an HTML section break followed by the rendered
    # HTML traceback for the exception currently being handled.
    # (Python 2 print-statement syntax; breaker()/html() are defined above.)
    print breaker()
    print html()
if __name__ == '__main__':
    # Self-test: importing 'tester' is expected to fail, which exercises the
    # HTML traceback handler.  The bare except is deliberate here: any
    # exception type should be rendered, never propagated to the CGI layer.
    try: import tester
    except: handler()
# End of cgitb.py
| 2.65625 | 3 |
json2obsidian.py | dearmiles/TheBrain2TiddlyNObsidian | 0 | 12759970 | import codecs
import json
import markdownify
strSourcePath = 'd:\\工作\\b2t\\'
strMDPath = 'd:\\工作\\b2t\\markdown\\'
def readjson(strFileName):
    """Load and return the JSON document stored in *strFileName*.

    Returns the parsed object (a list of "thought" dicts for a TheBrain
    export) or ``None`` if the file cannot be opened or parsed.

    Bug fixes: the original ignored *strFileName* and always opened a
    hard-coded path, and it never closed the file handle.
    """
    try:
        with codecs.open(strFileName, 'r', 'utf-8') as jsonfin:
            objthoughts = json.load(jsonfin)
    except Exception as e:
        # Best-effort loader: report the problem and signal failure to caller.
        print(e)
        return None
    else:
        return objthoughts
def scanjson(listjson):
    """Scan the list of thought dicts and build two mappings.

    Returns a tuple ``(dictTupleFileContents, dictJsonLinks)`` where
    ``dictTupleFileContents`` maps a thought id to ``(title, content)``
    (title sanitised for filename use) and ``dictJsonLinks`` maps a thought
    id to the list of thought ids it links to via ``tmap.edges``.

    Rewritten with ``in`` / ``dict.get`` / ``setdefault`` instead of the
    original's repeated ``__contains__`` calls and ``dict()`` copies;
    behavior is unchanged.
    """
    dictTupleFileContents = {}
    dictJsonLinks = {}
    for jsonthought in listjson:
        strtitle = str(jsonthought['title'])
        # Replace characters that are invalid in filenames (':\/?*"<>|')
        # with visually similar substitutes.
        strtitle = strtitle.replace('\\',u'·').replace('/',u'、').replace('*','_').replace('?',u'?').replace('"',u'“').replace('<','《').replace('>','》').replace('|','_').replace(':',u':')
        strcontent = jsonthought.get('text', '')
        if 'tmap.id' not in jsonthought:
            # A thought without an id cannot be filed or linked.
            continue
        strID = jsonthought['tmap.id']
        if strID not in dictTupleFileContents:
            dictTupleFileContents[strID] = (strtitle, strcontent)
        # 'tmap.edges' holds a JSON string; anything longer than "{}" has edges.
        if 'tmap.edges' in jsonthought and len(jsonthought['tmap.edges']) > 2:
            dictEdges = json.loads(jsonthought['tmap.edges'])
            for edge_info in dictEdges.values():
                dictJsonLinks.setdefault(strID, []).append(edge_info['to'])
    return dictTupleFileContents, dictJsonLinks
def writeFileContent(strFileName, strContentHTML):
    """Convert *strContentHTML* to Markdown and append it to
    ``<strMDPath>/<strFileName>.md``.

    Fix: use a ``with`` block so the handle is closed even if the write
    raises (the original leaked the handle on error).
    """
    strContentMD = markdownify.markdownify(strContentHTML, heading_style="ATX")
    with codecs.open(strMDPath + strFileName + '.md', 'a', 'utf-8') as mdout:
        mdout.write(strContentMD)
def insertlink(strFileName, listLinks):
    """Append one Obsidian wiki-link line (``[[title]]``) per entry of
    *listLinks* to ``<strMDPath>/<strFileName>.md``.

    Fix: use a ``with`` block so the handle is closed even if a write
    raises (the original leaked the handle on error).
    """
    with codecs.open(strMDPath + strFileName + '.md', 'a', 'utf-8') as mdout:
        for strLink in listLinks:
            strLine = '''[[%s]]\n\r''' % strLink
            mdout.writelines(strLine)
strFileName = strSourcePath + 'thoughts.json'
if __name__ == "__main__":
    # Pipeline: load the TheBrain export, index thoughts and their links,
    # then write one Markdown file per thought plus its wiki-links.
    jsonThought = readjson(strFileName)
    dictTupleFileContents, dictJsonLinks = scanjson(jsonThought)
    for item in dict(dictTupleFileContents).items():
        strID = item[0]
        tupleContent = item[1]
        # NOTE: strFileName is rebound here to the sanitised thought title.
        strFileName = tupleContent[0]
        strContent = tupleContent[1]
        listLinkIDs = []
        listLinks = []
        if dict(dictJsonLinks).__contains__(strID):
            # Resolve each linked thought id back to its (title) filename.
            listLinkIDs = dict(dictJsonLinks)[strID]
            for strLinkID in listLinkIDs:
                strLink = dict(dictTupleFileContents)[strLinkID][0]
                listLinks.append(strLink)
        writeFileContent(strFileName, strContent)
        insertlink(strFileName, listLinks)
    pass
| 2.90625 | 3 |
test/test.py | gieeedreee/calculator | 0 | 12759971 | import pytest
from calculator.calculator import Calculator
def test_add():
    # Adding 15 to a default (zero-initialised) Calculator yields 15.
    assert Calculator().add(15) == 15
def test_subtract():
    # 20 - 15 leaves the calculator at 5.
    assert Calculator(20).subtract(15) == 5
def test_multiply():
    # 2 * 15 leaves the calculator at 30.
    assert Calculator(2).multiply(15) == 30
def test_divide():
    # Division by zero is a no-op: the stored value stays 50.
    assert Calculator(50).divide(0) == 50
def test_nth_root():
    # The 2nd root (square root) of 100 is 10.
    assert Calculator(100).nth_root(2) == 10
def test_reset():
    # reset() clears the stored value back to zero.
    assert Calculator(10).reset() == 0
| 2.9375 | 3 |
apischema/types.py | callumforrester/apischema | 0 | 12759972 | <filename>apischema/types.py<gh_stars>0
import collections.abc
import sys
from enum import Enum, auto
from itertools import chain
from types import MappingProxyType
from typing import (
AbstractSet,
Any,
Collection,
Dict,
FrozenSet,
List,
Mapping,
MutableMapping,
MutableSequence,
MutableSet,
Sequence,
Set,
TYPE_CHECKING,
Tuple,
Type,
Union,
)
# Public type aliases used throughout apischema.
AnyType = Any
NoneType: Type[None] = type(None)
Number = Union[int, float]
# Scalar types handled without further recursion (NoneType maps to JSON null).
PRIMITIVE_TYPES = (str, int, bool, float, NoneType)
# Maps every supported sequence/set annotation (typing alias, ABC or builtin)
# to the concrete builtin constructor used for that annotation.
COLLECTION_TYPES = {
    Collection: tuple,
    collections.abc.Collection: tuple,
    Sequence: tuple,
    collections.abc.Sequence: tuple,
    Tuple: tuple,
    tuple: tuple,
    MutableSequence: list,
    collections.abc.MutableSequence: list,
    List: list,
    list: list,
    AbstractSet: frozenset,
    collections.abc.Set: frozenset,
    FrozenSet: frozenset,
    frozenset: frozenset,
    MutableSet: set,
    collections.abc.MutableSet: set,
    Set: set,
    set: set,
}
# Maps mapping annotations to their concrete constructor; immutable mapping
# annotations produce read-only MappingProxyType views.
MAPPING_TYPES = {
    Mapping: MappingProxyType,
    collections.abc.Mapping: MappingProxyType,
    MutableMapping: dict,
    collections.abc.MutableMapping: dict,
    Dict: dict,
    dict: dict,
    MappingProxyType: MappingProxyType,
}
if sys.version_info >= (3, 7):  # pragma: no cover
    # dict preserves insertion order since 3.7, so OrderedDict is redundant.
    OrderedDict = dict
    ChainMap = collections.ChainMap
else:  # pragma: no cover
    OrderedDict = collections.OrderedDict
    class ChainMap(collections.ChainMap):
        def __iter__(self):
            # Yield keys in first-seen order across maps, oldest map first,
            # to mimic the ordered semantics of the 3.7+ branch.
            return iter({k: None for k in chain.from_iterable(reversed(self.maps))})
class Metadata(Mapping[str, Any]):
    """String-keyed mapping supporting ``|`` merge (PEP 584 style).

    In both ``a | b`` and ``b | a`` the right-hand operand's values take
    precedence on duplicate keys, matching dict-update semantics.
    """
    def __or__(self, other: Mapping[str, Any]) -> "Metadata":
        # {**self, **other}: other's values override on duplicate keys.
        return MetadataImplem({**self, **other})
    def __ror__(self, other: Mapping[str, Any]) -> "Metadata":
        # Reflected form (other | self): self's values override other's.
        return MetadataImplem({**other, **self})
class MetadataMixin(Metadata):
    """Self-keyed metadata: the instance acts as the mapping ``{key: self}``.

    Subclasses declare ``key``; lookups under any other key raise KeyError.
    """
    key: str
    def __getitem__(self, key):
        if key == self.key:
            return self
        raise KeyError(key)
    def __iter__(self):
        yield self.key
    def __len__(self):
        return 1
class MetadataImplem(dict, Metadata):  # type: ignore
    """Concrete dict-backed Metadata; hashable so it can key sets/dicts."""
    def __hash__(self):
        # Sort items so equal dicts hash equally regardless of insert order.
        sorted_items = sorted(self.items())
        return hash(tuple(sorted_items))
# Singleton type, see https://www.python.org/dev/peps/pep-0484/#id30
if TYPE_CHECKING:
    # For type checkers: model Undefined as an Enum member so it forms a
    # proper singleton type usable in Literal/Union annotations.
    class UndefinedType(Enum):
        Undefined = auto()
    Undefined = UndefinedType.Undefined
else:
    # At runtime: a falsy sentinel whose constructor always yields the single
    # instance created below, so UndefinedType() is Undefined.
    class UndefinedType:
        def __new__(cls):
            return Undefined
        def __repr__(self):
            return "Undefined"
        def __str__(self):
            return "Undefined"
        def __bool__(self):
            # Falsy, like None, so `if value:` skips undefined fields.
            return False
    # Bypass __new__ (which would recurse) to create the one true instance.
    Undefined = object.__new__(UndefinedType)
| 2.15625 | 2 |
django_prbac/tests/test_models.py | doordash/django-prbac | 0 | 12759973 | <filename>django_prbac/tests/test_models.py
# Use modern Python
from __future__ import unicode_literals, absolute_import, print_function
# Standard Library Imports
# Django imports
from django.test import TestCase # https://code.djangoproject.com/ticket/20913
# External Library imports
# Local imports
from django_prbac.models import *
from django_prbac import arbitrary
class TestRole(TestCase):
    """Privilege resolution over the role grant graph.

    has_privilege accepts either Role or instantiated-role arguments on both
    sides; each test asserts the same relation through every combination.
    """
    def setUp(self):
        # Role lookups are memoized; clear between tests for isolation.
        Role.get_cache().clear()
    def test_has_permission_immediate_no_params(self):
        # A direct grant confers the granted role but no unrelated role.
        subrole = arbitrary.role()
        superrole1 = arbitrary.role()
        superrole2 = arbitrary.role()
        arbitrary.grant(to_role=superrole1, from_role=subrole)
        # A few ways of saying the same thing
        self.assertTrue(subrole.instantiate({}).has_privilege(superrole1.instantiate({})))
        self.assertTrue(subrole.has_privilege(superrole1.instantiate({})))
        self.assertTrue(subrole.instantiate({}).has_privilege(superrole1))
        self.assertTrue(subrole.has_privilege(superrole1))
        self.assertFalse(subrole.instantiate({}).has_privilege(superrole2.instantiate({})))
        self.assertFalse(subrole.has_privilege(superrole2.instantiate({})))
        self.assertFalse(subrole.instantiate({}).has_privilege(superrole2))
        self.assertFalse(subrole.has_privilege(superrole2))
    def test_has_permission_transitive_no_params(self):
        # Privileges follow grants transitively through one intermediate role.
        subrole = arbitrary.role()
        midrole = arbitrary.role()
        superrole1 = arbitrary.role()
        superrole2 = arbitrary.role()
        arbitrary.grant(to_role=midrole, from_role=subrole)
        arbitrary.grant(to_role=superrole1, from_role=midrole)
        # A few ways of saying the same thing
        self.assertTrue(subrole.instantiate({}).has_privilege(superrole1.instantiate({})))
        self.assertTrue(subrole.has_privilege(superrole1.instantiate({})))
        self.assertTrue(subrole.instantiate({}).has_privilege(superrole1))
        self.assertTrue(subrole.has_privilege(superrole1))
        self.assertFalse(subrole.instantiate({}).has_privilege(superrole2.instantiate({})))
        self.assertFalse(subrole.has_privilege(superrole2.instantiate({})))
        self.assertFalse(subrole.instantiate({}).has_privilege(superrole2))
        self.assertFalse(subrole.has_privilege(superrole2))
    def test_has_permission_far_transitive_no_params(self):
        # Transitivity holds across a 10-role chain, not just one hop.
        subrole = arbitrary.role()
        superrole1 = arbitrary.role()
        superrole2 = arbitrary.role()
        midroles = [arbitrary.role() for __ in range(0, 10)]
        arbitrary.grant(subrole, midroles[0])
        arbitrary.grant(midroles[-1], superrole1)
        # Link up all roles in the list that are adjacent
        for midsubrole, midsuperrole in zip(midroles[:-1], midroles[1:]):
            arbitrary.grant(from_role=midsubrole, to_role=midsuperrole)
        self.assertTrue(subrole.instantiate({}).has_privilege(superrole1.instantiate({})))
        self.assertFalse(subrole.instantiate({}).has_privilege(superrole2.instantiate({})))
    def test_has_permission_immediate_params(self):
        # A parameterized grant only matches when the assignment agrees.
        subrole = arbitrary.role()
        superrole1 = arbitrary.role(parameters=set(['one']))
        arbitrary.grant(to_role=superrole1, from_role=subrole, assignment=dict(one='foo'))
        self.assertTrue(subrole.instantiate({}).has_privilege(superrole1.instantiate(dict(one='foo'))))
        self.assertFalse(subrole.instantiate({}).has_privilege(superrole1.instantiate(dict(one='baz'))))
    def test_unsaved_role_does_not_have_permission(self):
        # A Role never saved to the DB participates in no grants, either way.
        role1 = Role()
        role2 = arbitrary.role()
        self.assertFalse(role1.has_privilege(role2))
        self.assertFalse(role2.has_privilege(role1))
class TestGrant(TestCase):
    def test_instantiated_to_role_smoke_test(self):
        """
        Basic smoke test of Grant.instantiated_to_role:
        1. an assignment whose key is a free parameter of the role survives
           instantiation unchanged;
        2. an assignment whose key is not free for the role is dropped.
        """
        role_params = ['one']
        parent = arbitrary.role(parameters=role_params)
        bound_grant = arbitrary.grant(to_role=parent, assignment={'one': 'hello'})
        self.assertEqual(bound_grant.instantiated_to_role({}).assignment, {'one': 'hello'})
        free_grant = arbitrary.grant(to_role=parent, assignment={'two': 'goodbye'})
        self.assertEqual(free_grant.instantiated_to_role({}).assignment, {})
class TestUserRole(TestCase):
    """Integration of PRBAC roles with django.contrib.auth users."""
    def setUp(self):
        # Role lookups are memoized; clear between tests for isolation.
        Role.get_cache().clear()
    def test_user_role_integration(self):
        """A user's prbac_role grants both the direct role and its privileges."""
        account = arbitrary.user()
        base_role = arbitrary.role()
        privilege = arbitrary.role()
        arbitrary.grant(from_role=base_role, to_role=privilege)
        membership = arbitrary.user_role(user=account, role=base_role)
        self.assertEqual(account.prbac_role, membership)
        self.assertTrue(account.prbac_role.has_privilege(base_role))
        self.assertTrue(account.prbac_role.has_privilege(privilege))
| 2.296875 | 2 |
oggm/core/sia2d.py | C-Merrill/oggm | 0 | 12759974 | <filename>oggm/core/sia2d.py
import numpy as np
from numpy import ix_
import xarray as xr
import os
from oggm import cfg, utils
from oggm.cfg import G, SEC_IN_YEAR, SEC_IN_DAY
def filter_ice_border(ice_thick):
    """Sets the ice thickness at the border of the domain to zero.

    Mutates *ice_thick* in place and returns it.
    """
    for edge in (0, -1):
        ice_thick[edge, :] = 0
        ice_thick[:, edge] = 0
    return ice_thick
class Model2D(object):
    """Interface to a distributed model"""
    def __init__(self, bed_topo, init_ice_thick=None, dx=None, dy=None,
                 mb_model=None, y0=0., glen_a=None, mb_elev_feedback='annual',
                 ice_thick_filter=filter_ice_border):
        """Create a new 2D model from gridded data.
        Parameters
        ----------
        bed_topo : 2d array
            the topography
        init_ice_thick : 2d array (optional)
            the initial ice thickness (default is zero everywhere)
        dx : float
            map resolution (m)
        dy : float
            map resolution (m)
        mb_model : oggm.core.massbalance model
            the mass-balance model to use for the simulation
        y0 : int
            the starting year
        glen_a : float
            Glen's flow law parameter A
        mb_elev_feedback : str (default: 'annual')
            when to update the mass-balance model ('annual', 'monthly', or
            'always')
        ice_thick_filter : func
            function to apply to the ice thickness *after* each time step.
            See filter_ice_border for an example. Set to None for doing nothing
        """
        # Mass balance
        self.mb_elev_feedback = mb_elev_feedback
        self.mb_model = mb_model
        # Defaults
        if glen_a is None:
            glen_a = cfg.PARAMS['glen_a']
        self.glen_a = glen_a
        if dy is None:
            dy = dx
        self.dx = dx
        self.dy = dy
        self.dxdy = dx * dy
        self.y0 = None
        self.t = None
        self.reset_y0(y0)
        self.ice_thick_filter = ice_thick_filter
        # Data
        self.bed_topo = bed_topo
        self.ice_thick = None
        self.reset_ice_thick(init_ice_thick)
        self.ny, self.nx = bed_topo.shape
    @property
    def mb_model(self):
        return self._mb_model
    @mb_model.setter
    def mb_model(self, value):
        # We need a setter because the MB func is stored as an attr too
        # (monthly vs annual call picked once, plus the memoization caches
        # used by get_mb are reset whenever the model is swapped).
        _mb_call = None
        if value:
            if self.mb_elev_feedback in ['always', 'monthly']:
                _mb_call = value.get_monthly_mb
            elif self.mb_elev_feedback in ['annual', 'never']:
                _mb_call = value.get_annual_mb
            else:
                raise ValueError('mb_elev_feedback not understood')
        self._mb_model = value
        self._mb_call = _mb_call
        self._mb_current_date = None
        self._mb_current_out = dict()
        self._mb_current_heights = dict()
    def reset_y0(self, y0):
        """Reset the initial model time"""
        self.y0 = y0
        self.t = 0
    def reset_ice_thick(self, ice_thick=None):
        """Reset the ice thickness"""
        if ice_thick is None:
            ice_thick = self.bed_topo * 0.
        self.ice_thick = ice_thick.copy()
    @property
    def yr(self):
        # Current model time in (float) years; self.t is kept in seconds.
        return self.y0 + self.t / SEC_IN_YEAR
    @property
    def area_m2(self):
        # Glacierized area: number of ice-covered cells times cell area.
        return np.sum(self.ice_thick > 0) * self.dxdy
    @property
    def volume_m3(self):
        return np.sum(self.ice_thick * self.dxdy)
    @property
    def volume_km3(self):
        return self.volume_m3 * 1e-9
    @property
    def area_km2(self):
        return self.area_m2 * 1e-6
    @property
    def surface_h(self):
        return self.bed_topo + self.ice_thick
    def get_mb(self, year=None):
        """Get the mass balance at the requested height and time.
        Optimized so that no mb model call is necessary at each step.
        """
        if year is None:
            year = self.yr
        # Do we have to optimise?
        if self.mb_elev_feedback == 'always':
            return self._mb_call(self.bed_topo + self.ice_thick, year)
        date = utils.floatyear_to_date(year)
        if self.mb_elev_feedback == 'annual':
            # ignore month changes
            date = (date[0], date[0])
        if self._mb_current_date != date or (self._mb_current_out is None):
            # We need to reset all
            # (memoized MB field is recomputed once per annual/monthly date)
            self._mb_current_date = date
            _mb = self._mb_call(self.surface_h.flatten(), year)
            self._mb_current_out = _mb.reshape((self.ny, self.nx))
        return self._mb_current_out
    def step(self, dt):
        """Advance one step."""
        raise NotImplementedError
    def run_until(self, y1, stop_if_border=False):
        """Run until a selected year."""
        t = (y1 - self.y0) * SEC_IN_YEAR
        while self.t < t:
            # step() may take less than the remaining time (CFL-limited in
            # subclasses), hence the loop.
            self.step(t - self.t)
            if stop_if_border:
                if (np.any(self.ice_thick[0, :] > 10) or
                        np.any(self.ice_thick[-1, :] > 10) or
                        np.any(self.ice_thick[:, 0] > 10) or
                        np.any(self.ice_thick[:, -1] > 10)):
                    raise RuntimeError('Glacier exceeds boundaries')
            if self.ice_thick_filter is not None:
                self.ice_thick = self.ice_thick_filter(self.ice_thick)
        if np.any(~np.isfinite(self.ice_thick)):
            raise FloatingPointError('NaN in numerical solution.')
    def run_until_equilibrium(self, rate=0.001, ystep=5, max_ite=200):
        """Run until an equuilibrium is reached (can take a while)."""
        ite = 0
        was_close_zero = 0
        t_rate = 1
        # Stop when the relative volume change per ystep drops below `rate`,
        # or after 5 consecutive near-zero-volume states, or at max_ite.
        while (t_rate > rate) and (ite <= max_ite) and (was_close_zero < 5):
            ite += 1
            v_bef = self.volume_m3
            self.run_until(self.yr + ystep)
            v_af = self.volume_m3
            if np.isclose(v_bef, 0., atol=1):
                # Relative rate is undefined near zero volume; count instead.
                t_rate = 1
                was_close_zero += 1
            else:
                t_rate = np.abs(v_af - v_bef) / v_bef
        if ite > max_ite:
            raise RuntimeError('Did not find equilibrium.')
    def run_until_and_store(self, ye, step=2, run_path=None, grid=None,
                            print_stdout=False, stop_if_border=False):
        """Run until a selected year and store the output in a NetCDF file."""
        yrs = np.arange(np.floor(self.yr), np.floor(ye) + 1, step)
        out_thick = np.zeros((len(yrs), self.ny, self.nx))
        for i, yr in enumerate(yrs):
            if print_stdout and (yr / 10) == int(yr / 10):
                # Progress report every 10 model years.
                print('{}: year {} of {}, '
                      'max thick {:.1f}m\r'.format(print_stdout,
                                                   int(yr),
                                                   int(ye),
                                                   self.ice_thick.max()))
            self.run_until(yr, stop_if_border=stop_if_border)
            out_thick[i, :, :] = self.ice_thick
        run_ds = grid.to_dataset() if grid else xr.Dataset()
        run_ds['ice_thickness'] = xr.DataArray(out_thick,
                                               dims=['time', 'y', 'x'],
                                               coords={'time': yrs})
        run_ds['bed_topo'] = xr.DataArray(self.bed_topo,
                                          dims=['y', 'x'])
        # write output?
        if run_path is not None:
            if os.path.exists(run_path):
                os.remove(run_path)
            run_ds.to_netcdf(run_path)
        return run_ds
class Upstream2D(Model2D):
    """Actual model"""
    def __init__(self, bed_topo, init_ice_thick=None, dx=None,
                 mb_model=None, y0=0., glen_a=None, mb_elev_feedback='annual',
                 cfl=0.124, max_dt=31*SEC_IN_DAY,
                 ice_thick_filter=filter_ice_border):
        """Create a new 2D model from gridded data.
        Parameters
        ----------
        bed_topo : 2d array
            the topography
        init_ice_thick : 2d array (optional)
            the initial ice thickness (default is zero everywhere)
        dx : float
            map resolution (m)
        dy : float
            map resolution (m)
        mb_model : oggm.core.massbalance model
            the mass-balance model to use for the simulation
        y0 : int
            the starting year
        glen_a : float
            Glen's flow law parameter A
        mb_elev_feedback : str (default: 'annual')
            when to update the mass-balance model ('annual', 'monthly', or
            'always')
        cfl : float (default:0.124)
            forward time stepping stability criteria. Default is just beyond
            R. Hindmarsh's idea of 1/2(n+1).
        max_dt : int (default: 31 days)
            maximum allow time step (in seconds). Useful because otherwise the
            automatic time step can be quite ambitious.
        ice_thick_filter : func
            function to apply to the ice thickness *after* each time step.
            See filter_ice_border for an example. Set to None for doing nothing
        """
        super(Upstream2D, self).__init__(bed_topo,
                                         init_ice_thick=init_ice_thick,
                                         dx=dx, mb_model=mb_model, y0=y0,
                                         glen_a=glen_a,
                                         mb_elev_feedback=mb_elev_feedback,
                                         ice_thick_filter=ice_thick_filter)
        # We introduce Gamma to shorten the equations
        self.rho = cfg.PARAMS['ice_density']
        self.glen_n = cfg.PARAMS['glen_n']
        self.gamma = (2. * self.glen_a * (self.rho * G) ** self.glen_n
                      / (self.glen_n + 2))
        # forward time stepping stability criteria
        # default is just beyond R. Hindmarsh's idea of 1/2(n+1)
        self.cfl = cfl
        self.max_dt = max_dt
        # extend into 2D
        self.Lx = 0.5 * (self.nx - 1) * self.dx
        self.Ly = 0.5 * (self.ny - 1) * self.dy
        # Some indices
        # k/l index rows/columns; kp/lp and km/lm are the +1/-1 neighbours,
        # clamped at the domain edges (repeat-border handling).
        self.k = np.arange(0, self.ny)
        self.kp = np.hstack([np.arange(1, self.ny), self.ny - 1])
        self.km = np.hstack([0, np.arange(0, self.ny - 1)])
        self.l = np.arange(0, self.nx)  # flake8: noqa E741
        self.lp = np.hstack([np.arange(1, self.nx), self.nx - 1])
        self.lm = np.hstack([0, np.arange(0, self.nx - 1)])
        # Scratch buffers reused at every step to avoid re-allocation.
        self.H_upstream_up = np.zeros((self.ny, self.nx))
        self.H_upstream_dn = np.zeros((self.ny, self.nx))
        # Easy optimisation
        # (pre-built fancy-index tuples for the neighbour shifts)
        self._ixklp = ix_(self.k, self.lp)
        self._ixkl = ix_(self.k, self.l)
        self._ixklm = ix_(self.k, self.lm)
        self._ixkpl = ix_(self.kp, self.l)
        self._ixkml = ix_(self.km, self.l)
        self._ixkplp = ix_(self.kp, self.lp)
        self._ixkplm = ix_(self.kp, self.lm)
        self._ixkmlm = ix_(self.km, self.lm)
        self._ixkmlp = ix_(self.km, self.lp)
    def diffusion_upstream_2d(self):
        # Builded upon the Eq. (62) with the term in y in the diffusivity.
        # It differs from diffusion_Upstream_2D_V1 only for the definition of
        # "s_grad", l282-283 & l305-306 in V1 and l355-356 & l379-380 in V2)
        # Returns (div(D * grad S), dt_cfl): the flux divergence of the SIA
        # diffusion and the largest stable explicit time step.
        H = self.ice_thick
        S = self.surface_h
        N = self.glen_n
        # Optim
        S_ixklp = S[self._ixklp]
        S_ixkl = S[self._ixkl]
        S_ixklm = S[self._ixklm]
        S_ixkml = S[self._ixkml]
        S_ixkpl = S[self._ixkpl]
        S_ixkplp = S[self._ixkplp]
        S_ixkplm = S[self._ixkplm]
        S_ixkmlm = S[self._ixkmlm]
        S_ixkmlp = S[self._ixkmlp]
        Hl = H[self._ixkl]
        Hlp = H[self._ixklp]
        Hlm = H[self._ixklm]
        Hk = Hl
        Hkp = H[self._ixkpl]
        Hkm = H[self._ixkml]
        # --- all the l components
        # applying Eq. (61) to the scheme
        H_l_up = 0.5 * (Hlp + Hl)
        H_l_dn = 0.5 * (Hl + Hlm)
        # Upstream thickness: pick the neighbour on the higher-surface side.
        H_l_upstream_up = self.H_upstream_up
        gt = S_ixklp > S_ixkl
        H_l_upstream_up[gt] = Hlp[gt]
        H_l_upstream_up[~gt] = Hl[~gt]
        H_l_upstream_dn = self.H_upstream_dn
        gt = S_ixkl > S_ixklm
        H_l_upstream_dn[gt] = Hl[gt]
        H_l_upstream_dn[~gt] = Hlm[~gt]
        # applying Eq. (62) to the scheme
        S_diff = S_ixkpl - S_ixkml
        S_lpdiff = S_ixklp - S_ixkl
        S_lmdiff = S_ixkl - S_ixklm
        s_l_grad_up = (((S_diff + S_ixkplp - S_ixkmlp)
                        ** 2. / (4 * self.dx) ** 2.) +
                       (S_lpdiff ** 2. / self.dy ** 2.)) ** ((N - 1.) / 2.)
        s_l_grad_dn = (((S_diff + S_ixkplm - S_ixkmlm)
                        ** 2. / (4 * self.dx) ** 2.) +
                       (S_lmdiff ** 2. / self.dy ** 2.)) ** ((N - 1.) / 2.)
        D_l_up = self.gamma * H_l_up ** (N + 1) * H_l_upstream_up * s_l_grad_up
        D_l_dn = self.gamma * H_l_dn ** (N + 1) * H_l_upstream_dn * s_l_grad_dn
        # --- all the k components
        # applying Eq. (61) to the scheme
        H_k_up = 0.5 * (Hkp + Hl)
        H_k_dn = 0.5 * (Hl + Hkm)
        H_k_upstream_up = self.H_upstream_up
        gt = S_ixkpl > S_ixkl
        H_k_upstream_up[gt] = Hkp[gt]
        H_k_upstream_up[~gt] = Hk[~gt]
        H_k_upstream_dn = self.H_upstream_dn
        gt = S_ixkl > S_ixkml
        H_k_upstream_dn[gt] = Hk[gt]
        H_k_upstream_dn[~gt] = Hkm[~gt]
        # applying Eq. (62) to the scheme
        S_diff = S_ixklp - S_ixklm
        S_kpdiff = S_ixkpl - S_ixkl
        S_kmdiff = S_ixkl - S_ixkml
        s_k_grad_up = (((S_diff + S_ixkplp - S_ixkplm)
                        ** 2. / (4 * self.dy) ** 2.) +
                       (S_kpdiff ** 2. / self.dx ** 2.)) ** ((N - 1.) / 2.)
        s_k_grad_dn = (((S_diff + S_ixkmlp - S_ixkmlm)
                        ** 2. / (4 * self.dy) ** 2.) +
                       (S_kmdiff ** 2. / self.dx ** 2.)) ** ((N - 1.) / 2.)
        D_k_up = self.gamma * H_k_up ** (N + 1) * H_k_upstream_up * s_k_grad_up
        D_k_dn = self.gamma * H_k_dn ** (N + 1) * H_k_upstream_dn * s_k_grad_dn
        # --- Check the cfl condition
        divisor = max(max(np.max(np.abs(D_k_up)), np.max(np.abs(D_k_dn))),
                      max(np.max(np.abs(D_l_up)), np.max(np.abs(D_l_dn))))
        if divisor == 0:
            # No ice flux anywhere: any step up to max_dt is stable.
            dt_cfl = self.max_dt
        else:
            dt_cfl = (self.cfl * min(self.dx ** 2., self.dy ** 2.) / divisor)
        # --- Calculate Final diffusion term
        div_k = (D_k_up * S_kpdiff / self.dy -
                 D_k_dn * S_kmdiff / self.dy) / self.dy
        div_l = (D_l_up * S_lpdiff / self.dx -
                 D_l_dn * S_lmdiff / self.dx) / self.dx
        return div_l + div_k, dt_cfl
    def step(self, dt):
        """Advance one step."""
        # Explicit Euler update of H, limited by the CFL time step; thickness
        # is clipped at zero (no negative ice).
        div_q, dt_cfl = self.diffusion_upstream_2d()
        dt_use = utils.clip_scalar(np.min([dt_cfl, dt]), 0, self.max_dt)
        self.ice_thick = utils.clip_min(self.surface_h +
                                        (self.get_mb() + div_q) * dt_use -
                                        self.bed_topo,
                                        0)
        # Next step
        self.t += dt_use
        # NOTE(review): returns the *requested* dt, not the dt_use actually
        # applied — confirm whether callers rely on this.
        return dt
| 3.03125 | 3 |
bspider/commands/master.py | littlebai3618/bspider | 3 | 12759975 | <gh_stars>1-10
"""
** 检查是否有supervisor.pid文件确认supervisor是否启动,启动则调用rpc接口启动进程 否则:初始化supervisor.conf 启动进程
1. web service
2. 调度器
3. 定时任务管理
# 如果MySQL表未创建的实例化表和初始数据
"""
# Silence PyMySQL warnings raised while executing SQL statements.
from warnings import filterwarnings
import pymysql
filterwarnings('ignore', category = pymysql.Warning)
import os
import re
import string
import time
from shutil import ignore_patterns, copy2
from werkzeug.security import generate_password_hash
from bspider.commands import BSpiderCommand
from bspider.utils.conf import PLATFORM_PATH_ENV, PLATFORM_NAME_ENV
from bspider.utils.database import prepare_insert_sql
from bspider.utils.exceptions import UsageError
from bspider.utils.template import render_templatefile
# shutil copytree-style ignore filter for *.pyc and .svn entries.
IGNORE = ignore_patterns('*.pyc', '.svn')
def init_custom_code(templates_dir, table):
    """Collect bundled ``*.module`` templates under ``inner_module`` and
    build the INSERT statements that register them in *table*.

    Each template may embed ``@name=value`` annotations; those become extra
    columns alongside the raw file content.  Returns the list produced by
    ``prepare_insert_sql`` for every collected module.
    """
    statements = list()
    annotation_re = re.compile('@([A-Za-z]+)=(.*?)\n')
    module_root = os.path.join(templates_dir, 'inner_module')
    for dirpath, _, filenames in os.walk(module_root):
        for filename in filenames:
            if filename.startswith('__') or not filename.endswith('.module'):
                continue
            with open(os.path.join(dirpath, filename)) as f:
                text = f.read()
            record = dict(content=text)
            for key, raw_value in annotation_re.findall(text):
                record[key] = raw_value.strip()
            statements.append(prepare_insert_sql(table, data=record))
    return statements
# (table name, SQL-template parameters) pairs used to render the per-table
# SQL templates during first-run database initialisation; bspider_user is
# seeded with a hashed placeholder password for the default account.
PLAIN_TABLE = [
    ('bspider_cron', dict()),
    ('bspider_customcode', dict()),
    ('bspider_downloader_status', dict()),
    ('bspider_node', dict()),
    ('bspider_node_status', dict()),
    ('bspider_parser_status', dict()),
    ('bspider_project', dict()),
    ('bspider_project_customcode', dict()),
    ('bspider_user', dict(password=generate_password_hash('<PASSWORD>'))),
    ('bspider_worker', dict()),
    ('bspider_data_source', dict()),
    ('bspider_project_data_source', dict())
]
class Command(BSpiderCommand):
    """`bspider master <start|stop>` command: manage the master-node trio
    (web server, cron manager, scheduler) through supervisord."""
    def syntax(self):
        return "<op:start|stop>"
    def short_desc(self):
        """
        Run/stop BSpider by supervisor with three process:
        master: a web server to manager spiders (by gunicorn and gevent).
        bcorn: a cronjob process to manager cron task.
        scheduler: dispatch all spider project.
        ** first start master service this cmd will try to create some MySQL table
        :return:
        """
        return "Run/stop BSpider as a master node"
    def init_supervisor(self):
        """Render the gunicorn/supervisor config templates and make sure a
        supervisord instance is running (start one if no pid file exists)."""
        platform_path = os.environ[PLATFORM_PATH_ENV]
        platform_name = os.environ[PLATFORM_NAME_ENV]
        # Always (re-)render the gunicorn config from the bundled template.
        tplfile = os.path.join(self.templates_dir, 'tools_cfg', 'master_gunicorn.py.tmpl')
        copy2(tplfile, os.path.join(platform_path, '.cache', 'master_gunicorn.py.tmpl'))
        render_templatefile(os.path.join(platform_path, '.cache', 'master_gunicorn.py.tmpl'),
                            master_port=self.frame_settings['MASTER']['port'],
                            master_ip=self.frame_settings['MASTER']['ip'],
                            log_level=self.frame_settings['LOGGER_LEVEL'].lower(),
                            platform_name=platform_name,
                            platform_path=platform_path)
        # A pid file means supervisord is already up: nothing more to do.
        if os.path.exists(os.path.join(platform_path, '.cache', 'supervisord.pid')):
            return True
        tplfile = os.path.join(self.templates_dir, 'tools_cfg', 'supervisor.conf.tmpl')
        config_path = os.path.join(platform_path, '.cache', 'supervisor.conf')
        copy2(tplfile, config_path)
        render_templatefile(config_path,
                            platform_path=platform_path,
                            bin_path=os.path.join(platform_path, 'bin'),
                            master_ip=self.frame_settings['MASTER']['ip'],
                            supervisor_rpc_port=self.frame_settings['SUPERVISOR_RPC']['port'],
                            supervisor_rpc_username=self.frame_settings['SUPERVISOR_RPC']['username'],
                            supervisor_rpc_password=self.frame_settings['SUPERVISOR_RPC']['password'])
        cmd = 'supervisord -c {}'.format(os.path.join(platform_path, '.cache', 'supervisor.conf'))
        print('start supervisor')
        print(f'cmd: {cmd}')
        print(os.popen(cmd).read().strip())
        # Give supervisord a moment to daemonise before we talk to it.
        time.sleep(3)
        return True
    def init_database(self):
        """Create and seed the bspider_* MySQL tables if they are missing
        (after interactive confirmation)."""
        from bspider.utils.database import MysqlClient
        mysql_client = MysqlClient.from_settings(self.frame_settings['WEB_STUDIO_DB'])
        sql = 'show tables;'
        remote_table = set()
        for table in mysql_client.select(sql):
            for table_name in table.values():
                if table_name.startswith('bspider_'):
                    remote_table.add(table_name)
        # All expected tables present: nothing to initialise.
        if set([table[0] for table in PLAIN_TABLE]) == remote_table:
            return True
        else:
            print('Warning: Mysql Table is not create or destroyed')
            print(remote_table)
            print([table[0] for table in PLAIN_TABLE])
            while True:
                in_content = input("Initialize MySQL table or not (Y/N):")
                if in_content.upper() == "N":
                    exit(1)
                elif in_content.upper() == "Y":
                    break
        # Initialize the tables from the bundled per-table SQL templates.
        for table, param in PLAIN_TABLE:
            sql_path = os.path.join(self.templates_dir, 'table_sql', f'{table}.sql')
            with open(sql_path) as f:
                sql_list = string.Template(f.read().strip()).substitute(**param)
            # Templates may contain several ';'-separated statements.
            for sql in sql_list.split(';\n'):
                if len(sql):
                    mysql_client.query(sql)
            print(f'init table:{table} success')
        # Register the bundled inner modules as custom-code rows.
        sql_list = init_custom_code(self.templates_dir, 'bspider_customcode')
        print(f'insert {len(sql_list)} inner module')
        for sql, value in sql_list:
            mysql_client.insert(sql, value)
        return True
    def run(self, args, opts):
        # Entry point: validate <op>, prepare DB/supervisord on start, then
        # forward start/stop of each managed process to supervisorctl.
        if len(args) != 1:
            raise UsageError('args error')
        op = args[0]
        if op not in ('start', 'stop'):
            raise UsageError('unknow op: %s' % (op))
        if op == 'start':
            self.init_database()
            self.init_supervisor()
        rpc_socket = os.path.join(os.environ[PLATFORM_PATH_ENV], '.cache', 'supervisor.conf')
        print(f'Use cmd: supervisorctl -c {rpc_socket} {op} {{module}}')
        print('=======supervisor output ========')
        for module in ('master', 'bcorn', 'scheduler'):
            cmd = f'supervisorctl -c {rpc_socket} {op} {module}'
            print(os.popen(cmd).read().strip())
        print('=================================')
        # print(f'A new BSpider master node {op}!')
        print(f'see /platform/logs/supervisor/{module}.log to check process status!')
| 1.960938 | 2 |
tools/gen_dispatcher_collect_macros.py | RaviGaddipati/libsimdpp | 0 | 12759976 | #!/usr/bin/env python3
# Copyright (C) 2015 <NAME> <<EMAIL>>
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# Generates the simdpp/dispatch/collect_macros_generated.h file
# Use as $ ./tools/gen_dispatcher_collect_macros.py > simdpp/dispatch/collect_macros_generated.h
from gen_common import output_template
num_archs = 15
single_arch_template = '''
#ifdef SIMDPP_DISPATCH_ARCH$num$
#define SIMDPP_ARCH_PP_LIST SIMDPP_DISPATCH_ARCH$num$
#include <simdpp/detail/preprocess_single_arch.h>
// Use the results of preprocess_single_arch.h to define
// SIMDPP_DISPATCH_$num$_NAMESPACE
#if SIMDPP_ARCH_PP_NS_USE_NULL
#define SIMDPP_DISPATCH_$num$_NS_ID_NULL SIMDPP_INSN_ID_NULL
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_NULL
#endif
#if SIMDPP_ARCH_PP_NS_USE_SSE2
#define SIMDPP_DISPATCH_$num$_NS_ID_SSE2 SIMDPP_INSN_ID_SSE2
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_SSE2
#endif
#if SIMDPP_ARCH_PP_NS_USE_SSE3
#define SIMDPP_DISPATCH_$num$_NS_ID_SSE3 SIMDPP_INSN_ID_SSE3
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_SSE3
#endif
#if SIMDPP_ARCH_PP_NS_USE_SSSE3
#define SIMDPP_DISPATCH_$num$_NS_ID_SSSE3 SIMDPP_INSN_ID_SSSE3
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_SSSE3
#endif
#if SIMDPP_ARCH_PP_NS_USE_SSE4_1
#define SIMDPP_DISPATCH_$num$_NS_ID_SSE4_1 SIMDPP_INSN_ID_SSE4_1
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_SSE4_1
#endif
#if SIMDPP_ARCH_PP_NS_USE_AVX
#define SIMDPP_DISPATCH_$num$_NS_ID_AVX SIMDPP_INSN_ID_AVX
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_AVX
#endif
#if SIMDPP_ARCH_PP_NS_USE_AVX2
#define SIMDPP_DISPATCH_$num$_NS_ID_AVX2 SIMDPP_INSN_ID_AVX2
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_AVX2
#endif
#if SIMDPP_ARCH_PP_NS_USE_FMA3
#define SIMDPP_DISPATCH_$num$_NS_ID_FMA3 SIMDPP_INSN_ID_FMA3
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_FMA3
#endif
#if SIMDPP_ARCH_PP_NS_USE_FMA4
#define SIMDPP_DISPATCH_$num$_NS_ID_FMA4 SIMDPP_INSN_ID_FMA4
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_FMA4
#endif
#if SIMDPP_ARCH_PP_NS_USE_XOP
#define SIMDPP_DISPATCH_$num$_NS_ID_XOP SIMDPP_INSN_ID_XOP
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_XOP
#endif
#if SIMDPP_ARCH_PP_NS_USE_AVX512F
#define SIMDPP_DISPATCH_$num$_NS_ID_AVX512F SIMDPP_INSN_ID_AVX512F
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_AVX512F
#endif
#if SIMDPP_ARCH_PP_NS_USE_NEON
#define SIMDPP_DISPATCH_$num$_NS_ID_NEON SIMDPP_INSN_ID_NEON
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_NEON
#endif
#if SIMDPP_ARCH_PP_NS_USE_NEON_FLT_SP
#define SIMDPP_DISPATCH_$num$_NS_ID_NEON_FLT_SP SIMDPP_INSN_ID_NEON_FLT_SP
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_NEON_FLT_SP
#endif
#if SIMDPP_ARCH_PP_NS_USE_ALTIVEC
#define SIMDPP_DISPATCH_$num$_NS_ID_ALTIVEC SIMDPP_INSN_ID_ALTIVEC
#else
#define SIMDPP_DISPATCH_$num$_NS_ID_ALTIVEC
#endif
#define SIMDPP_DISPATCH_$num$_NAMESPACE SIMDPP_PP_PASTE15(arch, $n$
SIMDPP_DISPATCH_$num$_NS_ID_NULL, $n$
SIMDPP_DISPATCH_$num$_NS_ID_SSE2, $n$
SIMDPP_DISPATCH_$num$_NS_ID_SSE3, $n$
SIMDPP_DISPATCH_$num$_NS_ID_SSSE3, $n$
SIMDPP_DISPATCH_$num$_NS_ID_SSE4_1, $n$
SIMDPP_DISPATCH_$num$_NS_ID_AVX, $n$
SIMDPP_DISPATCH_$num$_NS_ID_AVX2, $n$
SIMDPP_DISPATCH_$num$_NS_ID_AVX512F, $n$
SIMDPP_DISPATCH_$num$_NS_ID_FMA3, $n$
SIMDPP_DISPATCH_$num$_NS_ID_FMA4, $n$
SIMDPP_DISPATCH_$num$_NS_ID_XOP, $n$
SIMDPP_DISPATCH_$num$_NS_ID_NEON, $n$
SIMDPP_DISPATCH_$num$_NS_ID_NEON_FLT_SP, $n$
SIMDPP_DISPATCH_$num$_NS_ID_ALTIVEC)
#define SIMDPP_DISPATCH_$num$_FN_REGISTER(ARRAY,NAME,FUN_TYPE) $n$
ARRAY[$num$-1] = SIMDPP_DISPATCH_$num$_NAMESPACE::register_fn_##NAME((FUN_TYPE)(NULL));
#define SIMDPP_DISPATCH_$num$_FN_DECLARE(NAME,FUN_TYPE) $n$
namespace SIMDPP_DISPATCH_$num$_NAMESPACE { $n$
::simdpp::detail::FnVersion register_fn_##NAME(FUN_TYPE); }
#undef SIMDPP_ARCH_PP_LIST
#else
#define SIMDPP_DISPATCH_$num$_FN_REGISTER(ARRAY,NAME,FUN_TYPE)
#define SIMDPP_DISPATCH_$num$_FN_DECLARE(NAME,FUN_TYPE)
#endif'''
single_fn_register_template = ' SIMDPP_DISPATCH_$num$_FN_REGISTER(ARRAY,NAME,FUN_TYPE) $n$'
single_fn_declare_template = ' SIMDPP_DISPATCH_$num$_FN_DECLARE(NAME,FUN_TYPE) $n$'
# Emit the generated header: license banner, one macro section per arch slot,
# then the aggregated DECLARE/COLLECT macros.
# Fix: the loop variable dict was named `vars`, shadowing the builtin.
print('''/* Copyright (C) 2015 <NAME> <<EMAIL>>
    Distributed under the Boost Software License, Version 1.0.
        (See accompanying file LICENSE_1_0.txt or copy at
            http://www.boost.org/LICENSE_1_0.txt)
*/

// This file is generated automatically. See tools/gen_dispatcher_collect_macros.py

#ifndef LIBSIMDPP_DISPATCH_COLLECT_MACROS_GENERATED_H
#define LIBSIMDPP_DISPATCH_COLLECT_MACROS_GENERATED_H

#ifndef LIBSIMDPP_SIMD_H
    #error "This file must be included through simd.h"
#endif

#if SIMDPP_EMIT_DISPATCHER
''')

print('#define SIMDPP_DISPATCH_MAX_ARCHS ' + str(num_archs) + '\n')

for arch_index in range(1, num_archs + 1):
    template_vars = {'num': str(arch_index)}
    output_template(single_arch_template, template_vars)

print('''
#define SIMDPP_DISPATCH_DECLARE_FUNCTIONS(NAME,FUN_TYPE) \\''')

for arch_index in range(1, num_archs + 1):
    template_vars = {'num': str(arch_index)}
    output_template(single_fn_declare_template, template_vars)

print('''
#define SIMDPP_DISPATCH_COLLECT_FUNCTIONS(ARRAY,NAME,FUN_TYPE) \\''')

for arch_index in range(1, num_archs + 1):
    template_vars = {'num': str(arch_index)}
    output_template(single_fn_register_template, template_vars)

print('''
#endif // SIMDPP_EMIT_DISPATCHER
#endif
''')
| 1.554688 | 2 |
mywebsite/shop/templatetags/shop.py | Zadigo/ecommerce_template | 16 | 12759977 | import datetime
from django.template import Library
from django.utils.html import format_html, format_html_join
register = Library()
@register.simple_tag
def stars(score):
if score is None:
return ''
if isinstance(score, float):
score = int(score)
html_tags = format_html_join(
'\n',
'<span class="fa fa-star" value="{}"></span>',
(str(i) for i in range(0, score))
)
return html_tags
@register.simple_tag
def estimated_delivery_date(days):
current_date = datetime.datetime.now().date()
date_plus_fifteen = current_date + datetime.timedelta(days=days)
upper_date = date_plus_fifteen + datetime.timedelta(days=12)
return f"le {date_plus_fifteen.day}/{date_plus_fifteen.month} \
et le {upper_date.day}/{upper_date.month}"
| 2.515625 | 3 |
Famcy/_items_/display/displayTag/displayTag.py | nexuni/Famcy | 0 | 12759978 | import markdown
import Famcy
import json
class displayTag(Famcy.FamcyBlock):
    """Famcy block rendering a simple title/content paragraph pair."""
    def __init__(self):
        # Seed the block with the default payload, run the base-class
        # initialisation, then build the DOM skeleton.
        self.value = displayTag.generate_template_content()
        super(displayTag, self).__init__()
        self.init_block()
    @classmethod
    def generate_template_content(cls):
        """Default payload shown when no user content has been set."""
        return {
            "title": "displayTag",
            "content": "displayTag content",
        }
    def init_block(self):
        """Create the container div holding an <h3> title and an <h4> body."""
        container = Famcy.div()
        container["id"] = self.id
        container["className"] = "displayTag"
        container.addElement(Famcy.h3())
        container.addElement(Famcy.h4())
        self.body = container
    def render_inner(self):
        """Copy the current payload into the DOM nodes and return the body."""
        title_node = self.body.children[0]
        content_node = self.body.children[1]
        title_node.innerHTML = self.value["title"]
        content_node.innerHTML = self.value["content"]
        return self.body
int.p4app/nc.py | baru64/int-p4 | 3 | 12759979 | # simple netcat in python2
import sys
import socket
def netcat(host, port, is_server):
    # Minimal netcat clone (Python 2 syntax: print statement / raw_input).
    # Server mode: accept ONE TCP connection and print everything received
    # until the peer closes.  Client mode: read lines from stdin and send
    # them until an empty line is entered.
    if is_server:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind((host, port))
        s.listen(1)  # a single pending connection is enough here
        conn, _ = s.accept()
        while True:
            data = conn.recv(1024)
            if not data: break  # peer closed the connection
            print data
        conn.close()
        # NOTE(review): the listening socket 's' is never closed.
    else:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((host, port))
        while True:
            data = raw_input()
            if not data: break  # empty line ends the session
            # NOTE(review): raw_input strips the newline, so lines arrive at
            # the server without separators -- confirm this is intended.
            s.sendall(data)
        s.close()
if __name__ == "__main__":
if len(sys.argv) < 4:
print """usage\t\t nc.py TYPE HOST PORT
TYPE - 'c' for client, 'l' for server"""
else:
if sys.argv[1] == 'c':
netcat(sys.argv[2], int(sys.argv[3]), False)
elif sys.argv[1] == 'l':
netcat(sys.argv[2], int(sys.argv[3]), True)
else:
print "Bad option. Use 'l' or 'c'."
| 3.421875 | 3 |
gcpdiag/queries/gcs.py | jorisfa/gcpdiag | 63 | 12759980 | <filename>gcpdiag/queries/gcs.py
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Queries related to GCP Cloud Storage
"""
import logging
import re
from typing import Dict, Mapping
import googleapiclient.errors
from gcpdiag import caching, config, models, utils
from gcpdiag.queries import apis, iam
class Bucket(models.Resource):
  """Represents a GCS Bucket."""
  _resource_data: dict

  def __init__(self, project_id, resource_data):
    super().__init__(project_id=project_id)
    self._resource_data = resource_data
    self._metadata_dict = None

  @property
  def id(self) -> str:
    """Bucket id as reported by the GCS API."""
    return self._resource_data['id']

  @property
  def name(self) -> str:
    """Short bucket name."""
    return self._resource_data['name']

  def is_uniform_access(self) -> bool:
    """True when uniform bucket-level access is enabled."""
    return self._resource_data['iamConfiguration']['uniformBucketLevelAccess'][
        'enabled']

  @property
  def full_path(self) -> str:
    """Path portion of the bucket selfLink (e.g. 'b/my-bucket').

    Fix: the hostname dots in the pattern are now escaped so it only matches
    the literal API host instead of any character.
    """
    result = re.match(r'https://www\.googleapis\.com/storage/v1/(.*)',
                      self._resource_data['selfLink'])
    if result:
      return result.group(1)
    else:
      # Unexpected selfLink format: surface it verbatim, flagged with '>> '.
      return '>> ' + self._resource_data['selfLink']

  @property
  def short_path(self) -> str:
    """Human-friendly 'project/bucket' identifier."""
    path = self.project_id + '/' + self.name
    return path

  @property
  def labels(self) -> dict:
    """Bucket labels ({} when unset)."""
    return self._resource_data.get('labels', {})
class BucketIAMPolicy(iam.BaseIAMPolicy):
  """IAM policy wrapper for a single GCS bucket."""
  def _is_resource_permission(self, permission):
    # Bucket policies are not filtered: every permission is treated as a
    # resource-level permission.
    return True
@caching.cached_api_call(in_memory=True)
def get_bucket_iam_policy(project_id: str, bucket: str) -> BucketIAMPolicy:
  """Fetch (and memoize) the IAM policy attached to one bucket."""
  storage_api = apis.get_api('storage', 'v1', project_id)
  policy_request = storage_api.buckets().getIamPolicy(bucket=bucket)
  return iam.fetch_iam_policy(policy_request, BucketIAMPolicy, project_id,
                              bucket)
@caching.cached_api_call(in_memory=True)
def get_buckets(context: models.Context) -> Mapping[str, Bucket]:
  """Return all GCS buckets of the context project, keyed by full path.

  Returns an empty mapping when the storage API is disabled; API failures
  are re-raised as GcpApiError.
  """
  found: Dict[str, Bucket] = {}
  if not apis.is_enabled(context.project_id, 'storage'):
    return found
  gcs_api = apis.get_api('storage', 'v1', context.project_id)
  logging.info('fetching list of GCS buckets in project %s', context.project_id)
  query = gcs_api.buckets().list(project=context.project_id)
  try:
    resp = query.execute(num_retries=config.API_RETRIES)
    for bucket_data in resp.get('items', []):
      # Sanity-check the payload before wrapping it.
      if 'id' not in bucket_data:
        raise RuntimeError('missing data in bucket response')
      bucket = Bucket(project_id=context.project_id, resource_data=bucket_data)
      found[bucket.full_path] = bucket
  except googleapiclient.errors.HttpError as err:
    raise utils.GcpApiError(err) from err
  return found
| 2.203125 | 2 |
PyBuilder/utils.py | leocll/PyBuilder | 0 | 12759981 | <filename>PyBuilder/utils.py
# -*- coding: utf-8 -*-
# @Time : 2020-04-17 14:24
# @Author : leocll
# @Email : <EMAIL>
# @File : utils.py
import os
import sys
import typing
import shutil
import fnmatch
import importlib
def path4package(package: str, src: str = None) -> str:
    """Locate a package directory by name on sys.path.

    :param package: package name
    :param src: source-root path; sys.path entries under it are skipped
    :return: absolute path of the package directory
    :raises Exception: when no sys.path entry contains the package
    """
    for entry in sys.path:
        # Skip entries rooted inside the source tree itself.
        if src and entry.startswith(src):
            continue
        candidate = os.path.join(entry, package)
        if os.path.isdir(candidate):
            return candidate
    raise Exception("not found package: '%s'" % package)
def remove_pycache4dir(tree: str):
    """Recursively delete every ``__pycache__`` folder under *tree*.

    :param tree: directory to clean (non-directories are ignored)

    Bug fix: the original compared ``os.path.dirname(tree)`` (the *parent
    path*) against ``'__pycache__'``, which is never equal for an absolute
    path, so no cache folder was ever removed.  The folder's own name
    (``os.path.basename``) is the correct thing to compare.
    """
    if not os.path.isdir(tree):
        return
    tree = os.path.abspath(tree)
    if os.path.basename(tree) == '__pycache__':
        shutil.rmtree(tree)
        return
    for name in os.listdir(tree):
        remove_pycache4dir(os.path.join(tree, name))
def read_patter4file(file: str) -> typing.List[str]:
    """Read the pattern entries of a file (gitignore-style).

    Blank lines and '#' comment lines are skipped; for every other line only
    the first whitespace-separated token is kept.  Any read error yields an
    empty list (best effort).

    :param file: file path
    :return: list of pattern tokens
    """
    path = os.path.abspath(file)
    patterns = []
    try:
        with open(path, 'r') as fp:
            for raw in fp:
                stripped = raw.strip()
                if stripped and not stripped.startswith('#'):
                    patterns.append(stripped.split(' ')[0])
    except Exception:
        return []
    return patterns
def parse_patter(pats: typing.List[str]) -> typing.Tuple[typing.List[str], typing.List[str]]:
    """Expand gitignore-like patterns into fnmatch pattern lists.

    Rules:
        /xx/  matches only the root-level folder  -> xx, xx/**
        xx/   matches folders anywhere            -> xx, xx/**, **/xx, **/xx/**
        /xx   matches only the root-level file    -> xx
        xx    matches files anywhere              -> xx, **/xx
        !xx   excludes instead of includes

    :param pats: raw pattern entries
    :return: (include_patterns, exclude_patterns)
    """
    includes: typing.List[str] = []
    excludes: typing.List[str] = []
    for raw in pats:
        negated = raw.startswith('!')
        pat = raw[1:] if negated else raw
        bucket = excludes if negated else includes
        bucket.append(pat)
        is_dir = pat.endswith('/')
        rooted = pat.startswith('/')
        if is_dir and rooted:
            # /xx/ => xx + xx/**
            bucket.append(pat[1:-1])
            bucket.append(pat[1:] + '**')
        elif is_dir:
            # xx/ => xx + xx/** + **/xx + **/xx/**
            bucket.append(pat[:-1])
            bucket.append(pat + '**')
            bucket.append('**/' + pat[:-1])
            bucket.append('**/' + pat + '**')
        elif rooted:
            # /xx => xx
            bucket.append(pat[1:])
        else:
            # xx => **/xx
            bucket.append('**/' + pat)
    return includes, excludes
def match(fn: str, pats: typing.List[str], pats_no: typing.List[str]) -> bool:
    """Return True when *fn* matches any include pattern and no exclude one.

    :param fn: target name (empty/None never matches)
    :param pats: include patterns (fnmatch syntax)
    :param pats_no: exclude patterns (fnmatch syntax)
    """
    if not fn:
        return False
    included = any(fnmatch.fnmatch(fn, pat) for pat in pats)
    excluded = any(fnmatch.fnmatch(fn, pat) for pat in pats_no)
    return included and not excluded
def copy(src: str, target: str):
    """Copy a file or a whole directory tree from *src* to *target*.

    No-op when the source is missing or the target already exists; missing
    parent directories of *target* are created first.
    """
    src_abs = os.path.abspath(src)
    target_abs = os.path.abspath(target)
    if not os.path.exists(src_abs) or os.path.exists(target_abs):
        return
    mkdir_p(os.path.dirname(target_abs))
    # Directories need copytree; single files use shutil.copy.
    copier = shutil.copytree if os.path.isdir(src_abs) else shutil.copy
    copier(src_abs, target_abs)
def mkdir_p(tree: str):
    """Create directory *tree* and any missing parents (like ``mkdir -p``).

    Already-existing directories are accepted silently.  The original
    hand-rolled recursion (catch FileNotFoundError, create the parent,
    retry) is replaced by the equivalent and race-free
    ``os.makedirs(..., exist_ok=True)``.
    """
    os.makedirs(os.path.abspath(tree), exist_ok=True)
def import_module4file(file: str):
    """Import (or reuse the already-imported) module backing a source file.

    :param file: path of a .py file
    :return: the imported module object
    :raises FileNotFoundError: when the file does not exist

    Bug fix: the original iterated ``sys.modules`` *keys* (strings), so the
    ``m.__file__`` cache lookup always raised AttributeError (silently
    skipped) and, had it ever matched, would have returned the module *name*
    instead of the module object.  We now iterate the module objects.
    """
    file = os.path.abspath(file)
    if not os.path.exists(file):
        raise FileNotFoundError(file)
    # Reuse an already-imported module backed by the same file, if any.
    for mod in list(sys.modules.values()):
        if getattr(mod, '__file__', None) == file:
            return mod
    file_dir = os.path.dirname(file)
    # Temporarily extend sys.path so importlib can resolve the module name.
    added_path = file_dir not in sys.path
    if added_path:
        sys.path.append(file_dir)
    try:
        module_name = os.path.splitext(os.path.basename(file))[0]
        module = importlib.import_module(module_name)
    finally:
        if added_path:
            sys.path.remove(file_dir)
    return module
| 2.671875 | 3 |
examples/rest-api-python/src/list.py | drewfish/serverless-stack | 5,922 | 12759982 | <reponame>drewfish/serverless-stack<gh_stars>1000+
import json
from db.notes import getNotes
def main(event, context):
    """Lambda handler: return every note as a pretty-printed JSON response."""
    body = json.dumps(getNotes(), indent=2)
    return {
        "statusCode": 200,
        "body": body
    }
| 1.96875 | 2 |
day:70/Trie.py | hawaijar/FireLeetcode | 1 | 12759983 | <filename>day:70/Trie.py<gh_stars>1-10
class Node:
    """Single trie node: one character, its child map and a word-end flag."""
    def __init__(self, ch):
        self.ch = ch                   # character stored at this node
        self.children = {}             # char -> Node
        self.isWordTerminal = False    # True when a word ends at this node
class Trie:
    """Prefix tree over strings, built from Node objects."""
    def __init__(self):
        """
        Initialize your data structure here.
        """
        self.root = Node('\0')  # sentinel root; its character is unused
    def insert(self, word: str) -> None:
        """
        Inserts a word into the trie.
        """
        node = self.root
        for ch in word:
            nxt = node.children.get(ch)
            if nxt is None:
                nxt = Node(ch)
                node.children[ch] = nxt
            node = nxt
        node.isWordTerminal = True
    def get_node(self, word: str):
        """Walk the trie along *word*; return the final node or None."""
        node = self.root
        for ch in word:
            node = node.children.get(ch)
            if node is None:
                return None
        return node
    def search(self, word: str) -> bool:
        """
        Returns if the word is in the trie.
        """
        node = self.get_node(word)
        return node is not None and node.isWordTerminal
    def startsWith(self, prefix: str) -> bool:
        """
        Returns if there is any word in the trie that starts with the given prefix.
        """
        return self.get_node(prefix) is not None
# Your Trie object will be instantiated and called as such:
# obj = Trie()
# obj.insert(word)
# param_2 = obj.search(word)
# param_3 = obj.startsWith(prefix)
| 4 | 4 |
facial_expression_program_cython/Gettingthecorrectframerate.pycrawl_image.py | NhamNgocTuanAnh/Real_Time_Facial_Expression_Recognition | 0 | 12759984 | <filename>facial_expression_program_cython/Gettingthecorrectframerate.pycrawl_image.py
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <ctime>
#include <windows.h>
using namespace cv;
int main(int argc, char** argv)
{
    // Play a video file while measuring the effective playback frame rate.
    // Both the container's nominal FPS and the measured average are overlaid
    // on every frame.  Optional argv[1] overrides the default clip path.
    char* filename = argc >= 2 ? argv[1] : (char*)"c:/768x576.avi";
    VideoCapture capture(filename); // open video file
    if (!capture.isOpened()) // check if we succeeded
        return -1;
    int fps_of_video = (int)capture.get(CAP_PROP_FPS);
    int time_to_wait = 1000 / fps_of_video; // target frame period in ms
    int frameCounter = 0;
    int tick = 0;
    int fps = fps_of_video;
    std::time_t timeBegin = std::time(0);
    for (;;)
    {
        double time_start = (double)getTickCount();
        Mat frame;
        capture >> frame; // get a new frame
        if (frame.empty())
        {
            break; // end of stream
        }
        frameCounter++;
        // Once per wall-clock second, publish the counted frames as FPS.
        std::time_t timeNow = std::time(0) - timeBegin;
        if (timeNow - tick >= 1)
        {
            tick++;
            fps = frameCounter;
            frameCounter = 0;
        }
        cv::putText(frame, cv::format("Original FPS of Video=%d", fps_of_video), cv::Point(30, 50), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 255));
        cv::putText(frame, cv::format("Average FPS=%d", fps), cv::Point(30, 80), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 255));
        imshow("Video Player", frame);
        if (waitKey(1) == 27) break; // ESC quits
        // wait for some time to correct FPS
        while (time_to_wait > ((double)getTickCount() - time_start) / getTickFrequency() * 1000)
        {
            //http://stackoverflow.com/questions/1157209/is-there-an-alternative-sleep-function-in-c-to-milliseconds
            Sleep(1); // sleep in 1 ms steps until the frame period elapses (Windows-only)
        }
    }
    return 0;
}
| 2.53125 | 3 |
scripts/clashFunctions.py | XiyuChenFAU/kgs_vibration_entropy | 1 | 12759985 | <reponame>XiyuChenFAU/kgs_vibration_entropy<filename>scripts/clashFunctions.py
#!/usr/bin/python
import sys
import os
import numpy as np
import fnmatch
import matplotlib.colors as colors
import matplotlib.cm as cmx
import matplotlib as mpl
mpl.use('Agg')
from combineKGSPath_steps import extractPath
from math import log
import csv
from matplotlib.ticker import MultipleLocator, LinearLocator, FormatStrFormatter
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from matplotlib.lines import Line2D
import operator
def _parseClashesWithPareto(pathIds):
    """Scan kgs_planner.log and collect the accepted clash constraints for
    the given iteration ids (in order).

    Iterations in the log are separated by blank lines (1-based index).
    A pending clash set is discarded on 'Rejected!' or 'Size of pareto
    front' lines and committed on 'New structure:' lines.

    :param pathIds: iteration ids to inspect, in path order
    :return: one list of (minAtomId, maxAtomId) tuples per accepted
             configuration
    """
    clashesPerConf = []
    currentClashes = []
    atClash = 0
    currentPathEntry = 0
    currentIt = 1
    with open("kgs_planner.log", "r") as log_file:
        for line in log_file:
            if line == '\n':
                currentIt += 1  # blank line separates iterations
                continue
            if currentIt != pathIds[currentPathEntry]:
                continue  # iteration not on the path
            if "Using clash constraint for atoms:" in line:
                if atClash == 0:
                    currentClashes = []
                atClash = 1
                tokens = line.split(' ')
                id1 = int(tokens[5])
                id2 = int(tokens[6])
                currentClashes.append((min(id1, id2), max(id1, id2)))
            if "Rejected!" in line:
                atClash = 0
                currentClashes = []
            if "Size of pareto front" in line:
                atClash = 0
                currentClashes = []
            if "New structure: " in line:
                if atClash == 1:
                    clashesPerConf.append(currentClashes)
                if currentPathEntry == len(pathIds) - 1:
                    break
                currentPathEntry = currentPathEntry + 1
                atClash = 0
                currentClashes = []
    return clashesPerConf

def getClashes(pdbPath, pathList, reversePathList):
    """Return the forward and reverse clash lists of a KGS transition path.

    :param pdbPath: unused (kept for interface compatibility)
    :param pathList: iteration ids of the forward path, in path order
    :param reversePathList: iteration ids of the reverse path, in path order
    :return: (fwdClashes, revClashes); the reverse list is flipped into
             forward order.

    Cleanup: the large blocks of commented-out legacy parsing code were
    removed and the (identical) forward/reverse scans were factored into
    _parseClashesWithPareto; the parsing logic itself is unchanged.
    """
    fwdClashes = _parseClashesWithPareto(pathList)
    revClashes = _parseClashesWithPareto(reversePathList)
    revClashes.reverse()
    return fwdClashes, revClashes
def _scanPathClashes(pathIds):
    """Scan kgs_planner.log and collect the accepted clash constraints for
    the given iteration ids (in order).  Unlike the getClashes variant, no
    reset happens on 'Size of pareto front' lines (behavior preserved from
    the original getAllClashes).

    :param pathIds: iteration ids to inspect, in path order
    :return: one list of (minAtomId, maxAtomId) tuples per accepted
             configuration
    """
    clashesPerConf = []
    currentClashes = []
    atClash = 0
    currentPathEntry = 0
    currentIt = 1
    with open("kgs_planner.log", "r") as log_file:
        for line in log_file:
            if line == '\n':
                currentIt += 1  # blank line separates iterations
                continue
            if currentIt != pathIds[currentPathEntry]:
                continue  # iteration not on the path
            if "Using clash constraint for atoms:" in line:
                if atClash == 0:
                    currentClashes = []
                atClash = 1
                tokens = line.split(' ')
                id1 = int(tokens[5])
                id2 = int(tokens[6])
                currentClashes.append((min(id1, id2), max(id1, id2)))
            if "Rejected!" in line:
                atClash = 0
                currentClashes = []
            if "New structure: " in line:
                if atClash == 1:
                    clashesPerConf.append(currentClashes)
                if currentPathEntry == len(pathIds) - 1:
                    break
                currentPathEntry = currentPathEntry + 1
                atClash = 0
                currentClashes = []
    return clashesPerConf

def getAllClashes(pdbPath, pathList, reversePathList):
    """Return the combined clash list along the full forward+reverse path.

    :param pdbPath: unused (kept for interface compatibility)
    :param pathList: iteration ids of the forward path, in path order
    :param reversePathList: iteration ids of the reverse path, in path order
    :return: list (one entry per path configuration, forward order) of
             (minAtomId, maxAtomId) clash tuples

    Bug fix: the original ended with ``revClashes.reverse`` (no call
    parentheses), a no-op, so the reverse-path entries were appended in
    reverse-path order instead of being flipped into forward order -- in
    contrast to getClashes, which calls reverse() correctly.  Dead
    commented-out legacy code was also removed.
    """
    fwdClashes = _scanPathClashes(pathList)
    revClashes = _scanPathClashes(reversePathList)
    revClashes.reverse()
    allClashes = fwdClashes
    allClashes.extend(revClashes)
    return allClashes
def getAtomResidueList(pdbFileIn):
    """Map atom serial numbers to residue sequence numbers for a PDB file.

    :param pdbFileIn: path to a PDB file
    :return: dict {atomId: resId} built from the ATOM records

    Cleanup: removed a stray Python-2 debug statement (``print os.getcwd()``)
    that leaked the working directory to stdout and made the function
    Python-3-incompatible.
    """
    atomResidueList = {}
    with open(pdbFileIn, "r") as pdbFile:
        for line in pdbFile:
            if "ATOM" == str(line[0:4]):
                # Atom serial number (columns 5-11 of the record).
                atomId = int(str(line[4:11]).lstrip())
                # Residue sequence number (columns 23-26 of the record).
                resId = int(line[22:26].lstrip())
                atomResidueList[atomId] = resId
    return atomResidueList
def collectAtomClashes(allClashes):
    """Count atom-pair clashes over all configurations.

    :param allClashes: list (per configuration) of (id1, id2) clash tuples
    :return: list of ((id1, id2), count) sorted by descending count
    """
    counts = {}
    for confClashes in allClashes:
        for pair in confClashes:
            counts[pair] = counts.get(pair, 0) + 1
    ranked = sorted(counts.items(), key=operator.itemgetter(1))
    ranked.reverse()
    return ranked
def collectAllResidues(residueCollection, atomClashes, atomResidueList):
    """Accumulate per-residue clash counts into *residueCollection*.

    :param residueCollection: dict {resId: count}, updated in place
    :param atomClashes: list of ((atomId1, atomId2), count) entries
    :param atomResidueList: dict {atomId: resId}
    :return: the updated residueCollection; residues never involved in a
             clash are included with count 0
    """
    for pair, count in atomClashes:
        firstRes = atomResidueList[pair[0]]
        secondRes = atomResidueList[pair[1]]
        residueCollection[firstRes] = residueCollection.get(firstRes, 0) + count
        # Skip the second residue when both atoms belong to the same residue,
        # so internal clashes are not double counted.
        if secondRes != firstRes:
            residueCollection[secondRes] = residueCollection.get(secondRes, 0) + count
    # Ensure every residue appears, even with zero clashes.
    for res in atomResidueList.values():
        residueCollection.setdefault(res, 0)
    return residueCollection
def collectResidueClashes(clashCollection, atomClashes, atomResidueList):
    """Accumulate pairwise residue clash counts (unsorted).

    :param clashCollection: dict {(resId1, resId2): count}, updated in place
    :param atomClashes: list (per configuration) of (atomId1, atomId2) tuples
    :param atomResidueList: dict {atomId: resId}
    :return: the updated clashCollection
    """
    for confClashes in atomClashes:
        for atom1, atom2 in confClashes:
            resPair = (atomResidueList[atom1], atomResidueList[atom2])
            clashCollection[resPair] = clashCollection.get(resPair, 0) + 1
    return clashCollection
def convertAtomClashesToResidueClashes(clashCollection, atomResidueList,
                                       minClashNumber=1, numRuns=1):
    """Convert atom-id clash counts into inter-residue clash counts.

    Counts are averaged over *numRuns* (rounded); pairs below
    *minClashNumber* and intra-residue pairs are dropped.

    :param clashCollection: list of ((atomId1, atomId2), count) entries
    :param atomResidueList: dict {atomId: resId}
    :return: dict {(resId1, resId2): accumulated average count}
    """
    residueLinks = {}
    for (atom1, atom2), rawCount in clashCollection:
        avgCount = round(rawCount / numRuns)
        if avgCount < minClashNumber:
            continue
        res1 = atomResidueList[atom1]
        res2 = atomResidueList[atom2]
        if res1 == res2:
            continue  # ignore clashes internal to one residue
        pairKey = (res1, res2)
        residueLinks[pairKey] = residueLinks.get(pairKey, 0) + avgCount
    return residueLinks
def convertResidueClashesToLinks(clashCollection, minClashNumber=1, numRuns=1,
                                 pdbFile=None):
    """Filter residue-pair clash counts by their per-run average.

    :param clashCollection: dict {(resId1, resId2): total count}
    :param minClashNumber: minimum average count per run to keep a pair
    :param numRuns: number of runs the totals are averaged over
    :param pdbFile: unused (kept for interface compatibility)
    :return: dict {(resId1, resId2): average count} of surviving pairs
    """
    residueLinks = {}
    for resPair, totalCount in clashCollection.items():
        if resPair[0] == resPair[1]:
            continue  # skip intra-residue entries
        avgCount = round(totalCount / numRuns)
        if avgCount >= minClashNumber:
            residueLinks[resPair] = avgCount
    return residueLinks
def convertClashesToResidueNetworks(clashCollection, minClashNumber=1, numRuns=1):
    """Merge residue-pair clashes into contiguous steric networks.

    :param clashCollection: list of ((resId1, resId2), count) entries
    :param minClashNumber: minimum average count per run for a pair to join
    :param numRuns: number of runs the counts are averaged over
    :return: (residues, numSets) where residues maps resId -> network id
             (1-based, largest network first) and numSets is the number of
             networks.

    Bug fix: the original indexed ``listOfSets[0]`` unconditionally, raising
    IndexError whenever no pair passed the threshold; that case now returns
    an empty mapping and 0 networks.
    """
    pairSets = []
    for (res1, res2), rawCount in clashCollection:
        if res1 == res2:
            continue
        if round(rawCount / numRuns) >= minClashNumber:
            pairSets.append(set([res1, res2]))
    if not pairSets:
        return {}, 0
    # Repeatedly merge overlapping sets until a fixed point is reached.
    while True:
        merged_one = False
        supersets = [pairSets[0]]
        for s in pairSets[1:]:
            in_super_set = False
            for ss in supersets:
                if s & ss:
                    ss |= s
                    merged_one = True
                    in_super_set = True
                    break
            if not in_super_set:
                supersets.append(s)
        if not merged_one:
            break
        pairSets = supersets
    supersets.sort(key=len, reverse=True)  # largest network gets id 1
    residues = {}
    netId = 1
    for net in supersets:
        for res in net:
            residues[res] = netId
        netId += 1
    return residues, len(supersets)
def convertClashesToAllResidues(clashCollection, atomResidueList):
    """Aggregate atom-pair clash counts per residue (zero-filled) and rank.

    :param clashCollection: list of ((atomId1, atomId2), count) entries
    :param atomResidueList: dict {atomId: resId}
    :return: (ranked, maxVal); ranked is [(resId, count), ...] sorted by
             descending count, maxVal is the largest count (kept for callers
             that want to normalize -- normalization itself stays disabled).
    """
    perResidue = {}
    for (atom1, atom2), count in clashCollection:
        res1 = atomResidueList[atom1]
        res2 = atomResidueList[atom2]
        perResidue[res1] = perResidue.get(res1, 0) + count
        # Avoid double counting clashes internal to one residue.
        if res2 != res1:
            perResidue[res2] = perResidue.get(res2, 0) + count
    # Residues never involved in a clash still get an entry (count 0).
    for res in atomResidueList.values():
        perResidue.setdefault(res, 0)
    maxVal = max(perResidue.values()) if perResidue else 0
    ranked = sorted(perResidue.items(), key=operator.itemgetter(1))
    ranked.reverse()
    return ranked, maxVal
def pdbAlterBFactor(pdb_file, clashResidues):
    """Rewrite the temperature-factor column of a PDB file in memory.

    For every ATOM/TER record the B-factor field (columns 61-66) is replaced
    by the clash count of the record's residue (0 when the residue has no
    entry in *clashResidues*).  All other lines pass through unchanged.

    :param pdb_file: path of the input PDB file
    :param clashResidues: dict {resId: value}
    :return: list of rewritten lines

    Bug fix: the original resumed the record at index 67 instead of 66 after
    writing the 6-character B-factor field, silently deleting one column
    from every rewritten line and shifting the element/charge fields.
    """
    with open(pdb_file, 'r') as handle:
        pdb_lines = handle.readlines()
    out = []
    for line in pdb_lines:
        if line[0:6] == "ATOM  " or line[0:6] == "TER   ":
            resId = int(line[22:26])
            val = clashResidues.get(resId, 0)
            # Columns 1-60 | 6-char B-factor | rest of the record (col 67+).
            out.append("%s%6s%s" % (line[0:60], val, line[66:]))
        else:
            out.append(line)
    return out
| 2.140625 | 2 |
tests/integration/pybaseball/test_statcast_batter.py | mwisnie5/pybaseball | 1 | 12759986 | import pandas as pd
from pybaseball.statcast_batter import statcast_batter, statcast_batter_exitvelo_barrels
def test_statcast_batter_exitvelo_barrels() -> None:
    # Exit-velocity/barrels leaderboard for the 2019 season: expect the
    # full 250-player table with all 19 columns.
    data: pd.DataFrame = statcast_batter_exitvelo_barrels(2019)
    assert data is not None
    assert not data.empty
    assert len(data.columns) == 19
    assert len(data) == 250
def test_statcast_batter() -> None:
    # Pitch-level data for one batter (MLBAM id 642715) over the full 2019
    # season: expect 2418 rows of 89 Statcast columns.
    data: pd.DataFrame = statcast_batter('2019-01-01', '2019-12-31', 642715)
    assert data is not None
    assert not data.empty
    assert len(data.columns) == 89
    assert len(data) == 2418
| 2.40625 | 2 |
WebSite/PrayerWall/bookings/views.py | Tinka8ell/Prayer-Wall | 0 | 12759987 | <gh_stars>0
# Create your views here.
import datetime, os, time, sys
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
from django.shortcuts import get_object_or_404, get_list_or_404, render
from django.urls import reverse
from django.views import generic
from django.db.models import F
from django.utils import timezone
from django.template import loader
from .models import Event, Location, Schedule, Slot, Booking, BookingForm
def index(request):
    """List every Event with its formatted start date."""
    now = datetime.datetime.now()
    timeString = now.strftime("%Y-%m-%d %H:%M")
    all_events = get_list_or_404(Event)
    event_rows = [
        {
            'name': ev.name,
            'start': ev.start_date.strftime("%A, %d %b %Y from %H:%M"),
            'number': ev.pk,
        }
        for ev in all_events
    ]
    templateData = {
        'title' : 'All events',
        'count': len(all_events),
        'events': event_rows,
        'size': len(event_rows),
        'time': timeString,
    }
    return render(request, 'bookings/events.html', templateData)
def current(request):
    """Redirect to the event page of the most recently created event.

    Cleanup: removed the unused ``now``/``timeString``/``number`` locals
    that were computed but never used by the redirect.
    """
    events = get_list_or_404(Event)
    latest = events[-1]
    return HttpResponseRedirect(reverse('bookings:event', args=(latest.pk,)))
def event(request, number):
    """Show all locations (schedules) available for event *number*."""
    now = datetime.datetime.now()
    timeString = now.strftime("%Y-%m-%d %H:%M")
    event = get_object_or_404(Event, pk=number)
    isWatch = event.isWatch
    start = event.start_date.strftime("%A, %d %b %Y from %H:%M")
    # NOTE(review): debug trace to stderr left in place -- remove once stable.
    print(f"Event: {number}, start: {start}, isWatch = {isWatch}", file=sys.stderr)
    schedules = Schedule.objects.filter(event=event.pk)
    data = []
    for schedule in schedules:
        # One entry per schedule, carrying the location it runs at.
        location = Location.objects.get(pk=schedule.location.pk)
        loc = {'name': location.name,
            'size': location.size,
            'schedule': schedule.pk,
            }
        data.append(loc)
    templateData = {
        'title' : event.name,
        'start': start,
        'number': event.pk,
        'length': event.length,
        'locations': data,
        'time': timeString,
    }
    print(f"Event: {number}, start: {start}, isWatch = {isWatch}, about to render", file=sys.stderr)
    return render(request, 'bookings/locations.html', templateData)
def schedule(request, schedule):
    """Show the booking grid (slots and their bookings) for one Schedule."""
    now = datetime.datetime.now()
    timeString = now.strftime("%Y-%m-%d %H:%M")
    schedule_object = get_object_or_404(Schedule, pk=schedule)
    event = schedule_object.event
    # NOTE(review): 'number' is assigned but never used below.
    number = event.pk
    print(f"Schedule: {schedule}", file=sys.stderr)
    isWatch = event.isWatch
    start = event.start_date.strftime("%A, %d %b %Y from %H:%M")
    if isWatch:
        # Watch events span whole days, so drop the time-of-day part.
        start = event.start_date.strftime("%A, %d %b %Y")
    print(f"Schedule: {schedule}, start: {start}, isWatch = {isWatch}", file=sys.stderr)
    # get alternative venues
    alternatives = Schedule.objects.filter(event=event.pk).exclude(pk=schedule)
    alt = []
    for alternate in alternatives:
        alt.append((alternate.id, alternate.location.name))
    location = schedule_object.location
    bookings = {}
    loc = {'name': location.name,
        'size': location.size,
        'schedule': schedule,
        }
    slots = Slot.objects.filter(schedule=schedule).order_by('time', 'watch')
    loc['count'] = len(slots)
    for slot in slots:
        people = []
        # NOTE(review): this local 'time' shadows the imported time module.
        time = slot.time.strftime("%a %H:%M")
        if slot.watch != Slot.Part.HOURLY:
            time = slot.time.strftime("%a %d %b: ") + Slot.Part(slot.watch).label
        booked = Booking.objects.filter(slot=slot.pk)
        count = 0
        for booking in booked:
            count += booking.number
            people.append(booking.person)
        if count == 0:
            status = 'slotFree' # colour if available
        else:
            if location.size > 0 and count >= location.size: # so full
                status = 'slotFull' # colour if not available
            else:
                status = 'slotCovered' # colour if covered
        bookings[time] = { 'people': people,
            'count': count,
            'status': status,
            'slot': slot.pk,
            'watch': slot.watch,
            }
    loc['bookings'] = bookings
    templateData = {
        'title' : event.name,
        'start': start,
        'number': event.pk,
        'length': event.length,
        'time': timeString,
        'location': loc,
        'alternatives': alt,
        'isWatch': isWatch,
    }
    print(f"Schedule: {schedule}, start: {start}, isWatch = {isWatch}, about to render", file=sys.stderr)
    return render(request, 'bookings/bookings.html', templateData)
def booking(request, slot):
    """Render and process the booking form for a single Slot.

    GET shows an empty BookingForm; POST validates and saves a Booking
    bound to the slot, then redirects back to the schedule page. An
    invalid POST falls through to re-render the form with its errors.
    """
    now = datetime.datetime.now()
    timeString = now.strftime("%Y-%m-%d %H:%M")
    # need to detect what sort of slot (hourly vs watch part)!
    slot_object = get_object_or_404(Slot, pk=slot)
    schedule = slot_object.schedule
    event = schedule.event
    isWatch = event.isWatch
    # NOTE(review): 'start' is computed here but never placed in
    # templateData -- looks like dead code; confirm before removing.
    start = event.start_date.strftime("%A, %d %b %Y from %H:%M")
    if isWatch:
        start = event.start_date.strftime("%A, %d %b %Y")
    location = schedule.location
    # Hourly slots show "Mon HH:MM"; watch parts show date + part label.
    time = slot_object.time.strftime("%a %H:%M")
    if slot_object.watch != Slot.Part.HOURLY:
        time = slot_object.time.strftime("%a %d %b: ") + Slot.Part(slot_object.watch).label
    templateData = {
        'title' : event.name,
        'location': location.name,
        'slot': time,
        'number': slot,
        'time': timeString,
        'isWatch': isWatch,
        'watch': slot_object.watch,
    }
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        # create a form instance and populate it with data from the request:
        booking = Booking(slot=slot_object)
        form = BookingForm(request.POST, instance=booking)
        # check whether it's valid:
        if form.is_valid():
            # process the data in form.cleaned_data as required
            booking.save()  # commit save with the foreign key
            # redirect to a new URL:
            return HttpResponseRedirect(reverse('bookings:schedule', args = (schedule.pk,)))
        # else drop to main return as errors ...
    # if a GET (or any other method) we'll create a blank form
    else:
        form = BookingForm()
    templateData['form'] = form
    return render(request, 'bookings/booking.html', templateData)
def json(request):
    """Return a JSON summary of every event.

    Response shape: {event_pk: {name, start, length, schedules}}, where
    'schedules' comes from getJson(). Raises Http404 when no events exist
    (via get_list_or_404).
    """
    # Removed unused now/timeString locals: the payload never included them.
    events = get_list_or_404(Event)
    data = {}
    for event in events:
        start = event.start_date.strftime("%A, %d %b %Y from %H:%M")
        number = event.pk
        schedules = getJson(number)
        data[number] = {'name': event.name,
                        'start': start,
                        'length': event.length,
                        'schedules': schedules,
                        }
    return JsonResponse(data)
def jsonCurrent(request):
    """Return the JSON description of the latest event.

    NOTE(review): "latest" is taken as the last element of the default
    queryset ordering; this is only deterministic if Event defines
    Meta.ordering (e.g. by pk or date) -- confirm.
    """
    # Removed unused now/timeString locals (dead code).
    events = get_list_or_404(Event)
    latest = events[-1]
    return returnJsonEvent(request, latest)
def jsonEvent(request, number):
    """Return the JSON description of the event with primary key *number*.

    Raises Http404 when no such event exists.
    """
    # Removed unused now/timeString locals (dead code).
    event = get_object_or_404(Event, pk=number)
    return returnJsonEvent(request, event)
def returnJsonEvent(request, event):
    """Build and return the JSON payload describing a single event."""
    payload = {
        'name': event.name,
        'start': event.start_date.strftime("%A, %d %b %Y from %H:%M"),
        'schedules': getJson(event.pk),
    }
    return JsonResponse(payload)
def getJson(number):
    """Return {schedule_pk: {name, size, slots}} for every schedule of event *number*.

    'slots' is the per-slot booking summary from getSlots().
    """
    data = {}
    schedules = Schedule.objects.filter(event=number)
    for schedule in schedules:
        # schedule.location already resolves the related Location row;
        # the old Location.objects.get(pk=schedule.location.pk) issued a
        # second, redundant database query per schedule.
        location = schedule.location
        sched = schedule.pk
        slots = getSlots(sched)
        data[sched] = {'name': location.name,
                       'size': location.size,
                       'slots': slots,
                       }
    return data
def getSlots(number):
    """Return {"<weekday HH:MM>": {"people": [...]}} for each slot of schedule *number*."""
    result = {}
    for slot in Slot.objects.filter(schedule=number).order_by('time'):
        attendees = [
            {'person': booking.person, 'number': booking.number}
            for booking in Booking.objects.filter(slot=slot.pk)
        ]
        result[slot.time.strftime("%a %H:%M")] = {'people': attendees}
    return result
| 2.171875 | 2 |
pychron/options/views/views.py | aelamspychron/pychron | 1 | 12759988 | # ===============================================================================
# Copyright 2015 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import Property
from traitsui.api import UItem, Item, HGroup, VGroup, EnumEditor
# ============= standard library imports ========================
# ============= local library imports ==========================
from traitsui.editors import TabularEditor
from traitsui.tabular_adapter import TabularAdapter
from pychron.core.helpers.traitsui_shortcuts import okcancel_view
from pychron.envisage.icon_button_editor import icon_button_editor
class SubviewAdapter(TabularAdapter):
    """Tabular adapter rendering each subview name in one unlabeled column."""
    columns = [('', 'name')]
    # Computed column text; resolved by _get_name_text below.
    name_text = Property
    font = '10'
    def _get_name_text(self):
        # Each list item is itself the display string for the row.
        return self.item
def view(title):
    """Build the ok/cancel dialog used to edit plot options.

    Args:
        title: Window title string.

    Returns:
        An okcancel_view combining the options toolbar, the subview name
        list, and the editor for the currently selected subview.
    """
    # Toolbar: options selector plus save / save-as / add / delete /
    # factory-default icon buttons, each bound to a controller handler.
    agrp = HGroup(Item('selected', show_label=False,
                       editor=EnumEditor(name='names'),
                       tooltip='List of available plot options'),
                  icon_button_editor('controller.save_options', 'disk',
                                     tooltip='Save changes to options'),
                  icon_button_editor('controller.save_as_options', 'save_as',
                                     tooltip='Save options with a new name'),
                  icon_button_editor('controller.add_options',
                                     'add',
                                     tooltip='Add new plot options'),
                  icon_button_editor('controller.delete_options',
                                     'delete',
                                     tooltip='Delete current plot options',
                                     enabled_when='delete_enabled'),
                  icon_button_editor('controller.factory_default', 'edit-bomb',
                                     enabled_when='selected',
                                     tooltip='Apply factory defaults'))
    # Left pane: read-only list of subview names; selection drives
    # 'selected_subview' on the model.
    sgrp = UItem('subview_names',
                 width=-120,
                 editor=TabularEditor(editable=False,
                                      adapter=SubviewAdapter(),
                                      selected='selected_subview'))
    # Right pane: editor for the currently selected subview.
    ogrp = UItem('subview',
                 style='custom')
    bgrp = HGroup(sgrp, ogrp)
    v = okcancel_view(VGroup(agrp, bgrp),
                      width=800,
                      height=750,
                      resizable=True,
                      title=title)
    return v
# ============= EOF =============================================
| 1.359375 | 1 |
games_seeker/nl_controller/controller.py | sgg10/games_seeker | 0 | 12759989 | <filename>games_seeker/nl_controller/controller.py<gh_stars>0
import sys
import subprocess
import yake
import nltk
import click
def create_extractor(lang='en', max_ngram=1, deduplication_threshold=0.9, num_df_keywords=10):
    """Build a yake.KeywordExtractor with the given settings.

    Args:
        lang: Language code passed to yake.
        max_ngram: Maximum n-gram size for candidate keywords.
        deduplication_threshold: Similarity limit for dropping near-duplicates.
        num_df_keywords: Number of top keywords the extractor returns.
    """
    return yake.KeywordExtractor(
        lan=lang,
        n=max_ngram,
        dedupLim=deduplication_threshold,
        top=num_df_keywords,
        features=None
    )
def get_keywords(text, extractor=None):
    """Extract keywords from *text*.

    Args:
        text: Input text to analyse.
        extractor: Optional keyword extractor exposing extract_keywords().
            When omitted, a default one is created lazily. (The old
            signature built it eagerly at module import time, because
            default arguments are evaluated once at function definition.)

    Returns:
        Tuple (keywords, words): the raw (keyword, score) pairs from the
        extractor, and the set of the keyword strings alone.
    """
    if extractor is None:
        extractor = create_extractor()
    keywords = extractor.extract_keywords(text)
    words = {kw for kw, _score in keywords}
    return keywords, words
class NaturalLanguageController:
    """Maps a free-text (Spanish) user request onto one of the CLI subcommands.

    On construction it downloads the required NLTK corpora, expands a set
    of Spanish seed verbs into WordNet synonym sets per action, and
    prepares subprocess wrappers that re-invoke the CLI with the matching
    subcommand.
    """
    def __init__(self):
        # One-time NLTK data downloads (WordNet and taggers); required
        # before the wn.synsets() calls below can work.
        nltk.download("book")
        nltk.download("omw")
        nltk.download("wordnet")
        nltk.download('omw-1.4')
        nltk.download('averaged_perceptron_tagger')
        from nltk.corpus import wordnet as wn
        click.clear()
        # Re-invoke the same entry point: installed "games_seeker" console
        # script if that is how we were launched, else "python main.py".
        initial_command = ["games_seeker"] if "games_seeker" in sys.argv[0].split("/") else ["python", "main.py"]
        self.functions = {
            "builder": lambda: subprocess.run(initial_command + ["builder"]),
            "can_run_game": lambda: subprocess.run(initial_command + ["can_run_game"]),
            "classifier": lambda: subprocess.run(initial_command + ["classifier"]),
            "improver": lambda: subprocess.run(initial_command + ["improver"]),
        }
        # Spanish seed verbs per action; WordNet expands each into synsets.
        ss = {
            "builder": [
                wn.synsets("Armar", lang="spa"),
                wn.synsets("Comprar", lang="spa"),
            ],
            "run": [
                wn.synsets("ejecutar", lang="spa"),
                wn.synsets("Jugar", lang="spa"),
            ],
            "improver": [
                wn.synsets("Mejorar", lang="spa"),
                wn.synsets("Arreglar", lang="spa"),
                wn.synsets("Ajustar", lang="spa"),
                wn.synsets("Actualizar", lang="spa"),
            ],
            "classifier": [
                wn.synsets("Clasificar", lang="spa"),
                wn.synsets("Categorizar", lang="spa"),
                wn.synsets("Evaluar", lang="spa"),
            ],
        }
        # Flatten the synsets into per-action keyword sets. Multi-word
        # lemmas (containing '_') are skipped; the English command verb
        # itself is always included.
        self.associated_keywords = {
            "builder": set(
                [
                    name for _ss in ss["builder"] for syn in _ss
                    for name in syn.lemma_names() if "_" not in name
                ]
            ) | {"build"},
            "can_run_game": set(
                [
                    name for _ss in ss["run"] for syn in _ss
                    for name in syn.lemma_names() if "_" not in name
                ]
            ) | {"run"},
            "improver": set(
                [
                    name for _ss in ss["improver"] for syn in _ss
                    for name in syn.lemma_names() if "_" not in name
                ]
            ) | {"improve"},
            "classifier": set(
                [
                    name for _ss in ss["classifier"] for syn in _ss
                    for name in syn.lemma_names() if "_" not in name
                ]
            ) | {"classify"}
        }
    def run(self):
        """Prompt the user (up to 3 attempts) and return (callable, action).

        The user's text is keyword-extracted; the first action whose
        keyword set intersects the extracted words wins. Returns a
        fallback printer and None when no attempt matched.
        """
        tries = 3
        while tries != 0:
            text = input("What do you do? ").lower()
            _, words = get_keywords(text)
            for function, associated in self.associated_keywords.items():
                diff = words & associated
                if diff:
                    return self.functions[function], function
            print("Can you be more clear?")
            tries -= 1
        return lambda: print("Sorry, I couldn't process your request"), None
| 2.625 | 3 |
tdc/generation/retrosyn.py | ypapanik/TDC | 1 | 12759990 | <filename>tdc/generation/retrosyn.py
# -*- coding: utf-8 -*-
# Author: <NAME>
# License: MIT
import warnings
warnings.filterwarnings("ignore")
from . import generation_dataset
from ..metadata import dataset_names
from ..utils import create_fold
class RetroSyn(generation_dataset.PairedDataLoader):
    """Data loader class accessing the retrosynthesis prediction task."""

    def __init__(self, name, path='./data', print_stats=False, input_name='product', output_name='reactant'):
        """Create a data loader object for the retrosynthesis prediction task.

        The goal is to predict the set of reactants given a reaction
        product. (The previous docstring described the *forward* reaction
        task -- a copy-paste error; this loader maps product -> reactant.)

        Args:
            name (str): the name of the dataset
            path (str, optional): the path to the saved data file.
            print_stats (bool, optional): whether to print the basic statistics
            input_name (str, optional): column holding the input molecules (product)
            output_name (str, optional): column holding the output molecules (reactant)
        """
        super().__init__(name, path, print_stats, input_name, output_name)

    def get_split(self, method='random', seed=42, frac=None, include_reaction_type=False):
        """Return the data splitted as train, valid, test sets.

        Arguments:
            method (str): splitting schemes: random, scaffold
            seed (int): random seed, default 42
            frac (list of float): ratio of train/val/test split;
                defaults to [0.7, 0.1, 0.2] (None sentinel avoids a shared
                mutable default argument)
            include_reaction_type (bool): whether or not to include reaction
                type in the split

        Returns:
            pandas DataFrame/dict: a dataframe of the dataset

        Raises:
            ValueError: reaction types were requested but are unavailable
            AttributeError: an unsupported split method was requested
        """
        if frac is None:
            frac = [0.7, 0.1, 0.2]
        df = self.get_data(format='df')
        if include_reaction_type:
            from ..utils import get_reaction_type
            try:
                rt = get_reaction_type(self.name)
                df['reaction_type'] = rt
            except Exception as exc:
                # Chain the original failure so the root cause stays visible;
                # the old bare "except:" also swallowed KeyboardInterrupt.
                raise ValueError('Reaction Type Unavailable for ' + str(self.name) + '! Please turn include_reaction_type to be false!') from exc
        if method == 'random':
            return create_fold(df, seed, frac)
        else:
            raise AttributeError("Please use the correct split method")
muninn/storage/base.py | stcorp/muninn | 3 | 12759991 | import os.path
import muninn.util as util
class StorageBackend(object):
    """Abstract interface for muninn product storage backends.

    Concrete backends override the NotImplementedError methods below.
    """
    def __init__(self):
        # Whether the backend can store products as symbolic links.
        self.supports_symlinks = False
        # Prefix prepended to all product paths within the storage.
        self.global_prefix = ''
    def get_tmp_root(self, product):
        """Create and return a temp working dir for *product*, or None.

        NOTE(review): relies on self._tmp_root being provided by the
        subclass; it is not initialized in this base __init__ -- confirm
        every subclass defines it.
        """
        if self._tmp_root:
            tmp_root = os.path.join(self._tmp_root, product.core.archive_path)
            util.make_path(tmp_root)
            return tmp_root
    def run_for_product(self, product, fn, use_enclosing_directory):
        """Fetch the product into a temp dir and return fn(list_of_paths)."""
        tmp_root = self.get_tmp_root(product)
        product_path = self.product_path(product)
        with util.TemporaryDirectory(dir=tmp_root, prefix=".run_for_product-",
                                     suffix="-%s" % product.core.uuid.hex) as tmp_path:
            self.get(product, product_path, tmp_path, use_enclosing_directory)
            paths = [os.path.join(tmp_path, basename) for basename in os.listdir(tmp_path)]
            return fn(paths)
    def prepare(self):
        """Prepare storage for use."""
        raise NotImplementedError()
    def exists(self):
        """Check that storage exists."""
        raise NotImplementedError()
    def initialize(self, configuration):
        """Initialize storage."""
        raise NotImplementedError()
    def destroy(self):
        """Destroy storage."""
        raise NotImplementedError()
    def product_path(self, product):  # TODO refactor away?
        """Return the product path within storage."""
        raise NotImplementedError()
    # TODO lower-granularity put/get/delete
    def put(self, paths, properties, use_enclosing_directory, use_symlinks=None,
            retrieve_files=None, run_for_product=None):
        """Place product file(s) into storage."""
        raise NotImplementedError()
    def get(self, product, product_path, target_path, use_enclosing_directory, use_symlinks=None):
        """Retrieve product file(s) from storage."""
        raise NotImplementedError()
    def size(self, product_path):
        """Return product storage size."""
        raise NotImplementedError()
    def delete(self, product_path, properties):
        """Delete product file(s) from storage."""
        raise NotImplementedError()
    def move(self, product, archive_path, paths=None):
        """Move product."""
        raise NotImplementedError()
configs/togal/standard_unet_02_10.py | Togal-ai-Team/mmsegmentation | 0 | 12759992 | <reponame>Togal-ai-Team/mmsegmentation
# MMSegmentation config: U-Net (timm EfficientNetV2-B3 encoder) with an FCN
# decode head for 2-class segmentation on the togal dataset.
_base_ = [
    '../_base_/datasets/togal.py',
    '../_base_/default_runtime.py', '../_base_/schedules/schedule_160k.py'
]
# model settings
norm_cfg = dict(type='BN', requires_grad=True)
model = dict(
    type='EncoderDecoder',
    pretrained=None,
    backbone=dict(
        type='Unet',
        encoder_name="tu-tf_efficientnetv2_b3",
        encoder_depth=5
    ),
    decode_head=dict(
        type='FCNHead',
        in_channels=16,
        in_index=-1,  # take the last backbone output
        channels=16,
        num_convs=1,
        concat_input=False,
        dropout_ratio=0.1,
        num_classes=2,  # binary segmentation
        norm_cfg=norm_cfg,
        align_corners=False,
        loss_decode=dict(
            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)),
    # model training and testing settings
    train_cfg=dict(),
    # Sliding-window inference: 256x256 crops with 170px stride (overlap).
    test_cfg=dict(mode='slide', crop_size=(256, 256), stride=(170, 170)))
| 1.414063 | 1 |
research/delf/delf/python/datasets/generic_dataset.py | NasTul/models | 82,518 | 12759993 | # Lint as: python3
# Copyright 2021 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for generic image dataset creation."""
import os
from delf.python.datasets import utils
class ImagesFromList():
  """A generic data loader that loads images from a list.

  Supports images of different sizes.
  """

  def __init__(self, root, image_paths, imsize=None, bounding_boxes=None,
               loader=utils.default_loader):
    """ImagesFromList object initialization.

    Args:
      root: String, root directory path.
      image_paths: List, relative image paths as strings.
      imsize: Integer, defines the maximum size of longer image side.
      bounding_boxes: List of (x1,y1,x2,y2) tuples to crop the query images.
      loader: Callable, a function to load an image given its path.

    Raises:
      ValueError: Raised if `image_paths` list is empty.
    """
    # Resolve every relative path against the dataset root up front.
    full_paths = [os.path.join(root, relative) for relative in image_paths]
    if not full_paths:
      raise ValueError("Dataset contains 0 images.")

    self.root = root
    self.images = image_paths
    self.imsize = imsize
    self.images_filenames = full_paths
    self.bounding_boxes = bounding_boxes
    self.loader = loader

  def __getitem__(self, index):
    """Called to load an image at the given `index`.

    Args:
      index: Integer, image index.

    Returns:
      image: Tensor, loaded image.
    """
    path = self.images_filenames[index]
    if self.bounding_boxes is None:
      return self.loader(path, self.imsize)
    return self.loader(path, self.imsize, self.bounding_boxes[index])

  def __len__(self):
    """Implements the built-in function len().

    Returns:
      len: Number of images in the dataset.
    """
    return len(self.images_filenames)
pyaims/python/soma/aims/tests/test_pyaims_thread_read.py | brainvisa/aims-free | 4 | 12759994 | #!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
import threading
from soma import aims
import os
import sys
from optparse import OptionParser
import threading
import tempfile
import shutil
import soma.subprocess
import time
import six
from six.moves import zip
def aims_test_thread_read(filenames, verbose=True):
    """Read all *filenames* concurrently, one thread per file.

    Each worker loads its file with aims.read() and increments a shared
    success counter under a lock. Prints timing, then raises RuntimeError
    when fewer objects were loaded than files were requested.
    """
    class Loadfile(object):
        """Thread target: read one file and record the success."""
        def __init__(self, filename, lock, objnum, verbose):
            self._filename = filename
            self.lock = lock
            self.objnum = objnum
            self.verbose = verbose
        def __call__(self):
            if self.verbose:
                print('reading %s...' % self._filename)
            obj = aims.read(self._filename)
            if self.verbose:
                print('read %s: %s' % (self._filename, str(type(obj))))
            # Only reached when aims.read() did not throw: count the load.
            self.lock.acquire()
            self.objnum[0] += 1
            self.lock.release()

    aims.carto.PluginLoader.load()  # do this once in main thread
    threads = []
    lock = threading.RLock()
    # objnum is a list, not an int, because the counter has to be shared
    # between all threads: a list is, an int is not
    objnum = [0]
    starttime = time.time()
    for fname in filenames:
        thread = threading.Thread(
            target=Loadfile(fname, lock, objnum, verbose))
        thread.start()
        threads.append(thread)
    for thread in threads:
        thread.join()
    duration = time.time() - starttime
    print('finished. Read %d / %d objects in %.3f seconds.'
          % (objnum[0], len(filenames), duration))
    nmissing = len(filenames) - objnum[0]
    if nmissing != 0:
        print('Not all objects were loaded, %d missing.' % nmissing)
        raise RuntimeError('Not all objects were loaded, %d missing.'
                           % nmissing)
def _convertFileFormat(aimsobj, directory, prefix, format, is_soma=False):
    """Write *aimsobj* into *directory* using *format*; return the file name.

    Tries every extension registered for the format (soma-io or AIMS,
    depending on *is_soma*) until one produces a file that an aims.Finder
    reads back with the expected format. After every failed attempt the
    directory is wiped and recreated so leftovers cannot confuse the next
    try. Returns None when no extension worked.
    """
    if is_soma:
        exts = somaio_extensions(aimsobj, format)
    else:
        exts = aims.Finder.extensions(format)
    if len(exts) == 0:
        return None
    # Try the empty extension last, if it is registered at all.
    exts2 = [x for x in exts if x != '']
    if len(exts) != len(exts2):
        exts2.append('')
    exts = exts2
    del exts2
    formatok = False
    for ext in exts:
        if ext == '':
            newfilename = os.path.join(directory, prefix)
        else:
            newfilename = os.path.join(directory,
                                       '.'.join((prefix, ext)))
        try:
            aims.write(aimsobj, newfilename, format=format)
            if not os.path.exists(newfilename):
                # Some writers change the file name: pick up whatever was
                # actually produced (ignoring .minf header files).
                for f in os.listdir(directory):
                    if not f.endswith('.minf') \
                            and (ext == '' or f.endswith('.' + ext)):
                        newfilename = os.path.join(directory, f)
                        break
                else:
                    # Nothing was written at all: reset and try next ext.
                    shutil.rmtree(directory)
                    os.mkdir(directory)
                    continue
            f = aims.Finder()
            if f.check(newfilename) and f.format() == format:
                formatok = True
                break
            else:
                # print('could not read', newfilename)
                shutil.rmtree(directory)
                os.mkdir(directory)
        except Exception:
            # Was a bare "except:", which also swallowed KeyboardInterrupt
            # and SystemExit.
            shutil.rmtree(directory)
            os.mkdir(directory)
            continue
    if formatok:
        return newfilename
    return None
def somaio_formats(aimsobj):
    """Return {format_name: [extensions]} writable via soma-io for *aimsobj*.

    Returns an empty dict when the object's type has no soma-io format
    dictionary. (The old code returned None in that case, which crashed
    the iterating caller in test_all_formats; it also used a bare
    "except:".)
    """
    try:
        fclass = getattr(aims.carto,
                         'FormatDictionary_%s' % aims.typeCode(aimsobj))
    except Exception:
        if isinstance(aimsobj, aims.carto.GenericObject):
            fclass = aims.carto.FormatDictionary_Object
        else:
            return {}
    formats = fclass.writeFormats()
    exts = fclass.writeExtensions()
    # Invert the extension -> formats mapping into format -> extensions.
    ext_by_format = dict([(f, []) for f in formats])
    for ext, flist in six.iteritems(exts):
        for f in flist:
            ext_by_format[f].append(ext)
    return ext_by_format
def somaio_extensions(aimsobj, format):
    """Return the file extensions writable for *format* via soma-io.

    Returns an empty list when the object's type has no soma-io format
    dictionary. (Replaced the bare "except:" which also swallowed
    KeyboardInterrupt and SystemExit.)
    """
    try:
        fclass = getattr(aims.carto,
                         'FormatDictionary_%s' % aims.typeCode(aimsobj))
    except Exception:
        if isinstance(aimsobj, aims.carto.GenericObject):
            fclass = aims.carto.FormatDictionary_Object
        else:
            return []
    exts = fclass.writeExtensions()
    exts_for_format = [ext for ext, formats in six.iteritems(exts)
                       if format in formats]
    return exts_for_format
def test_all_formats(filename, number=30, separate_process=False):
    """Convert *filename* to every writable format and thread-test each one.

    For each AIMS (and soma-io) format supporting the object's type, the
    object is written to a temporary directory in that format and read
    back *number* times concurrently -- optionally in a subprocess so a
    crash does not abort the remaining formats.

    Returns:
        (unsafe, safe): two dicts mapping (objectType, dataType) to the
        list of formats that failed / passed the concurrent-read test.

    Raises:
        IOError: when *filename* cannot be read at all.
    """
    f = aims.Finder()
    if not f.check(filename):
        # Fixed '%f' (float conversion) -> '%s': the old message raised
        # TypeError instead of showing the file name.
        raise IOError('%s is not readable' % filename)
    ot = f.objectType(), f.dataType()
    aimsobj = aims.read(filename)
    formats = aims.IOObjectTypesDictionary.formats(*ot)
    soma_io_formats = somaio_formats(aimsobj)
    success = True
    unsafe_formats = []
    safe_formats = []
    all_formats = list(zip(formats, [False] * len(formats))) \
        + [(f, True) for f in soma_io_formats]
    for format, is_soma in all_formats:
        # JP2 writer in Qt (4.8.1 at least) systematically crashes.
        # BUG FIX: ('JP2') is just the string 'JP2', so "format in ('JP2')"
        # was a substring test that also skipped formats named 'J', 'P',
        # '2', 'JP' or 'P2'. A one-element tuple needs the trailing comma.
        if format in ('JP2',):
            continue
        print('testing: %s / %s, format: %s' % (ot[0], ot[1], format))
        # Create the work dir before the try so 'directory' is always bound
        # when the finally clause runs.
        directory = tempfile.mkdtemp(prefix='aims_thread_test')
        try:
            newfilename = _convertFileFormat(aimsobj, directory, 'aims_test',
                                             format, is_soma)
            if not newfilename:
                print('could not generate format', format)
                continue
            print('testing read on %s...' % newfilename)
            try:
                if separate_process:
                    soma.subprocess.check_call([sys.executable, '-m',
                        'soma.aims.tests.test_pyaims_thread_read', '-i',
                        newfilename, '-n', str(number), '--silent'])
                else:
                    aims_test_thread_read([newfilename] * number,
                                          verbose=False)
                print('Passed.')
                safe_formats.append(format)
            except Exception:
                # Was a bare "except:"; Ctrl-C now aborts the whole run.
                print('format %s is unsafe.' % format)
                success = False
                unsafe_formats.append(format)
        finally:
            shutil.rmtree(directory)
    print('All done for %s / %s. Success =' % ot, success)
    if not success:
        return {ot: unsafe_formats}, {ot: safe_formats}
    return {}, {ot: safe_formats}
if __name__ == '__main__':
    # Command-line driver: read the given files concurrently, optionally
    # looping forever (--loop) and/or exercising every convertible
    # format (--all).
    parser = OptionParser(
        description='Perform tests of threaded concurrent loading of aims objects in pyaims')
    parser.add_option('-i', '--input', dest='infiles',
                      help='files to be read concurrently', action='append', default=[])
    parser.add_option('-n', '--number', dest='number', type='int',
                      help='number of times each file should be read at the same time. Default: 30 if one input filename, 1 otherwise', default=0)
    parser.add_option('-a', '--all', dest='all', action='store_true',
                      default=False,
                      help='test all possible formats for each input file (convert to all of them and test)')
    parser.add_option('-s', '--subprocess', dest='subprocess',
                      action='store_true', default=False,
                      help='use subprocesses to run formats tests (with -a option). By default, they run in a single process, so a thread-related crash will end all tests (but will be easier to trace with a debugger).')
    parser.add_option('--silent', dest='silent', action='store_true',
                      default=False,
                      help='be less verbose in per-file tests (no -a option)')
    parser.add_option('-l', '--loop', dest='loop',
                      action='store_true', help='loop the execution endlessly (until it crashes). Useful for debugging rare crashes')
    options, args = parser.parse_args()
    # Positional arguments are treated like extra -i inputs.
    filenames = options.infiles + args
    if len(filenames) == 0:
        print('no input files.')
        parser.parse_args(['-h'])
    # Default repetition count: 30 for a single file or with --all,
    # otherwise each file is read once.
    if options.number == 0:
        if len(filenames) == 1 or options.all:
            num = 30
        else:
            num = 1
    else:
        num = options.number
    # import libxml2
    # libxml2.newTextReaderFilename( '/tmp/ra_head.gii.minf' )
    # import xml.parsers.expat
    # open( '/tmp/xml.xml', 'w' ).write( '<?xml version="1.0" encoding="utf-8" ?><grop></grop>' )
    # p = xml.parsers.expat.ParserCreate()
    # p.ParseFile( open( '/tmp/xml.xml' ) )
    from soma.qt_gui.qt_backend import QtGui
    app = QtGui.QApplication(sys.argv)
    doit = True
    while doit:
        if options.all:
            unsafe_formats = {}
            safe_formats = {}
            for filename in filenames:
                tested_formats = test_all_formats(filename, num,
                                                  separate_process=options.subprocess)
                unsafe_formats.update(tested_formats[0])
                safe_formats.update(tested_formats[1])
            if len(unsafe_formats) != 0:
                print('Results:')
                print('unsafe formats:')
                print(unsafe_formats)
                print('safe formats:')
                print(safe_formats)
                raise RuntimeError('Some tests failed.')
            else:
                print('OK.')
                print('safe formats:')
                print(safe_formats)
        else:
            # NOTE(review): this re-multiplies the list on every pass, so
            # with --loop the workload grows by a factor of num each
            # iteration -- confirm whether that is intentional.
            filenames = filenames * num
            aims_test_thread_read(filenames, verbose=not options.silent)
        if not options.loop:
            doit = False
python/testData/inspections/PyRedundantParenthesesInspection/NestedParentheses.py | jnthn/intellij-community | 2 | 12759995 | <filename>python/testData/inspections/PyRedundantParenthesesInspection/NestedParentheses.py
x = (<weak_warning descr="Remove redundant parentheses">((42))</weak_warning>) | 1.578125 | 2 |
22_GScan/lib/plugins/Rootkit_Analysis.py | hemuke/python | 0 | 12759996 | <reponame>hemuke/python<filename>22_GScan/lib/plugins/Rootkit_Analysis.py
# coding:utf-8
from __future__ import print_function
import os, optparse, time, sys, json
from lib.core.common import *
# 作者:咚咚呛
# Rootkit检测,规则参考rkhunter
# 1、扫描93类rootkit特征
# 2、检查已知rootkit的内核符号表
# 3、检查已知rootkit内核文件
class Rootkit_Analysis:
    def __init__(self):
        """Initialize the rootkit-scan plugin state.

        ``name`` is the user-facing plugin title (Chinese: "Rootkit security
        check"); it is a runtime string displayed by the framework and is
        intentionally left untranslated.
        """
        self.name = u'Rootkit类安全检测'
        # Accumulator for malicious-rootkit findings (the scan output).
        self.rootkit = []
        # Collected kernel symbol table (/proc/kallsyms) names, matched
        # against each signature's 'ksyms' list — presumably filled in by a
        # later scan step not visible here; TODO confirm.
        self.kallsyms = []
# 各类rootkit特征,file、dir代表其特征、
W55808A = {'name': '55808 Variant A', 'file': ['/tmp/.../r', '/tmp/.../a'], 'dir': [], 'ksyms': []}
Adore_Rootkit = {'name': 'Adore Rootkit',
'file': ['/usr/secure', '/usr/doc/sys/qrt', '/usr/doc/sys/run', '/usr/doc/sys/crond',
'/usr/sbin/kfd', '/usr/doc/kern/var',
'/usr/doc/kern/string.o', '/usr/doc/kern/ava', '/usr/doc/kern/adore.o',
'/var/log/ssh/old'],
'dir': ['/lib/security/.config/ssh', '/usr/doc/kern', '/usr/doc/backup', '/usr/doc/backup/txt',
'/lib/backup', '/lib/backup/txt', '/usr/doc/work', '/usr/doc/sys', '/var/log/ssh',
'/usr/doc/.spool', '/usr/lib/kterm'], 'ksyms': []}
AjaKit_Rootkit = {'name': 'AjaKit Rootkit',
'file': ['/dev/tux/.addr', '/dev/tux/.proc', '/dev/tux/.file', '/lib/.libgh-gh/cleaner',
'/lib/.libgh-gh/Patch/patch', '/lib/.libgh-gh/sb0k'],
'dir': ['/dev/tux', '/lib/.libgh-gh'], 'ksyms': []}
aPa_Kit_Rootkit = {'name': 'aPa Kit Rootkit', 'file': ['/usr/share/.aPa'], 'dir': [], 'ksyms': []}
Apache_Worm = {'name': 'Apache Worm', 'file': ['/bin/.log'], 'dir': [], 'ksyms': []}
Ambient_Rootkit = {'name': 'Ambient Rootkit',
'file': ['/usr/lib/.ark?', '/dev/ptyxx/.log', '/dev/ptyxx/.file', '/dev/ptyxx/.proc',
'/dev/ptyxx/.addr'],
'dir': ['/dev/ptyxx'], 'ksyms': []}
Balaur_Rootkit = {'name': 'Balaur Rootkit', 'file': ['/usr/lib/liblog.o'],
'dir': ['/usr/lib/.kinetic', '/usr/lib/.egcs', '/usr/lib/.wormie'], 'ksyms': []}
Beastkit_Rootkit = {'name': 'Beastkit Rootkit',
'file': ['/usr/sbin/arobia', '/usr/sbin/idrun', '/usr/lib/elm/arobia/elm',
'/usr/lib/elm/arobia/elm/hk', '/usr/lib/elm/arobia/elm/hk.pub',
'/usr/lib/elm/arobia/elm/sc', '/usr/lib/elm/arobia/elm/sd.pp',
'/usr/lib/elm/arobia/elm/sdco', '/usr/lib/elm/arobia/elm/srsd'],
'dir': ['/lib/ldd.so/bktools'], 'ksyms': []}
beX2_Rootkit = {'name': 'beX2 Rootkit', 'file': ['/usr/info/termcap.info-5.gz', '/usr/bin/sshd2'],
'dir': ['/usr/include/bex'], 'ksyms': []}
BOBkit_Rootkit = {'name': 'BOBkit Rootkit',
'file': ['/usr/sbin/ntpsx', '/usr/sbin/.../bkit-ava', '/usr/sbin/.../bkit-d',
'/usr/sbin/.../bkit-shd', '/usr/sbin/.../bkit-f', '/usr/include/.../proc.h',
'/usr/include/.../.bash_history', '/usr/include/.../bkit-get',
'/usr/include/.../bkit-dl', '/usr/include/.../bkit-screen',
'/usr/include/.../bkit-sleep', '/usr/lib/.../bkit-adore.o', '/usr/lib/.../ls',
'/usr/lib/.../netstat', '/usr/lib/.../lsof', '/usr/lib/.../bkit-ssh/bkit-shdcfg',
'/usr/lib/.../bkit-ssh/bkit-shhk', '/usr/lib/.../bkit-ssh/bkit-pw',
'/usr/lib/.../bkit-ssh/bkit-shrs', '/usr/lib/.../bkit-ssh/bkit-mots',
'/usr/lib/.../uconf.inv', '/usr/lib/.../psr', '/usr/lib/.../find',
'/usr/lib/.../pstree', '/usr/lib/.../slocate', '/usr/lib/.../du',
'/usr/lib/.../top'],
'dir': ['/usr/sbin/...', '/usr/include/...', '/usr/include/.../.tmp', '/usr/lib/...',
'/usr/lib/.../.ssh', '/usr/lib/.../bkit-ssh', '/usr/lib/.bkit-', '/tmp/.bkp'],
'ksyms': []}
OSX_Boonana_A_Trojan = {'name': 'OSX Boonana-A Trojan',
'file': ['/Library/StartupItems/OSXDriverUpdates/OSXDriverUpdates',
'/Library/StartupItems/OSXDriverUpdates/StartupParameters.plist'],
'dir': ['/var/root/.jnana'], 'ksyms': []}
cb_Rootkit = {'name': 'cb Rootkit',
'file': ['/dev/srd0', '/lib/libproc.so.2.0.6', '/dev/mounnt', '/etc/rc.d/init.d/init',
'/usr/bin/.zeen/..%/cl', '/usr/bin/.zeen/..%/.x.tgz', '/usr/bin/.zeen/..%/statdx',
'/usr/bin/.zeen/..%/wted', '/usr/bin/.zeen/..%/write', '/usr/bin/.zeen/..%/scan',
'/usr/bin/.zeen/..%/sc', '/usr/bin/.zeen/..%/sl2', '/usr/bin/.zeen/..%/wroot',
'/usr/bin/.zeen/..%/wscan', '/usr/bin/.zeen/..%/wu', '/usr/bin/.zeen/..%/v',
'/usr/bin/.zeen/..%/read', '/usr/lib/sshrc', '/usr/lib/ssh_host_key',
'/usr/lib/ssh_host_key.pub', '/usr/lib/ssh_random_seed', '/usr/lib/sshd_config',
'/usr/lib/shosts.equiv', '/usr/lib/ssh_known_hosts', '/u/zappa/.ssh/pid',
'/usr/bin/.system/..%/tcp.log', '/usr/bin/.zeen/..%/curatare/attrib',
'/usr/bin/.zeen/..%/curatare/chattr', '/usr/bin/.zeen/..%/curatare/ps',
'/usr/bin/.zeen/..%/curatare/pstree', '/usr/bin/.system/..%/.x/xC.o'],
'dir': ['/usr/bin/.zeen', '/usr/bin/.zeen/..%/curatare', '/usr/bin/.zeen/..%/scan',
'/usr/bin/.system/..%'], 'ksyms': []}
CiNIK_Worm = {'name': 'CiNIK Worm', 'file': ['/tmp/.cinik'], 'dir': ['/tmp/.font-unix/.cinik'], 'ksyms': []}
CX_Rootkit = {'name': 'CX Rootkit',
'file': ['/usr/lib/ldlibso', '/usr/lib/configlibso', '/usr/lib/shklibso', '/usr/lib/randomlibso',
'/usr/lib/ldlibstrings.so', '/usr/lib/ldlibdu.so', '/usr/lib/ldlibns.so',
'/usr/include/db'],
'dir': ['/usr/include/cxk'], 'ksyms': []}
Abuse_Kit = {'name': 'Abuse Kit', 'file': ['/dev/mdev', '/usr/lib/libX.a'], 'dir': [], 'ksyms': []}
Devil_Rootkit = {'name': 'Devil Rootkit',
'file': ['/var/lib/games/.src', '/dev/dsx', '/dev/caca', '/dev/pro', '/bin/bye',
'/bin/homedir', '/usr/bin/xfss', '/usr/sbin/tzava',
'/usr/doc/tar/.../.dracusor/stuff/holber',
'/usr/doc/tar/.../.dracusor/stuff/sense',
'/usr/doc/tar/.../.dracusor/stuff/clear',
'/usr/doc/tar/.../.dracusor/stuff/tzava',
'/usr/doc/tar/.../.dracusor/stuff/citeste',
'/usr/doc/tar/.../.dracusor/stuff/killrk',
'/usr/doc/tar/.../.dracusor/stuff/searchlog',
'/usr/doc/tar/.../.dracusor/stuff/gaoaza',
'/usr/doc/tar/.../.dracusor/stuff/cleaner',
'/usr/doc/tar/.../.dracusor/stuff/shk',
'/usr/doc/tar/.../.dracusor/stuff/srs',
'/usr/doc/tar/.../.dracusor/utile.tgz',
'/usr/doc/tar/.../.dracusor/webpage', '/usr/doc/tar/.../.dracusor/getpsy',
'/usr/doc/tar/.../.dracusor/getbnc',
'/usr/doc/tar/.../.dracusor/getemech',
'/usr/doc/tar/.../.dracusor/localroot.sh',
'/usr/doc/tar/.../.dracusor/stuff/old/sense'],
'dir': ['/usr/doc/tar/.../.dracusor'], 'ksyms': []}
Diamorphine_LKM = {'name': 'Diamorphine LKM', 'file': [], 'dir': [],
'ksyms': ['diamorphine', 'module_hide', 'module_hidden', 'is_invisible', 'hacked_getdents',
'hacked_kill']}
Dica_Kit_Rootkit = {'name': 'Dica-Kit Rootkit',
'file': ['/lib/.sso', '/lib/.so', '/var/run/...dica/clean', '/var/run/...dica/dxr',
'/var/run/...dica/read', '/var/run/...dica/write', '/var/run/...dica/lf',
'/var/run/...dica/xl', '/var/run/...dica/xdr', '/var/run/...dica/psg',
'/var/run/...dica/secure', '/var/run/...dica/rdx', '/var/run/...dica/va',
'/var/run/...dica/cl.sh', '/var/run/...dica/last.log', '/usr/bin/.etc',
'/etc/sshd_config', '/etc/ssh_host_key', '/etc/ssh_random_seed'],
'dir': ['/var/run/...dica', '/var/run/...dica/mh', '/var/run/...dica/scan'], 'ksyms': []}
Dreams_Rootkit = {'name': 'Dreams Rootkit',
'file': ['/dev/ttyoa', '/dev/ttyof', '/dev/ttyop', '/usr/bin/sense', '/usr/bin/sl2',
'/usr/bin/logclear', '/usr/bin/(swapd)', '/usr/bin/initrd', '/usr/bin/crontabs',
'/usr/bin/snfs', '/usr/lib/libsss', '/usr/lib/libsnf.log', '/usr/lib/libshtift/top',
'/usr/lib/libshtift/ps', '/usr/lib/libshtift/netstat', '/usr/lib/libshtift/ls',
'/usr/lib/libshtift/ifconfig', '/usr/include/linseed.h', '/usr/include/linpid.h',
'/usr/include/linkey.h', '/usr/include/linconf.h', '/usr/include/iceseed.h',
'/usr/include/icepid.h', '/usr/include/icekey.h', '/usr/include/iceconf.h'],
'dir': ['/dev/ida/.hpd', '/usr/lib/libshtift'], 'ksyms': []}
Duarawkz_Rootkit = {'name': 'Duarawkz Rootkit', 'file': ['/usr/bin/duarawkz/loginpass'],
'dir': ['/usr/bin/duarawkz'], 'ksyms': []}
Ebury_sshd_backdoor = {'name': 'Ebury sshd backdoor',
'file': ['/lib/libns2.so', '/lib64/libns2.so', '/lib/libns5.so', '/lib64/libns5.so',
'/lib/libpw3.so', '/lib64/libpw3.so', '/lib/libpw5.so', '/lib64/libpw5.so',
'/lib/libsbr.so', '/lib64/libsbr.so', '/lib/libslr.so', '/lib64/libslr.so',
'/lib/tls/libkeyutils.so.1', '/lib64/tls/libkeyutils.so.1'],
'dir': [], 'ksyms': []}
ENYE_LKM = {'name': 'ENYE LKM', 'file': ['/etc/.enyelkmHIDE^IT.ko', '/etc/.enyelkmOCULTAR.ko'], 'dir': [],
'ksyms': []}
Flea_Rootkit = {'name': 'Flea Rootkit', 'file': ['/etc/ld.so.hash', '/lib/security/.config/ssh/sshd_config',
'/lib/security/.config/ssh/ssh_host_key',
'/lib/security/.config/ssh/ssh_host_key.pub',
'/lib/security/.config/ssh/ssh_random_seed', '/usr/bin/ssh2d',
'/usr/lib/ldlibns.so', '/usr/lib/ldlibps.so',
'/usr/lib/ldlibpst.so',
'/usr/lib/ldlibdu.so', '/usr/lib/ldlibct.so'],
'dir': ['/lib/security/.config/ssh', '/dev/..0', '/dev/..0/backup'], 'ksyms': []}
FreeBSD_Rootkit = {'name': 'FreeBSD Rootkit',
'file': ['/dev/ptyp', '/dev/ptyq', '/dev/ptyr', '/dev/ptys', '/dev/ptyt',
'/dev/fd/.88/freshb-bsd', '/dev/fd/.88/fresht', '/dev/fd/.88/zxsniff',
'/dev/fd/.88/zxsniff.log', '/dev/fd/.99/.ttyf00', '/dev/fd/.99/.ttyp00',
'/dev/fd/.99/.ttyq00', '/dev/fd/.99/.ttys00', '/dev/fd/.99/.pwsx00', '/etc/.acid',
'/usr/lib/.fx/sched_host.2', '/usr/lib/.fx/random_d.2', '/usr/lib/.fx/set_pid.2',
'/usr/lib/.fx/setrgrp.2', '/usr/lib/.fx/TOHIDE', '/usr/lib/.fx/cons.saver',
'/usr/lib/.fx/adore/ava/ava', '/usr/lib/.fx/adore/adore/adore.ko', '/bin/sysback',
'/usr/local/bin/sysback'],
'dir': ['/dev/fd/.88', '/dev/fd/.99', '/usr/lib/.fx', '/usr/lib/.fx/adore'], 'ksyms': []}
Fu_Rootkit = {'name': 'Fu Rootkit', 'file': ['/sbin/xc', '/usr/include/ivtype.h', '/bin/.lib'], 'dir': [],
'ksyms': []}
Fuckit_Rootkit = {'name': 'Fuckit Rootkit',
'file': ['/lib/libproc.so.2.0.7', '/dev/proc/.bash_profile', '/dev/proc/.bashrc',
'/dev/proc/.cshrc', '/dev/proc/fuckit/hax0r', '/dev/proc/fuckit/hax0rshell',
'/dev/proc/fuckit/config/lports', '/dev/proc/fuckit/config/rports',
'/dev/proc/fuckit/config/rkconf', '/dev/proc/fuckit/config/password',
'/dev/proc/fuckit/config/progs', '/dev/proc/fuckit/system-bins/init',
'/usr/lib/libcps.a', '/usr/lib/libtty.a'],
'dir': ['/dev/proc', '/dev/proc/fuckit', '/dev/proc/fuckit/system-bins', '/dev/proc/toolz'],
'ksyms': []}
GasKit_Rootkit = {'name': 'GasKit Rootkit', 'file': ['/dev/dev/gaskit/sshd/sshdd'],
'dir': ['/dev/dev', '/dev/dev/gaskit', '/dev/dev/gaskit/sshd'], 'ksyms': []}
Heroin_LKM = {'name': 'Heroin LKM', 'file': [], 'dir': [], 'ksyms': ['heroin']}
HjC_Kit_Rootkit = {'name': 'HjC Kit Rootkit', 'file': [], 'dir': ['/dev/.hijackerz'], 'ksyms': []}
ignoKit_Rootkit = {'name': 'ignoKit Rootkit',
'file': ['/lib/defs/p', '/lib/defs/q', '/lib/defs/r', '/lib/defs/s', '/lib/defs/t',
'/usr/lib/defs/p', '/usr/lib/defs/q', '/usr/lib/defs/r', '/usr/lib/defs/s',
'/usr/lib/defs/t', '/usr/lib/.libigno/pkunsec',
'/usr/lib/.libigno/.igno/psybnc/psybnc'],
'dir': ['/usr/lib/.libigno', '/usr/lib/.libigno/.igno'], 'ksyms': []}
iLLogiC_Rootkit = {'name': 'iLLogiC Rootkit',
'file': ['/dev/kmod', '/dev/dos', '/usr/lib/crth.o', '/usr/lib/crtz.o', '/etc/ld.so.hash',
'/usr/bin/sia', '/usr/bin/ssh2d', '/lib/security/.config/sn',
'/lib/security/.config/iver', '/lib/security/.config/uconf.inv',
'/lib/security/.config/ssh/ssh_host_key',
'/lib/security/.config/ssh/ssh_host_key.pub', '/lib/security/.config/ssh/sshport',
'/lib/security/.config/ssh/ssh_random_seed', '/lib/security/.config/ava',
'/lib/security/.config/cleaner', '/lib/security/.config/lpsched',
'/lib/security/.config/sz', '/lib/security/.config/rcp',
'/lib/security/.config/patcher', '/lib/security/.config/pg',
'/lib/security/.config/crypt', '/lib/security/.config/utime',
'/lib/security/.config/wget', '/lib/security/.config/instmod',
'/lib/security/.config/bin/find', '/lib/security/.config/bin/du',
'/lib/security/.config/bin/ls', '/lib/security/.config/bin/psr',
'/lib/security/.config/bin/netstat', '/lib/security/.config/bin/su',
'/lib/security/.config/bin/ping', '/lib/security/.config/bin/passwd'],
'dir': ['/lib/security/.config', '/lib/security/.config/ssh', '/lib/security/.config/bin',
'/lib/security/.config/backup', '/root/%%%/.dir', '/root/%%%/.dir/mass-scan',
'/root/%%%/.dir/flood'], 'ksyms': []}
OSX_Inqtana = {'name': 'OSX Inqtana Variant A',
'file': ['/Users/w0rm-support.tgz', '/Users/InqTest.class', '/Users/com.openbundle.plist',
'/Users/com.pwned.plist', '/Users/libavetanaBT.jnilib'],
'dir': ['/Users/de', '/Users/javax'], 'ksyms': []}
OSX_Inqtana2 = {'name': 'OSX Inqtana Variant B',
'file': ['/Users/w0rms.love.apples.tgz', '/Users/InqTest.class', '/Users/InqTest.java',
'/Users/libavetanaBT.jnilib', '/Users/InqTanaHandler', '/Users/InqTanaHandler.bundle'],
'dir': ['/Users/de', '/Users/javax'], 'ksyms': []}
OSX_Inqtana3 = {'name': 'OSX Inqtana Variant C',
'file': ['/Users/applec0re.tgz', '/Users/InqTest.class', '/Users/InqTest.java',
'/Users/libavetanaBT.jnilib', '/Users/environment.plist', '/Users/pwned.c',
'/Users/pwned.dylib'],
'dir': ['/Users/de', '/Users/javax'], 'ksyms': []}
IntoXonia_NG_Rootkit = {'name': 'IntoXonia-NG Rootkit', 'file': [], 'dir': [],
'ksyms': ['funces', 'ixinit', 'tricks', 'kernel_unlink', 'rootme', 'hide_module',
'find_sys_call_tbl']}
Irix_Rootkit = {'name': 'Irix Rootkit', 'file': [],
'dir': ['/dev/pts/01', '/dev/pts/01/backup', '/dev/pts/01/etc', '/dev/pts/01/tmp'], 'ksyms': []}
Jynx_Rootkit = {'name': 'Jynx Rootkit',
'file': ['/xochikit/bc', '/xochikit/ld_poison.so', '/omgxochi/bc', '/omgxochi/ld_poison.so',
'/var/local/^^/bc', '/var/local/^^/ld_poison.so'],
'dir': ['/xochikit', '/omgxochi', '/var/local/^^'], 'ksyms': []}
Jynx2_Rootkit = {'name': 'Jynx2 Rootkit', 'file': ['/XxJynx/reality.so'], 'dir': ['/XxJynx'], 'ksyms': []}
KBeast_Rootkit = {'name': 'KBeast Rootkit',
'file': ['/usr/_h4x_/ipsecs-kbeast-v1.ko', '/usr/_h4x_/_h4x_bd', '/usr/_h4x_/acctlog'],
'dir': ['/usr/_h4x_'],
'ksyms': ['h4x_delete_module', 'h4x_getdents64', 'h4x_kill', 'h4x_open', 'h4x_read',
'h4x_rename', 'h4x_rmdir', 'h4x_tcp4_seq_show', 'h4x_write']}
OSX_Keydnap_backdoor = {'name': 'OSX Keydnap backdoor',
'file': ['/Applications/Transmission.app/Contents/Resources/License.rtf',
'/Volumes/Transmission/Transmission.app/Contents/Resources/License.rtf',
'/Library/LaunchAgents/com.apple.iCloud.sync.daemon.plist',
'/Library/LaunchAgents/com.geticloud.icloud.photo.plist'],
'dir': ['/Library/Application%Support/com.apple.iCloud.sync.daemon/'], 'ksyms': []}
Kitko_Rootkit = {'name': 'Kitko Rootkit', 'file': [], 'dir': ['/usr/src/redhat/SRPMS/...'], 'ksyms': []}
KNARK_FILES = {'name': 'Knark Rootkit', 'file': ['/proc/knark/pids'], 'dir': ['/proc/knark'], 'ksyms': []}
KOMPLEX_FILES = {'name': 'OSX Komplex Trojan',
'file': ['/Users/Shared/.local/kextd', '/Users/Shared/com.apple.updates.plist',
'/Users/Shared/start.sh'], 'dir': [], 'ksyms': []}
LINUXV_FILES = {'name': 'ld-linuxv rootkit', 'file': ['/lib/ld-linuxv.so.1'],
'dir': ['/var/opt/_so_cache', '/var/opt/_so_cache/ld', '/var/opt/_so_cache/lc'], 'ksyms': []}
LION_FILES = {'name': 'Lion Worm', 'file': ['/bin/in.telnetd', '/bin/mjy', '/usr/man/man1/man1/lib/.lib/mjy',
'/usr/man/man1/man1/lib/.lib/in.telnetd',
'/usr/man/man1/man1/lib/.lib/.x', '/dev/.lib/lib/scan/1i0n.sh',
'/dev/.lib/lib/scan/hack.sh', '/dev/.lib/lib/scan/bind',
'/dev/.lib/lib/scan/randb', '/dev/.lib/lib/scan/scan.sh',
'/dev/.lib/lib/scan/pscan', '/dev/.lib/lib/scan/star.sh',
'/dev/.lib/lib/scan/bindx.sh', '/dev/.lib/lib/scan/bindname.log',
'/dev/.lib/lib/1i0n.sh', '/dev/.lib/lib/lib/netstat',
'/dev/.lib/lib/lib/dev/.1addr', '/dev/.lib/lib/lib/dev/.1logz',
'/dev/.lib/lib/lib/dev/.1proc', '/dev/.lib/lib/lib/dev/.1file'],
'dir': [], 'ksyms': []}
LOCKIT_FILES = {'name': 'Lockit Rootkit',
'file': ['/usr/lib/libmen.oo/.LJK2/ssh_config', '/usr/lib/libmen.oo/.LJK2/ssh_host_key',
'/usr/lib/libmen.oo/.LJK2/ssh_host_key.pub',
'/usr/lib/libmen.oo/.LJK2/ssh_random_seed*', '/usr/lib/libmen.oo/.LJK2/sshd_config',
'/usr/lib/libmen.oo/.LJK2/backdoor/RK1bd', '/usr/lib/libmen.oo/.LJK2/backup/du',
'/usr/lib/libmen.oo/.LJK2/backup/ifconfig',
'/usr/lib/libmen.oo/.LJK2/backup/inetd.conf', '/usr/lib/libmen.oo/.LJK2/backup/locate',
'/usr/lib/libmen.oo/.LJK2/backup/login', '/usr/lib/libmen.oo/.LJK2/backup/ls',
'/usr/lib/libmen.oo/.LJK2/backup/netstat', '/usr/lib/libmen.oo/.LJK2/backup/ps',
'/usr/lib/libmen.oo/.LJK2/backup/pstree', '/usr/lib/libmen.oo/.LJK2/backup/rc.sysinit',
'/usr/lib/libmen.oo/.LJK2/backup/syslogd', '/usr/lib/libmen.oo/.LJK2/backup/tcpd',
'/usr/lib/libmen.oo/.LJK2/backup/top', '/usr/lib/libmen.oo/.LJK2/clean/RK1sauber',
'/usr/lib/libmen.oo/.LJK2/clean/RK1wted', '/usr/lib/libmen.oo/.LJK2/hack/RK1parse',
'/usr/lib/libmen.oo/.LJK2/hack/RK1sniff', '/usr/lib/libmen.oo/.LJK2/hide/.RK1addr',
'/usr/lib/libmen.oo/.LJK2/hide/.RK1dir', '/usr/lib/libmen.oo/.LJK2/hide/.RK1log',
'/usr/lib/libmen.oo/.LJK2/hide/.RK1proc',
'/usr/lib/libmen.oo/.LJK2/hide/RK1phidemod.c',
'/usr/lib/libmen.oo/.LJK2/modules/README.modules',
'/usr/lib/libmen.oo/.LJK2/modules/RK1hidem.c',
'/usr/lib/libmen.oo/.LJK2/modules/RK1phide',
'/usr/lib/libmen.oo/.LJK2/sshconfig/RK1ssh'],
'dir': ['/usr/lib/libmen.oo/.LJK2'], 'ksyms': []}
MOKES_FILES = {'name': 'Mokes backdoor', 'file': [
'/tmp/ss0-[0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9].sst',
'/tmp/aa0-[0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9].aat',
'/tmp/kk0-[0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9].kkt',
'/tmp/dd0-[0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9].ddt'],
'dir': [], 'ksyms': []}
MRK_FILES = {'name': 'MRK RootKit',
'file': ['/dev/ida/.inet/pid', '/dev/ida/.inet/ssh_host_key', '/dev/ida/.inet/ssh_random_seed',
'/dev/ida/.inet/tcp.log'], 'dir': ['/dev/ida/.inet', '/var/spool/cron/.sh'], 'ksyms': []}
MOODNT_FILES = {'name': 'Mood-NT Rootkit',
'file': ['/sbin/init__mood-nt-_-_cthulhu', '/_cthulhu/mood-nt.init', '/_cthulhu/mood-nt.conf',
'/_cthulhu/mood-nt.sniff'], 'dir': ['/_cthulhu'], 'ksyms': []}
NIO_FILES = {'name': 'Ni0 Rootkit',
'file': ['/var/lock/subsys/...datafile.../...net...', '/var/lock/subsys/...datafile.../...port...',
'/var/lock/subsys/...datafile.../...ps...', '/var/lock/subsys/...datafile.../...file...'],
'dir': ['/tmp/waza', '/var/lock/subsys/...datafile...', '/usr/sbin/es'], 'ksyms': []}
OHHARA_FILES = {'name': 'Ohhara Rootkit',
'file': ['/var/lock/subsys/...datafile.../...datafile.../in.smbd.log'],
'dir': ['/var/lock/subsys/...datafile...', '/var/lock/subsys/...datafile.../...datafile...',
'/var/lock/subsys/...datafile.../...datafile.../bin',
'/var/lock/subsys/...datafile.../...datafile.../usr/bin',
'/var/lock/subsys/...datafile.../...datafile.../usr/sbin',
'/var/lock/subsys/...datafile.../...datafile.../lib/security'], 'ksyms': []}
OPTICKIT_FILES = {'name': 'Optic Kit Rootkit', 'file': [],
'dir': ['/dev/tux', '/usr/bin/xchk', '/usr/bin/xsf', '/usr/bin/ssh2d'], 'ksyms': []}
OSXRK_FILES = {'name': 'OSXRK',
'file': ['/dev/.rk/nc', '/dev/.rk/diepu', '/dev/.rk/backd', '/Library/StartupItems/opener',
'/Library/StartupItems/opener.sh', '/System/Library/StartupItems/opener',
'/System/Library/StartupItems/opener.sh'],
'dir': ['/dev/.rk', '/Users/LDAP-daemon', '/tmp/.work'], 'ksyms': []}
OZ_FILES = {'name': 'Oz Rootkit', 'file': ['/dev/.oz/.nap/rkit/terror'], 'dir': ['/dev/.oz'], 'ksyms': []}
PHALANX_FILES = {'name': 'Phalanx Rootkit',
'file': ['/uNFuNF', '/etc/host.ph1', '/bin/host.ph1', '/usr/share/.home.ph1/phalanx',
'/usr/share/.home.ph1/cb', '/usr/share/.home.ph1/kebab'],
'dir': ['/usr/share/.home.ph1', '/usr/share/.home.ph1/tty'], 'ksyms': []}
PHALANX2_FILES = {'name': 'Phalanx2 Rootkit',
'file': ['/etc/khubd.p2/.p2rc', '/etc/khubd.p2/.phalanx2', '/etc/khubd.p2/.sniff',
'/etc/khubd.p2/sshgrab.py', '/etc/lolzz.p2/.p2rc', '/etc/lolzz.p2/.phalanx2',
'/etc/lolzz.p2/.sniff', '/etc/lolzz.p2/sshgrab.py', '/etc/cron.d/zupzzplaceholder',
'/usr/lib/zupzz.p2/.p-2.3d', '/usr/lib/zupzz.p2/.p2rc'],
'dir': ['/etc/khubd.p2', '/etc/lolzz.p2', '/usr/lib/zupzz.p2'], 'ksyms': []}
PORTACELO_FILES = {'name': 'Portacelo Rootkit',
'file': ['/var/lib/.../.ak', '/var/lib/.../.hk', '/var/lib/.../.rs', '/var/lib/.../.p',
'/var/lib/.../getty', '/var/lib/.../lkt.o', '/var/lib/.../show',
'/var/lib/.../nlkt.o', '/var/lib/.../ssshrc', '/var/lib/.../sssh_equiv',
'/var/lib/.../sssh_known_hosts', '/var/lib/.../sssh_pid ~/.sssh/known_hosts'],
'dir': [], 'ksyms': []}
PROTON_FILES = {'name': 'OSX Proton backdoor', 'file': ['Library/LaunchAgents/com.apple.xpcd.plist',
'/Library/LaunchAgents/com.Eltima.UpdaterAgent.plist',
'/Library/.rand/updateragent.app', '/tmp/Updater.app'],
'dir': ['/Library/.rand', '/Library/.cachedir', '/Library/.random'], 'ksyms': []}
REDSTORM_FILES = {'name': 'R3dstorm Toolkit',
'file': ['/var/log/tk02/see_all', '/var/log/tk02/.scris', '/bin/.../sshd/sbin/sshd1',
'/bin/.../hate/sk', '/bin/.../see_all'],
'dir': ['/var/log/tk02', '/var/log/tk02/old', '/bin/...'], 'ksyms': []}
RHSHARPES_FILES = {'name': 'RH-Sharpe Rootkit',
'file': ['/bin/lps', '/usr/bin/lpstree', '/usr/bin/ltop', '/usr/bin/lkillall',
'/usr/bin/ldu', '/usr/bin/lnetstat', '/usr/bin/wp', '/usr/bin/shad',
'/usr/bin/vadim', '/usr/bin/slice', '/usr/bin/cleaner', '/usr/include/rpcsvc/du'],
'dir': [], 'ksyms': []}
RSHA_FILES = {'name': 'RSHA Rootkit',
'file': ['/bin/kr4p', '/usr/bin/n3tstat', '/usr/bin/chsh2', '/usr/bin/slice2',
'/usr/src/linux/arch/alpha/lib/.lib/.1proc', '/etc/rc.d/arch/alpha/lib/.lib/.1addr'],
'dir': ['/etc/rc.d/rsha', '/etc/rc.d/arch/alpha/lib/.lib'], 'ksyms': []}
SHUTDOWN_FILES = {'name': 'Shutdown Rootkit',
'file': ['/usr/man/man5/..%/.dir/scannah/asus', '/usr/man/man5/..%/.dir/see',
'/usr/man/man5/..%/.dir/nscd', '/usr/man/man5/..%/.dir/alpd', '/etc/rc.d/rc.local%'],
'dir': ['/usr/man/man5/..%/.dir', '/usr/man/man5/..%/.dir/scannah',
'/etc/rc.d/rc0.d/..%/.dir'], 'ksyms': []}
SCALPER_FILES = {'name': 'Scalper Worm', 'file': ['/tmp/.a', '/tmp/.uua'], 'dir': [], 'ksyms': []}
SHV4_FILES = {'name': 'SHV4 Rootkit',
'file': ['/etc/ld.so.hash', '/lib/libext-2.so.7', '/lib/lidps1.so', '/lib/libproc.a',
'/lib/libproc.so.2.0.6', '/lib/ldd.so/tks', '/lib/ldd.so/tkp', '/lib/ldd.so/tksb',
'/lib/security/.config/sshd', '/lib/security/.config/ssh/ssh_host_key',
'/lib/security/.config/ssh/ssh_host_key.pub',
'/lib/security/.config/ssh/ssh_random_seed', '/usr/include/file.h',
'/usr/include/hosts.h', '/usr/include/lidps1.so', '/usr/include/log.h',
'/usr/include/proc.h', '/usr/sbin/xntps', '/dev/srd0'],
'dir': ['/lib/ldd.so', '/lib/security/.config', '/lib/security/.config/ssh'], 'ksyms': []}
SHV5_FILES = {'name': 'SHV5 Rootkit',
'file': ['/etc/sh.conf', '/lib/libproc.a', '/lib/libproc.so.2.0.6', '/lib/lidps1.so',
'/lib/libsh.so/bash', '/usr/include/file.h', '/usr/include/hosts.h',
'/usr/include/log.h', '/usr/include/proc.h', '/lib/libsh.so/shdcf2',
'/lib/libsh.so/shhk', '/lib/libsh.so/shhk.pub', '/lib/libsh.so/shrs',
'/usr/lib/libsh/.bashrc', '/usr/lib/libsh/shsb', '/usr/lib/libsh/hide',
'/usr/lib/libsh/.sniff/shsniff', '/usr/lib/libsh/.sniff/shp', '/dev/srd0'],
'dir': ['/lib/libsh.so', '/usr/lib/libsh', '/usr/lib/libsh/utilz', '/usr/lib/libsh/.backup'],
'ksyms': []}
SINROOTKIT_FILES = {'name': 'Sin Rootkit',
'file': ['/dev/.haos/haos1/.f/Denyed', '/dev/ttyoa', '/dev/ttyof', '/dev/ttyop',
'/dev/ttyos', '/usr/lib/.lib', '/usr/lib/sn/.X', '/usr/lib/sn/.sys',
'/usr/lib/ld/.X', '/usr/man/man1/...', '/usr/man/man1/.../.m',
'/usr/man/man1/.../.w'],
'dir': ['/usr/lib/sn', '/usr/lib/man1/...', '/dev/.haos'], 'ksyms': []}
SLAPPER_FILES = {'name': 'Slapper Worm',
'file': ['/tmp/.bugtraq', '/tmp/.uubugtraq', '/tmp/.bugtraq.c', '/tmp/httpd', '/tmp/.unlock',
'/tmp/update', '/tmp/.cinik', '/tmp/.b'], 'dir': [], 'ksyms': []}
SNEAKIN_FILES = {'name': 'Sneakin Rootkit', 'file': [], 'dir': ['/tmp/.X11-unix/.../rk'], 'ksyms': []}
WANUKDOOR_FILES = {'name': 'Solaris Wanuk backdoor',
'file': ['/var/adm/sa/.adm/.lp-door.i86pc', '/var/adm/sa/.adm/.lp-door.sun4',
'/var/spool/lp/admins/.lp-door.i86pc', '/var/spool/lp/admins/.lp-door.sun4',
'/var/spool/lp/admins/lpshut', '/var/spool/lp/admins/lpsystem',
'/var/spool/lp/admins/lpadmin', '/var/spool/lp/admins/lpmove',
'/var/spool/lp/admins/lpusers', '/var/spool/lp/admins/lpfilter',
'/var/spool/lp/admins/lpstat', '/var/spool/lp/admins/lpd',
'/var/spool/lp/admins/lpsched', '/var/spool/lp/admins/lpc'],
'dir': ['/var/adm/sa/.adm'], 'ksyms': []}
WANUKWORM_FILES = {'name': '<NAME>',
'file': ['/var/adm/.adm', '/var/adm/.i86pc', '/var/adm/.sun4', '/var/adm/sa/.adm',
'/var/adm/sa/.adm/.i86pc', '/var/adm/sa/.adm/.sun4', '/var/adm/sa/.adm/.crontab',
'/var/adm/sa/.adm/devfsadmd', '/var/adm/sa/.adm/svcadm', '/var/adm/sa/.adm/cfgadm',
'/var/adm/sa/.adm/kadmind', '/var/adm/sa/.adm/zoneadmd', '/var/adm/sa/.adm/sadm',
'/var/adm/sa/.adm/sysadm', '/var/adm/sa/.adm/dladm', '/var/adm/sa/.adm/bootadm',
'/var/adm/sa/.adm/routeadm', '/var/adm/sa/.adm/uadmin', '/var/adm/sa/.adm/acctadm',
'/var/adm/sa/.adm/cryptoadm', '/var/adm/sa/.adm/inetadm', '/var/adm/sa/.adm/logadm',
'/var/adm/sa/.adm/nlsadmin', '/var/adm/sa/.adm/sacadm',
'/var/adm/sa/.adm/syseventadmd', '/var/adm/sa/.adm/ttyadmd',
'/var/adm/sa/.adm/consadmd', '/var/adm/sa/.adm/metadevadm', '/var/adm/sa/.i86pc',
'/var/adm/sa/.sun4', '/var/adm/sa/acctadm', '/var/adm/sa/bootadm',
'/var/adm/sa/cfgadm', '/var/adm/sa/consadmd', '/var/adm/sa/cryptoadm',
'/var/adm/sa/devfsadmd', '/var/adm/sa/dladm', '/var/adm/sa/inetadm',
'/var/adm/sa/kadmind', '/var/adm/sa/logadm', '/var/adm/sa/metadevadm',
'/var/adm/sa/nlsadmin', '/var/adm/sa/routeadm', '/var/adm/sa/sacadm',
'/var/adm/sa/sadm', '/var/adm/sa/svcadm', '/var/adm/sa/sysadm',
'/var/adm/sa/syseventadmd', '/var/adm/sa/ttyadmd', '/var/adm/sa/uadmin',
'/var/adm/sa/zoneadmd', '/var/spool/lp/admins/.lp/.crontab',
'/var/spool/lp/admins/.lp/lpshut', '/var/spool/lp/admins/.lp/lpsystem',
'/var/spool/lp/admins/.lp/lpadmin', '/var/spool/lp/admins/.lp/lpmove',
'/var/spool/lp/admins/.lp/lpusers', '/var/spool/lp/admins/.lp/lpfilter',
'/var/spool/lp/admins/.lp/lpstat', '/var/spool/lp/admins/.lp/lpd',
'/var/spool/lp/admins/.lp/lpsched', '/var/spool/lp/admins/.lp/lpc'],
'dir': ['/var/adm/sa/.adm', '/var/spool/lp/admins/.lp'], 'ksyms': []}
SPANISH_FILES = {'name': 'Spanish Rootkit',
'file': ['/dev/ptyq', '/bin/ad', '/bin/ava', '/bin/server', '/usr/sbin/rescue',
'/usr/share/.../chrps', '/usr/share/.../chrifconfig', '/usr/share/.../netstat',
'/usr/share/.../linsniffer', '/usr/share/.../charbd', '/usr/share/.../charbd2',
'/usr/share/.../charbd3', '/usr/share/.../charbd4', '/usr/man/tmp/update.tgz',
'/var/lib/rpm/db.rpm', '/var/cache/man/.cat', '/var/spool/lpd/remote/.lpq'],
'dir': ['/usr/share/...'], 'ksyms': []}
SUCKIT_FILES = {'name': 'Suckit Rootkit',
'file': ['/sbin/initsk12', '/sbin/initxrk', '/usr/bin/null', '/usr/share/locale/sk/.sk12/sk',
'/etc/rc.d/rc0.d/S23kmdac', '/etc/rc.d/rc1.d/S23kmdac', '/etc/rc.d/rc2.d/S23kmdac',
'/etc/rc.d/rc3.d/S23kmdac', '/etc/rc.d/rc4.d/S23kmdac', '/etc/rc.d/rc5.d/S23kmdac',
'/etc/rc.d/rc6.d/S23kmdac'],
'dir': ['/dev/sdhu0/tehdrakg', '/etc/.MG', '/usr/share/locale/sk/.sk12',
'/usr/lib/perl5/site_perl/i386-linux/auto/TimeDate/.packlist'], 'ksyms': []}
NSDAP_FILES = {'name': 'NSDAP Rootkit',
'file': ['/dev/pts/01/55su', '/dev/pts/01/55ps', '/dev/pts/01/55ping', '/dev/pts/01/55login',
'/dev/pts/01/PATCHER_COMPLETED', '/dev/prom/sn.l', '/dev/prom/dos',
'/usr/lib/vold/nsdap/.kit', '/usr/lib/vold/nsdap/defines',
'/usr/lib/vold/nsdap/patcher', '/usr/lib/vold/nsdap/pg', '/usr/lib/vold/nsdap/cleaner',
'/usr/lib/vold/nsdap/utime', '/usr/lib/vold/nsdap/crypt', '/usr/lib/vold/nsdap/findkit',
'/usr/lib/vold/nsdap/sn2', '/usr/lib/vold/nsdap/sniffload',
'/usr/lib/vold/nsdap/runsniff', '/usr/lib/lpset', '/usr/lib/lpstart',
'/usr/bin/mc68000', '/usr/bin/mc68010', '/usr/bin/mc68020', '/usr/ucb/bin/ps',
'/usr/bin/m68k', '/usr/bin/sun2', '/usr/bin/mc68030', '/usr/bin/mc68040',
'/usr/bin/sun3', '/usr/bin/sun3x', '/usr/bin/lso', '/usr/bin/u370'],
'dir': ['/dev/pts/01', '/dev/prom', '/usr/lib/vold/nsdap', '/.pat'], 'ksyms': []}
SUNOSROOTKIT_FILES = {'name': 'SunOS Rootkit',
'file': ['/etc/ld.so.hash', '/lib/libext-2.so.7', '/usr/bin/ssh2d', '/bin/xlogin',
'/usr/lib/crth.o', '/usr/lib/crtz.o', '/sbin/login', '/lib/security/.config/sn',
'/lib/security/.config/lpsched', '/dev/kmod', '/dev/dos'],
'dir': [], 'ksyms': []}
SUPERKIT_FILES = {'name': 'Superkit Rootkit',
'file': ['/usr/man/.sman/sk/backsh', '/usr/man/.sman/sk/izbtrag', '/usr/man/.sman/sk/sksniff',
'/var/www/cgi-bin/cgiback.cgi'], 'dir': ['/usr/man/.sman/sk'], 'ksyms': []}
TBD_FILES = {'name': 'TBD(Telnet Backdoor)', 'file': ['/usr/lib/.tbd'], 'dir': [], 'ksyms': []}
TELEKIT_FILES = {'name': 'TeLeKiT Rootkit',
'file': ['/usr/man/man3/.../TeLeKiT/bin/sniff', '/usr/man/man3/.../TeLeKiT/bin/telnetd',
'/usr/man/man3/.../TeLeKiT/bin/teleulo', '/usr/man/man3/.../cl', '/dev/ptyr',
'/dev/ptyp', '/dev/ptyq', '/dev/hda06', '/usr/info/libc1.so'],
'dir': ['/usr/man/man3/...', '/usr/man/man3/.../lsniff', '/usr/man/man3/.../TeLeKiT'],
'ksyms': []}
TOGROOT_FILES = {'name': 'OSX Togroot Rootkit',
'file': ['/System/Library/Extensions/Togroot.kext/Contents/Info.plist',
'/System/Library/Extensions/Togroot.kext/Contents/pbdevelopment.plist',
'/System/Library/Extensions/Togroot.kext/Contents/MacOS/togrootkext'],
'dir': ['/System/Library/Extensions/Togroot.kext',
'/System/Library/Extensions/Togroot.kext/Contents',
'/System/Library/Extensions/Togroot.kext/Contents/MacOS'], 'ksyms': []}
TORN_FILES = {'name': 'T0rn Rootkit',
'file': ['/dev/.lib/lib/lib/t0rns', '/dev/.lib/lib/lib/du', '/dev/.lib/lib/lib/ls',
'/dev/.lib/lib/lib/t0rnsb', '/dev/.lib/lib/lib/ps', '/dev/.lib/lib/lib/t0rnp',
'/dev/.lib/lib/lib/find', '/dev/.lib/lib/lib/ifconfig', '/dev/.lib/lib/lib/pg',
'/dev/.lib/lib/lib/ssh.tgz', '/dev/.lib/lib/lib/top', '/dev/.lib/lib/lib/sz',
'/dev/.lib/lib/lib/login', '/dev/.lib/lib/lib/in.fingerd', '/dev/.lib/lib/lib/1i0n.sh',
'/dev/.lib/lib/lib/pstree', '/dev/.lib/lib/lib/in.telnetd', '/dev/.lib/lib/lib/mjy',
'/dev/.lib/lib/lib/sush', '/dev/.lib/lib/lib/tfn', '/dev/.lib/lib/lib/name',
'/dev/.lib/lib/lib/getip.sh', '/usr/info/.torn/sh*', '/usr/src/.puta/.1addr',
'/usr/src/.puta/.1file', '/usr/src/.puta/.1proc', '/usr/src/.puta/.1logz',
'/usr/info/.t0rn'],
'dir': ['/dev/.lib', '/dev/.lib/lib', '/dev/.lib/lib/lib', '/dev/.lib/lib/lib/dev',
'/dev/.lib/lib/scan', '/usr/src/.puta', '/usr/man/man1/man1', '/usr/man/man1/man1/lib',
'/usr/man/man1/man1/lib/.lib', '/usr/man/man1/man1/lib/.lib/.backup'],
'ksyms': []}
TRNKIT_FILES = {'name': 'trNkit Rootkit',
'file': ['/usr/lib/libbins.la', '/usr/lib/libtcs.so', '/dev/.ttpy/ulogin.sh',
'/dev/.ttpy/tcpshell.sh', '/dev/.ttpy/bupdu', '/dev/.ttpy/buloc', '/dev/.ttpy/buloc1',
'/dev/.ttpy/buloc2', '/dev/.ttpy/stat', '/dev/.ttpy/backps', '/dev/.ttpy/tree',
'/dev/.ttpy/topk', '/dev/.ttpy/wold', '/dev/.ttpy/whoold', '/dev/.ttpy/backdoors'],
'dir': [], 'ksyms': []}
TROJANIT_FILES = {'name': 'Trojanit Kit Rootkit',
'file': ['bin/.ls', '/bin/.ps', '/bin/.netstat', '/usr/bin/.nop', '/usr/bin/.who'], 'dir': [],
'ksyms': []}
TURTLE_FILES = {'name': 'Turtle Rootkit', 'file': ['/dev/turtle2dev'], 'dir': [], 'ksyms': []}
TUXTENDO_FILES = {'name': 'Tuxtendo Rootkit',
'file': ['/lib/libproc.so.2.0.7', '/usr/bin/xchk', '/usr/bin/xsf', '/dev/tux/suidsh',
'/dev/tux/.addr', '/dev/tux/.cron', '/dev/tux/.file', '/dev/tux/.log',
'/dev/tux/.proc', '/dev/tux/.iface', '/dev/tux/.pw', '/dev/tux/.df', '/dev/tux/.ssh',
'/dev/tux/.tux', '/dev/tux/ssh2/sshd2_config', '/dev/tux/ssh2/hostkey',
'/dev/tux/ssh2/hostkey.pub', '/dev/tux/ssh2/logo', '/dev/tux/ssh2/random_seed',
'/dev/tux/backup/crontab', '/dev/tux/backup/df', '/dev/tux/backup/dir',
'/dev/tux/backup/find', '/dev/tux/backup/ifconfig', '/dev/tux/backup/locate',
'/dev/tux/backup/netstat', '/dev/tux/backup/ps', '/dev/tux/backup/pstree',
'/dev/tux/backup/syslogd', '/dev/tux/backup/tcpd', '/dev/tux/backup/top',
'/dev/tux/backup/updatedb', '/dev/tux/backup/vdir'],
'dir': ['/dev/tux', '/dev/tux/ssh2', '/dev/tux/backup'], 'ksyms': []}
URK_FILES = {'name': 'Universal Rootkit',
'file': ['/dev/prom/sn.l', '/usr/lib/ldlibps.so', '/usr/lib/ldlibnet.so', '/dev/pts/01/uconf.inv',
'/dev/pts/01/cleaner', '/dev/pts/01/bin/psniff', '/dev/pts/01/bin/du',
'/dev/pts/01/bin/ls', '/dev/pts/01/bin/passwd', '/dev/pts/01/bin/ps',
'/dev/pts/01/bin/psr', '/dev/pts/01/bin/su', '/dev/pts/01/bin/find',
'/dev/pts/01/bin/netstat', '/dev/pts/01/bin/ping', '/dev/pts/01/bin/strings',
'/dev/pts/01/bin/bash', '/usr/man/man1/xxxxxxbin/du', '/usr/man/man1/xxxxxxbin/ls',
'/usr/man/man1/xxxxxxbin/passwd', '/usr/man/man1/xxxxxxbin/ps',
'/usr/man/man1/xxxxxxbin/psr', '/usr/man/man1/xxxxxxbin/su',
'/usr/man/man1/xxxxxxbin/find', '/usr/man/man1/xxxxxxbin/netstat',
'/usr/man/man1/xxxxxxbin/ping', '/usr/man/man1/xxxxxxbin/strings',
'/usr/man/man1/xxxxxxbin/bash', '/tmp/conf.inv'],
'dir': ['/dev/prom', '/dev/pts/01', '/dev/pts/01/bin', '/usr/man/man1/xxxxxxbin'], 'ksyms': []}
VCKIT_FILES = {'name': 'VcKit Rootkit', 'file': [],
'dir': ['/usr/include/linux/modules/lib.so', '/usr/include/linux/modules/lib.so/bin'],
'ksyms': []}
VAMPIRE_FILES = {'name': 'Vampire Rootkit', 'file': [], 'dir': [],
'ksyms': ['new_getdents', 'old_getdents', 'should_hide_file_name', 'should_hide_task_name']}
VOLC_FILES = {'name': 'Volc Rootkit',
'file': ['/usr/bin/volc', '/usr/lib/volc/backdoor/divine', '/usr/lib/volc/linsniff',
'/etc/rc.d/rc1.d/S25sysconf', '/etc/rc.d/rc2.d/S25sysconf', '/etc/rc.d/rc3.d/S25sysconf',
'/etc/rc.d/rc4.d/S25sysconf', '/etc/rc.d/rc5.d/S25sysconf'],
'dir': ['/var/spool/.recent', '/var/spool/.recent/.files', '/usr/lib/volc',
'/usr/lib/volc/backup'], 'ksyms': []}
WEAPONX_FILES = {'name': 'weaponX', 'file': ['/System/Library/Extensions/WeaponX.kext'], 'dir': ['/tmp/...'],
'ksyms': []}
XZIBIT_FILES = {'name': 'Xzibit Rootkit',
'file': ['/dev/dsx', '/dev/caca', '/dev/ida/.inet/linsniffer', '/dev/ida/.inet/logclear',
'/dev/ida/.inet/sense', '/dev/ida/.inet/sl2', '/dev/ida/.inet/sshdu',
'/dev/ida/.inet/s', '/dev/ida/.inet/ssh_host_key', '/dev/ida/.inet/ssh_random_seed',
'/dev/ida/.inet/sl2new.c', '/dev/ida/.inet/tcp.log', '/home/httpd/cgi-bin/becys.cgi',
'/usr/local/httpd/cgi-bin/becys.cgi', '/usr/local/apache/cgi-bin/becys.cgi',
'/www/httpd/cgi-bin/becys.cgi', '/www/cgi-bin/becys.cgi'],
'dir': ['/dev/ida/.inet'], 'ksyms': []}
XORGSUNOS_FILES = {'name': 'X-Org SunOS Rootkit',
'file': ['/usr/lib/libX.a/bin/tmpfl', '/usr/lib/libX.a/bin/rps', '/usr/bin/srload',
'/usr/lib/libX.a/bin/sparcv7/rps', '/usr/sbin/modcheck'],
'dir': ['/usr/lib/libX.a', '/usr/lib/libX.a/bin', '/usr/lib/libX.a/bin/sparcv7',
'/usr/share/man...'], 'ksyms': []}
ZARWT_FILES = {'name': 'zaRwT.KiT Rootkit',
'file': ['/dev/rd/s/sendmeil', '/dev/ttyf', '/dev/ttyp', '/dev/ttyn', '/rk/tulz'],
'dir': ['/rk', '/dev/rd/s'], 'ksyms': []}
ZK_FILES = {'name': 'ZK Rootkit',
'file': ['/usr/share/.zk/zk', '/usr/X11R6/.zk/xfs', '/usr/X11R6/.zk/echo', '/etc/1ssue.net',
'/etc/sysconfig/console/load.zk'],
'dir': ['/usr/share/.zk', '/usr/X11R6/.zk'], 'ksyms': []}
LOGIN_BACKDOOR_FILES = {'name': 'Miscellaneous login backdoors', 'file': ['/bin/.login', '/sbin/.login'],
'dir': [], 'ksyms': []}
Sniffer_FILES = {'name': 'Sniffer log',
'file': ['/usr/lib/libice.log', '/dev/prom/sn.l', '/dev/fd/.88/zxsniff.log'],
'dir': [], 'ksyms': []}
SUSPICIOUS_DIRS = {'name': 'Suspicious dir', 'file': [], 'dir': ['/usr/X11R6/bin/.,/copy', '/dev/rd/cdb'],
'ksyms': []}
Apache_Door = {'name': 'Apache backdoor',
'file': ['/etc/apache2/mods-enabled/mod_rootme.so', '/etc/apache2/mods-enabled/mod_rootme2.so',
'/etc/httpd/modules/mod_rootme.so', '/etc/httpd/modules/mod_rootme2.so',
'/usr/apache/libexec/mod_rootme.so', '/usr/apache/libexec/mod_rootme2.so',
'/usr/lib/modules/mod_rootme.so', '/usr/lib/modules/mod_rootme2.so',
'/usr/local/apache/modules/mod_rootme.so', '/usr/local/apache/modules/mod_rootme2.so',
'/usr/local/apache/conf/mod_rootme.so', '/usr/local/apache/conf/mod_rootme2.so',
'/usr/local/etc/apache/mod_rootme.so', '/usr/local/etc/apache/mod_rootme2.so',
'/etc/apache/mod_rootme.so', '/etc/apache/mod_rootme2.so',
'/etc/httpd/conf/mod_rootme.so', '/etc/httpd/conf/mod_rootme2.so'], 'dir': [],
'ksyms': []}
self.LKM_BADNAMES = ['adore.o', 'bkit-adore.o', 'cleaner.o', 'flkm.o', 'knark.o', 'modhide.o', 'mod_klgr.o',
'phide_mod.o', 'vlogger.o', 'p2.ko', 'rpldev.o', 'xC.o', 'strings.o', 'wkmr26.o']
self.rootkit_rules = []
self.rootkit_rules = [W55808A, Adore_Rootkit, AjaKit_Rootkit, aPa_Kit_Rootkit, Apache_Worm, Ambient_Rootkit,
Balaur_Rootkit, Beastkit_Rootkit, beX2_Rootkit, BOBkit_Rootkit,
OSX_Boonana_A_Trojan, cb_Rootkit, CiNIK_Worm, CX_Rootkit, Abuse_Kit, Devil_Rootkit,
Diamorphine_LKM, Dica_Kit_Rootkit, Dreams_Rootkit, Duarawkz_Rootkit, Ebury_sshd_backdoor,
ENYE_LKM, Flea_Rootkit, FreeBSD_Rootkit, Fu_Rootkit, Fuckit_Rootkit, GasKit_Rootkit,
Heroin_LKM, HjC_Kit_Rootkit, ignoKit_Rootkit, iLLogiC_Rootkit, OSX_Inqtana, OSX_Inqtana2,
OSX_Inqtana3, IntoXonia_NG_Rootkit, Irix_Rootkit, Jynx_Rootkit, Jynx2_Rootkit,
KBeast_Rootkit, OSX_Keydnap_backdoor, Kitko_Rootkit, KNARK_FILES, KOMPLEX_FILES,
LINUXV_FILES, LION_FILES, LOCKIT_FILES, MOKES_FILES, MRK_FILES, MOODNT_FILES, NIO_FILES,
OHHARA_FILES, OPTICKIT_FILES, OSXRK_FILES, OZ_FILES, PHALANX_FILES, PHALANX2_FILES,
PORTACELO_FILES, PROTON_FILES, REDSTORM_FILES, RHSHARPES_FILES, RSHA_FILES,
SHUTDOWN_FILES, SCALPER_FILES, SHV4_FILES, SHV5_FILES, SINROOTKIT_FILES, SLAPPER_FILES,
SNEAKIN_FILES, WANUKDOOR_FILES, WANUKWORM_FILES, SPANISH_FILES, SUCKIT_FILES, NSDAP_FILES,
SUNOSROOTKIT_FILES, SUPERKIT_FILES, TBD_FILES, TELEKIT_FILES, TOGROOT_FILES, TORN_FILES,
TRNKIT_FILES, TROJANIT_FILES, TURTLE_FILES, TUXTENDO_FILES, URK_FILES, VCKIT_FILES,
VAMPIRE_FILES, VOLC_FILES, WEAPONX_FILES, XZIBIT_FILES, XORGSUNOS_FILES, ZARWT_FILES,
ZK_FILES, LOGIN_BACKDOOR_FILES, Sniffer_FILES, SUSPICIOUS_DIRS, Apache_Door]
# Read the kernel symbol table.
def get_kmsinfo(self):
    """Load kernel symbol names into ``self.kallsyms``.

    Prefers /proc/kallsyms (third whitespace column = symbol name, extracted
    via awk exactly as before); falls back to the older /proc/ksyms (raw
    lines). ``self.kallsyms`` is now always set — an empty list when neither
    file exists or reading fails — so callers iterating it can no longer hit
    AttributeError (the original left the attribute unset in that case).
    """
    self.kallsyms = []  # safe default
    try:
        # cat /proc/kallsyms | awk '{print $3}'
        if os.path.exists('/proc/kallsyms'):
            self.kallsyms = os.popen("cat /proc/kallsyms 2>/dev/null|awk '{print $3}'").read().splitlines()
        elif os.path.exists('/proc/ksyms'):
            self.kallsyms = os.popen("cat /proc/ksyms").read().splitlines()
    except Exception:  # narrowed from bare except; the scan is best-effort
        return
# Match one rootkit signature (files, directories, kernel symbols).
def check_rootkit_rules(self, rootkit_info):
    """Match a single rootkit rule dict against the local system.

    rootkit_info carries 'name', 'file', 'dir' and 'ksyms' lists; the scan
    reports and stops at the first hit. Returns (suspicious, malice).
    NOTE(review): source indentation was not preserved upstream; the early
    returns are assumed to sit inside the ``if os.path.exists`` blocks
    (stop-at-first-hit), matching the sibling check_bad_LKM — confirm.
    """
    suspicious, malice = False, False
    try:
        # 1) Known rootkit file paths.
        for file in rootkit_info['file']:
            if os.path.exists(file):
                malice_result(self.name, rootkit_info['name'], file, '',
                              u'匹配到名为%s的rootkit文件规则 %s' % (rootkit_info['name'], file),
                              u'[1]strings %s' % file, u'风险', programme=u'rm %s #删除rootkit恶意文件' % file)
                malice = True
                return suspicious, malice
        # 2) Known rootkit directories.
        for dir in rootkit_info['dir']:
            if os.path.exists(dir):
                malice_result(self.name, rootkit_info['name'], dir, '',
                              u'匹配到名为%s的rootkit目录规则 %s' % (rootkit_info['name'], dir), u'[1]ls -a %s' % dir, u'风险',
                              programme=u'rm -rf %s #删除rootkit恶意文件' % dir)
                malice = True
                return suspicious, malice
        # 3) Known rootkit kernel-symbol names (substring match per symbol line).
        self.get_kmsinfo()
        for kms in self.kallsyms:
            for ksyms in rootkit_info['ksyms']:
                if ksyms in kms:
                    malice_result(self.name, rootkit_info['name'], '/proc/kallsyms', '',
                                  u'匹配到名为 %s 的rootkit内核符合表特征 %s' % (rootkit_info['name'], ksyms),
                                  u'[1]cat /proc/kallsyms', u'风险')
                    malice = True
                    return suspicious, malice
        return suspicious, malice
    except:
        # NOTE(review): bare except silently hides any error during the scan.
        return suspicious, malice
# Check kernel module files against known-bad LKM names.
def check_bad_LKM(self):
    """Scan /lib/modules for module files whose basename matches a known
    rootkit LKM name (``self.LKM_BADNAMES``).

    Returns (suspicious, malice); stops at the first hit, mirroring the
    file/dir rule checks.
    """
    suspicious, malice = False, False
    try:
        if not os.path.exists('/lib/modules/'):
            return suspicious, malice
        infos = os.popen(
            'find /lib/modules/ -name "*.so" -o -name "*.ko" -o -name "*.ko.xz" 2>/dev/null').read().splitlines()
        for file in infos:
            # Direct membership test instead of the original inner loop
            # over every bad name for every file.
            base = os.path.basename(file)
            if base in self.LKM_BADNAMES:
                malice_result(self.name, u'LKM内核模块检测', file, '', u'匹配文件 %s 具备恶意特征 %s' % (file, base),
                              u'[1]cat /proc/kallsyms', u'风险', programme=u'rm %s #删除rootkit恶意文件' % file)
                malice = True
                return suspicious, malice
        return suspicious, malice
    except Exception:  # narrowed from bare except; best-effort scan
        return suspicious, malice
def run(self):
    """Drive the full rootkit scan: banner, one numbered check per rule,
    the kernel-module name check, then flush results to the report file."""
    print(u'\n开始Rootkit类安全扫描')
    file_write(u'\n开始Rootkit类安全扫描\n')
    checked = 0
    for checked, rootkit_info in enumerate(self.rootkit_rules, start=1):
        string_output(u' [%d]%s' % (checked, rootkit_info['name']))
        result_output_tag(*self.check_rootkit_rules(rootkit_info))
    # Continue the numbering after the last signature rule.
    string_output(u' [%d]检测LKM内核模块' % (checked + 1))
    result_output_tag(*self.check_bad_LKM())
    # Write the collected findings to the report file.
    result_output_file(self.name)
# Script entry point: build the analyser and run the full scan directly.
if __name__ == '__main__':
    info = Rootkit_Analysis()
    info.run()
| 1.921875 | 2 |
numpy_benchmarks/benchmarks/harris.py | adriendelsalle/numpy-benchmarks | 33 | 12759997 | #from: parakeet testbed
#setup: import numpy as np ; np.random.seed(0); M, N = 512, 512 ; I = np.random.randn(M,N)
#run: harris(I)
#pythran export harris(float64[][])
import numpy as np
def harris(I):
    """Harris corner response map for a 2-D image.

    Builds finite-difference gradients cropped to a common (m-1, n-1)
    grid, then returns det(M) - k*trace(M)**2 of the per-pixel structure
    tensor M = [[dx*dx, dx*dy], [dx*dy, dy*dy]].
    """
    # np.diff along an axis is exactly the shifted subtraction of the
    # original; the extra slice crops both gradients to the same shape.
    dx = np.diff(I, axis=0)[:, 1:]
    dy = np.diff(I, axis=1)[1:, :]
    # Structure-tensor entries.
    dx2 = dx * dx
    dy2 = dy * dy
    dxy = dx * dy
    trace = dx2 + dy2
    determinant = dx2 * dy2 - dxy * dxy
    k = 0.05
    return determinant - k * trace * trace
| 3.03125 | 3 |
main.py | hannu-hell/digu_card_game | 5 | 12759998 | <gh_stars>1-10
from digu import *
from tkinter import *
import pygame
import random
from tkinter import messagebox
# tkinter preconditions
# Set by quit_pi() when the pre-game dialog is closed via the window 'x',
# so the pygame stage below is skipped entirely.
game_quit = False
check_for_quit = False
def quit_pi():
    """Close handler for the player-info dialog: tear the dialog down and
    flag the game to abort before the pygame stage starts."""
    # 'run' was declared global here but never assigned, so the
    # declaration had no effect; dropped.
    global game_quit, check_for_quit
    initial_info.destroy()
    game_quit = True
    check_for_quit = True
player_information = []  # filled by the dialog; [0] becomes the player's name
Trumps = False  # True when the human's coin-toss call wins trump-choosing rights
# Pregame information
# tkinter Functions
def get_info():
    """Confirm-button handler: validate the entered player name, record it,
    run the coin toss for trump-calling rights, and close the dialog."""
    global Trumps
    player_name = text_1.get()
    if not player_name:  # truthiness instead of == '' (identical for str)
        messagebox.showerror('Error', 'Please Enter a valid Player Name')
    else:
        player_information.append(player_name)
        # coin_toss reports whether the player's Heads/Tails call won.
        Trumps = bool(coin_toss(call_trumps.get()))
        initial_info.quit()
        initial_info.destroy()
# --- Pre-game dialog: collect the player's name and coin-toss call ---
initial_info = Tk()
initial_info.title('Digu Player Information')
frame_1 = Frame(initial_info)
frame_1.pack(side=LEFT)
label_1 = Label(frame_1, text='Player Name')
label_1.pack(side=TOP)
text_1 = Entry(frame_1)
text_1.pack(side=TOP)
label_2 = Label(frame_1, text='Call Heads or Tails for calling Trumps')
label_2.pack(side=TOP)
# Dropdown holding the player's coin-toss call (defaults to Heads).
call_trumps = StringVar(initial_info)
call_trumps.set('Heads')
option = OptionMenu(frame_1, call_trumps, 'Heads', 'Tails')
option.pack(side=TOP)
button_1 = Button(frame_1, text='Confirm', command=get_info)
button_1.pack(side=BOTTOM)
# Closing the dialog with 'x' aborts the whole game via quit_pi.
initial_info.protocol("WM_DELETE_WINDOW", quit_pi)  # if 'x' on the window is pressed then the function is performed
initial_info.mainloop()
# Py_game Preconditions
if game_quit is False:
pygame.init()
win = pygame.display.set_mode((1000, 668))
pygame.display.set_caption('DIGU')
background = pygame.image.load('wooden_table.jpg')
dhu = pygame.image.load('dhufun.png')
icon = pygame.image.load('cards_icon.png')
pygame.display.set_icon(icon)
font = pygame.font.Font(None, 30)
i_font = pygame.font.Font(None, 20)
player_name = font.render(player_information[0].upper(), True, (0, 150, 255))
player_teammate = font.render(player_information[0].upper() + " TEAMMATE", True, (0, 150, 255))
computer = font.render('COMP', True, (255, 255, 255))
computer = pygame.transform.rotate(computer, 90)
comp_team_mate = font.render("COMP TEAMMATE", True, (255, 255, 255))
comp_team_mate = pygame.transform.rotate(comp_team_mate, 270)
main_deck = Deck()
main_deck.shuffle()
player_hand = main_deck.deal_hand(13)
player_teammate_hand = main_deck.deal_hand(13)
computer_hand = main_deck.deal_hand(13)
computer_teammate_hand = main_deck.deal_hand(13)
displayed_names = False
player_teammate_X = 350
player_trump_x = 750
player_trump_change = 1
player_trump_selected = False
comp_trump_selected = False
trump = ''
stop_initialize = False
player_round_done = False
comp_round_done = False
player_tm_round_done = False
comp_tm_round_done = False
comp_trumps = []
comp_teammate_trumps = []
player_teammate_trumps = []
player_played_hands = []
comp_played_hands = []
player_teammate_played_hands = []
comp_teammate_played_hands = []
trump_list_chosen = False
comp_won_hands = 0
player_won_hands = 0
comp_tm_won_hands = 0
player_tm_won_hands = 0
all_rounds_done = False
winner = None
game_intro = True
x1 = 90
y1 = 260
x2 = 250
y2 = 480
x3 = 820
y3 = 260
x4 = 250
y4 = 130
round_status = dict(round1=False, round2=False, round3=False, round4=False, round5=False, round6=False,
round7=False,
round8=False, round9=False, round10=False, round11=False, round12=False, round13=False)
deck_clubs = [pygame.image.load('Clubs 1.png'), pygame.image.load('Clubs 2.png'), pygame.image.load('Clubs 3.png'),
pygame.image.load('Clubs 4.png'), pygame.image.load('Clubs 5.png'), pygame.image.load('Clubs 6.png'),
pygame.image.load('Clubs 7.png'),
pygame.image.load('Clubs 8.png'), pygame.image.load('Clubs 9.png'), pygame.image.load('Clubs 10.png'),
pygame.image.load('Clubs 11.png'),
pygame.image.load('Clubs 12.png'), pygame.image.load('Clubs 13.png')]
deck_diamonds = [pygame.image.load('Diamond 1.png'), pygame.image.load('Diamond 2.png'),
pygame.image.load('Diamond 3.png'), pygame.image.load('Diamond 4.png'),
pygame.image.load('Diamond 5.png'), pygame.image.load('Diamond 6.png'),
pygame.image.load('Diamond 7.png'), pygame.image.load('Diamond 8.png'),
pygame.image.load('Diamond 9.png'),
pygame.image.load('Diamond 10.png'), pygame.image.load('Diamond 11.png'),
pygame.image.load('Diamond 12.png'), pygame.image.load('Diamond 13.png')]
deck_hearts = [pygame.image.load('Hearts 1.png'), pygame.image.load('Hearts 2.png'), pygame.image.load('Hearts 3.png'),
pygame.image.load('Hearts 4.png'), pygame.image.load('Hearts 5.png'), pygame.image.load('Hearts 6.png'),
pygame.image.load('Hearts 7.png'),
pygame.image.load('Hearts 8.png'), pygame.image.load('Hearts 9.png'), pygame.image.load('Hearts 10.png'),
pygame.image.load('Hearts 11.png'), pygame.image.load('Hearts 12.png'),
pygame.image.load('Hearts 13.png')]
deck_spades = [pygame.image.load('Spades 1.png'), pygame.image.load('Spades 2.png'), pygame.image.load('Spades 3.png'),
pygame.image.load('Spades 4.png'), pygame.image.load('Spades 5.png'), pygame.image.load('Spades 6.png'),
pygame.image.load('Spades 7.png'),
pygame.image.load('Spades 8.png'), pygame.image.load('Spades 9.png'), pygame.image.load('Spades 10.png'),
pygame.image.load('Spades 11.png'),
pygame.image.load('Spades 12.png'), pygame.image.load('Spades 13.png')]
d_intro_suit = pygame.image.load('diamonds.png')
i_intro_suit = pygame.image.load('clubs.png')
g_intro_suit = pygame.image.load('hearts.png')
u_intro_suit = pygame.image.load('spades.png')
card_back = pygame.image.load('Back Red 1.png')
player_turn = pygame.image.load('player_turn.png')
player_trump = pygame.image.load('player_trump.png')
comp_turn = pygame.image.load('comp_turn.png')
comp_trump = pygame.image.load('comp_trump.png')
first = [num for num in range(20, 920 + 75, 75)]
second = [num for num in range(67, 967 + 75, 75)]
card_images = [deck_hearts, deck_clubs, deck_spades, deck_diamonds]
player_deck_rectangle = pygame.Rect(20, 540, 900, 100)
player_deck_bg = pygame.image.load('ply_deck_bg.jpg')
comp_deck_rectangle = pygame.Rect(40, 150, 50, 405)
comp_deck = pygame.image.load('comp_deck.jpg')
comp_tm_deck_rectangle = pygame.Rect(900, 150, 50, 405)
comp_tm_deck = pygame.image.load('comp_tm_deck.jpg')
player_tm_deck_rectangle = pygame.Rect(350, 60, 610, 65)
player_tm_deck = pygame.image.load('ply_tm_deck.jpg')
play_area_rectangle = pygame.Rect(430, 267, 62, 72)
play_area = pygame.image.load('play_area.jpg')
# py_game Functions
# Initialize_game functions
def display_player_names():
    """Redraw the table background and all four seat labels.

    NOTE(review): this calls pygame.display.set_mode() on every invocation
    and binds the result to a local ``win`` that shadows the module-level
    surface — re-setting the mode per frame looks unintended; confirm.
    """
    win = pygame.display.set_mode((1000, 668))
    win.blit(background, (0, 0))
    win.blit(player_name, (450, 630))
    win.blit(player_teammate, (400, 20))
    win.blit(computer, (10, 300))
    win.blit(comp_team_mate, (970, 220))
def player_deck_clear():
    # Repaint the human player's hand strip and name label at the bottom.
    win.blit(player_deck_bg, player_deck_rectangle)
    win.blit(player_name, (450, 630))
def comp_deck_clear():
    # Repaint the computer's face-down column on the left.
    win.blit(comp_deck, comp_deck_rectangle)
def comp_tm_deck_clear():
    # Repaint the computer teammate's face-down column on the right.
    win.blit(comp_tm_deck, comp_tm_deck_rectangle)
def player_tm_deck_clear():
    # Repaint the human teammate's face-down row at the top.
    win.blit(player_tm_deck, player_tm_deck_rectangle)
def play_area_clear():
    # Repaint the central trick/play area.
    win.blit(play_area, play_area_rectangle)
def clear_all_decks():
    # Wipe every deck region plus the play area in one call.
    player_deck_clear()
    comp_deck_clear()
    comp_tm_deck_clear()
    player_tm_deck_clear()
    play_area_clear()
def start_new_round():
    """Reset the per-seat 'has played' flags once all four seats have
    played their card for the current trick."""
    global player_round_done, player_tm_round_done, comp_round_done, comp_tm_round_done
    everyone_played = (player_round_done and player_tm_round_done
                       and comp_round_done and comp_tm_round_done)
    if everyone_played:
        player_round_done = comp_round_done = False
        player_tm_round_done = comp_tm_round_done = False
def match_cards(i, x, y):
    """Blit the face image of card *i* at pixel (x, y).

    card_images is indexed [suit][value] in the same order as the digu
    module's ``suits`` and ``values`` sequences; a card whose suit or
    value is not listed is silently skipped, exactly as before (the
    original tracked the same indices with manual counters)."""
    for suit_idx, suit_name in enumerate(suits):
        if i.suit == suit_name:
            for value_idx, value_name in enumerate(values):
                if i.value == value_name:
                    win.blit(card_images[suit_idx][value_idx], (x, y))
def card_assignment_for_players(x_limit, no_comp, no_comptm, no_plytm, comp_limit, comptm_limit, playertm_limit):
    """Redraw all four hands.

    The human hand is drawn face up from player_hand along the bottom; the
    three other seats are drawn as card backs only. ``x_limit`` and the
    ``*_limit`` arguments are the wrap-around pixel bounds (they shrink as
    cards are played); ``no_*`` is how many backs to draw per AI seat.
    """
    global player_hand
    x = 20
    y = 550
    computer_y = 150
    computer_teammate_y = 150
    player_teammate_x = 350
    # Human hand, face up, 75px per slot.
    for i in player_hand:
        match_cards(i, x, y)
        x += 75
        if x > x_limit:
            x = 20
    # Left seat: face-down column, 20px spacing.
    for card in range(no_comp):
        win.blit(card_back, (40, computer_y))
        computer_y += 20
        if computer_y > comp_limit:
            computer_y = 150
    # Right seat: face-down column.
    for card in range(no_comptm):
        win.blit(card_back, (900, computer_teammate_y))
        computer_teammate_y += 20
        if computer_teammate_y > comptm_limit:
            computer_teammate_y = 150
    # Top seat: face-down row.
    for card in range(no_plytm):
        win.blit(card_back, (player_teammate_x, 60))
        player_teammate_x += 20
        if player_teammate_x > playertm_limit:
            player_teammate_x = 350
def player_select_trump():
    """Poll mouse events and let the player pick the trump suit by clicking
    one of their cards in the bottom strip (y 550-612).

    NOTE(review): indentation reconstructed — the three render lines are
    assumed to run only after a card slot was actually hit; confirm.
    """
    global player_trump_selected, trump, run
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            run = False  # signals the main loop (outside this view) to stop
        if event.type == pygame.MOUSEBUTTONDOWN:
            if event.button == 1:
                if 550 <= event.pos[1] <= 612:
                    # first/second hold the left/right pixel bound per card slot.
                    for i in range(len(first)):
                        if first[i] <= event.pos[0] <= second[i]:
                            trump = player_hand[i].suit
                            player_trump_selected = True
                            show_trump = font.render('TRUMP IS ' + trump.upper(), True, (0, 150, 255))
                            win.blit(show_trump, (20, 60))
                            win.blit(player_turn, (700, 480))
def comp_select_trump():
    """Computer picks trumps: the suit it holds most of.

    Ties resolve in favour of the later suit in hearts/spades/clubs/
    diamonds order, because every one of the four >= checks runs and the
    last winner overwrites ``trump`` — same as the original cascade.
    """
    global comp_trump_selected, trump
    hearts = sum(1 for card in computer_hand if card.suit == 'hearts')
    spades = sum(1 for card in computer_hand if card.suit == 'spades')
    clubs = sum(1 for card in computer_hand if card.suit == 'clubs')
    diamonds = sum(1 for card in computer_hand if card.suit == 'diamonds')
    if hearts >= spades and hearts >= clubs and hearts >= diamonds:
        trump = 'hearts'
    if spades >= hearts and spades >= clubs and spades >= diamonds:
        trump = 'spades'
    if clubs >= hearts and clubs >= spades and clubs >= diamonds:
        trump = 'clubs'
    if diamonds >= hearts and diamonds >= spades and diamonds >= clubs:
        trump = 'diamonds'
    show_trump = font.render('TRUMP IS ' + trump.upper(), True, (255, 255, 255))
    win.blit(show_trump, (20, 60))
    win.blit(comp_turn, (90, 180))
    comp_trump_selected = True
def trump_movement():
    # Bounce the "pick a trump" prompt horizontally between x=750 and
    # x=800 so it catches the player's eye.
    global player_trump_x, player_trump_change
    win.blit(player_trump, (player_trump_x, 480))
    player_trump_x += player_trump_change
    if player_trump_x > 800:
        player_trump_change = -1
    elif player_trump_x < 750:
        player_trump_change = 1
def determine_trumps():
    # Whoever won the coin toss picks trumps: the human via mouse input,
    # otherwise the computer immediately (which then triggers a redraw).
    if Trumps:
        win.blit(player_turn, (700, 480))
        trump_movement()
        player_select_trump()
    elif Trumps is False:
        comp_select_trump()
        card_assignment_for_players(920, 13, 13, 13, 390, 390, 590)
def initialize_game():
    # Pre-round frame: draw the table with full 13-card hands and keep
    # prompting for trumps until one side has chosen.
    if player_trump_selected is False and comp_trump_selected is False:
        display_player_names()
        card_assignment_for_players(920, 13, 13, 13, 390, 390, 590)
        determine_trumps()
# Round Functions
def initial_layout():
    # Base frame for every round: table, seat labels and the chosen trump.
    display_player_names()
    show_trump = font.render('TRUMP IS ' + trump.upper(), True, (255, 255, 255))
    win.blit(show_trump, (20, 60))
def trump_list():
    """Move every trump-suit card out of the three non-human hands into
    their dedicated trump lists, once a trump suit has been chosen.

    Bug fix: the original removed items from each hand *while iterating
    it*, which makes the iterator skip the element right after every
    removal — consecutive trump cards were left behind in the hand.
    Iterating over a snapshot copy fixes that without changing anything
    else.
    """
    global trump_list_chosen
    if player_trump_selected or comp_trump_selected:
        for i in list(computer_hand):
            if i.suit == trump:
                comp_trumps.append(i)
                computer_hand.remove(i)
        for j in list(computer_teammate_hand):
            if j.suit == trump:
                comp_teammate_trumps.append(j)
                computer_teammate_hand.remove(j)
        for k in list(player_teammate_hand):
            if k.suit == trump:
                player_teammate_trumps.append(k)
                player_teammate_hand.remove(k)
        trump_list_chosen = True
def check_cheat(rnd, card):
    """Return True when playing *card* would break the follow-suit rule.

    Looks at the suit led this trick (by the previous trick's winner, or
    the computer when no winner is recorded yet) and returns True if the
    player still holds that suit while *card* is of a different one.
    ``rnd`` is the 0-based trick index into the played-hands lists.
    """
    if comp_round_done:
        for j in player_hand:
            if winner is None or winner == 'comp':
                if j.suit == comp_played_hands[rnd].suit:
                    if card.suit != comp_played_hands[rnd].suit:
                        return True
            elif winner == 'comp_tm':
                if j.suit == comp_teammate_played_hands[rnd].suit:
                    if card.suit != comp_teammate_played_hands[rnd].suit:
                        return True
            elif winner == 'player_tm':
                if j.suit == player_teammate_played_hands[rnd].suit:
                    if card.suit != player_teammate_played_hands[rnd].suit:
                        return True
        return False
    if not comp_round_done:
        # No lead card yet this trick, so anything is legal.
        return False
def player_play(cl, rnd, turn, a, b, c, d, e, f, g):
    """Handle the human player's mouse input for the current trick.

    cl: clear the table before drawing; rnd: 1-based trick number;
    turn: number of clickable card slots left in the hand; a..g are
    forwarded to card_assignment_for_players for the redraw.
    """
    global player_round_done, run
    rnd -= 1  # convert to 0-based index into the played-hands lists
    # event = pygame.event.wait()
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            run = False
        if event.type == pygame.MOUSEBUTTONDOWN:
            if event.button == 1:
                # Click landed inside the hand strip at the bottom.
                if 550 <= event.pos[1] <= 612:
                    for i in range(turn):
                        if first[i] <= event.pos[0] <= second[i]:
                            if cl:
                                clear_all_decks()
                                win.blit(player_turn, (700, 480))
                            # Illegal (revoke) card: retry input.
                            # NOTE(review): the recursive call passes the
                            # already-decremented rnd, which gets
                            # decremented again inside, and check_cheat is
                            # evaluated twice — confirm intent.
                            if check_cheat(rnd, player_hand[i]):
                                player_play(cl, rnd, turn, a, b, c, d, e, f, g)
                            elif check_cheat(rnd, player_hand[i]) is False:
                                match_cards(player_hand[i], 450, 330)
                                player_played_hands.append(player_hand.pop(i))
                                player_deck_clear()
                                card_assignment_for_players(a, b, c, d, e, f, g)
                                player_round_done = True
def have_suits(hand, s):
    """Return True if any card in *hand* is of suit *s*."""
    return any(card.suit == s for card in hand)
def check_least_high_card(c, hand):
    """Return the weakest card in *hand* of c's suit that still beats *c*.

    Walks n upward from c's rank index, trying each higher rank in turn;
    the ace is handled separately at n == 12 as the top rank, and n == 0
    (c is already the highest rank) returns None immediately. Returns
    None implicitly when no beating card exists.

    NOTE(review): relies on the digu module's ``values`` ordering —
    presumably ['A', '2', ..., 'K'] with the ace ranking high; confirm
    against digu.py. Indentation below was reconstructed from stripped
    source.
    """
    n = 0
    for s in suits:
        for m in range(13):
            # Locate c's rank index n within its suit.
            if c.suit == s and c.value == values[n]:
                for k in range(13):
                    for i in hand:
                        if n == 0:
                            return None
                        elif n == 12:
                            if i.suit == s and i.value == 'A':
                                return i
                        elif 0 < n < 12:
                            if i.suit == s and i.value == values[n + 1]:
                                return i
                    if n == 12:
                        break
                    n += 1
                    if n > 12:
                        n = 0
                        break
            if n == 12:
                break
            n += 1
        n = 0
def check_lowest_value_card_of_suit(hand, s):
    """Return the lowest-ranked card of suit *s* in *hand*.

    Scans ranks upward starting at values[1]; the ace is tried last (at
    n == 12) since it ranks highest. Returns None when the suit is absent.

    NOTE(review): assumes digu's ``values`` starts with 'A' followed by
    ascending ranks — confirm. Indentation reconstructed.
    """
    n = 0
    for k in range(13):
        for i in hand:
            if i.suit == s:
                if i.value == values[n + 1]:
                    return i
        n += 1
        if n == 12:
            # Only the ace (highest rank) can remain for this suit.
            for j in hand:
                if j.suit == s:
                    if j.value == 'A':
                        return j
        if n > 12:
            return None
def check_lowest_value_card(hand):
    """Return the lowest-ranked card in *hand* regardless of suit.

    Same upward scan as check_lowest_value_card_of_suit: ranks from
    values[1] upward, ace tried last. Returns None for an empty hand.

    NOTE(review): assumes digu's ``values`` starts with 'A' followed by
    ascending ranks — confirm. Indentation reconstructed.
    """
    n = 0
    for k in range(13):
        for i in hand:
            if i.value == values[n + 1]:
                return i
        n += 1
        if n == 12:
            for j in hand:
                if j.value == 'A':
                    return j
        if n > 12:
            return None
def check_highest_value_card_of_suit(hand, s):
    """Return the highest-ranked card of suit *s* in *hand*.

    The ace (top rank) is checked first, then ranks are scanned downward
    from values[12]; None when the suit is absent.

    NOTE(review): assumes digu's ``values`` starts with 'A' followed by
    ascending ranks — confirm. Indentation reconstructed.
    """
    n = 13
    for j in hand:
        if j.suit == s:
            if j.value == "A":
                return j
    for k in range(13):
        for i in hand:
            if i.suit == s:
                if i.value == values[n - 1]:
                    return i
        n -= 1
        if n < 2:
            return None
def check_highest_value_card(hand):
    """Return the highest-ranked card in *hand* regardless of suit.

    Ace first, then a downward scan from values[12]; None for an empty
    hand.

    NOTE(review): assumes digu's ``values`` starts with 'A' followed by
    ascending ranks — confirm. Indentation reconstructed.
    """
    n = 13
    for j in hand:
        if j.value == "A":
            return j
    for k in range(13):
        for i in hand:
            if i.value == values[n - 1]:
                return i
        n -= 1
        if n < 2:
            return None
def comp_tm_set_play(played_trumps, card, a, b, c, d, e, f, g):
    """Play *card* for the computer teammate: draw it at (470, 300), move
    it from the teammate's hand (or trump stash when *played_trumps* is
    truthy) onto the played pile, then repaint the seat and the remaining
    card backs. a..g are forwarded to card_assignment_for_players."""
    global comp_tm_round_done
    # The two original branches differed only in the source list.
    if played_trumps is False:
        source = computer_teammate_hand
    elif played_trumps:
        source = comp_teammate_trumps
    else:
        return  # non-False falsy flag: the original did nothing either
    match_cards(card, 470, 300)
    comp_teammate_played_hands.append(source.pop(source.index(card)))
    comp_tm_deck_clear()
    card_assignment_for_players(a, b, c, d, e, f, g)
    comp_tm_round_done = True
def player_tm_set_play(played_trumps, card, a, b, c, d, e, f, g):
    """Play *card* for the human's teammate: draw it at (450, 270), move
    it from the teammate's hand (or trump stash when *played_trumps* is
    truthy) onto the played pile, then repaint the seat and the remaining
    card backs. a..g are forwarded to card_assignment_for_players."""
    global player_tm_round_done
    # The two original branches differed only in the source list.
    if played_trumps is False:
        source = player_teammate_hand
    elif played_trumps:
        source = player_teammate_trumps
    else:
        return  # non-False falsy flag: the original did nothing either
    match_cards(card, 450, 270)
    player_teammate_played_hands.append(source.pop(source.index(card)))
    player_tm_deck_clear()
    card_assignment_for_players(a, b, c, d, e, f, g)
    player_tm_round_done = True
def comp_set_play(played_trumps, card, a, b, c, d, e, f, g):
    """Play *card* for the computer: draw it at (430, 300), move it from
    the computer's hand (or trump stash when *played_trumps* is truthy)
    onto the played pile, then repaint the seat and the remaining card
    backs. a..g are forwarded to card_assignment_for_players."""
    global comp_round_done
    # The two original branches differed only in the source list.
    if played_trumps is False:
        source = computer_hand
    elif played_trumps:
        source = comp_trumps
    else:
        return  # non-False falsy flag: the original did nothing either
    match_cards(card, 430, 300)
    comp_played_hands.append(source.pop(source.index(card)))
    comp_deck_clear()
    card_assignment_for_players(a, b, c, d, e, f, g)
    comp_round_done = True
def comp_tm_play(rnd, ply_tm_card_pos):
x_limit = 920 - (75 * rnd)
cards = 13 - rnd
card_pos = 390 - (20 * rnd)
rnd -= 1
c_pos = len(comp_played_hands)
p_pos = len(player_played_hands)
ct_pos = len(comp_teammate_played_hands)
pt_pos = len(player_teammate_played_hands)
if ct_pos == rnd and pt_pos == rnd + 1 and c_pos == rnd + 1 and p_pos == rnd + 1:
a = player_teammate_played_hands[rnd]
b = comp_played_hands[rnd]
c = player_played_hands[rnd]
if a.suit != trump and b.suit != trump and c.suit != trump:
if a.suit == b.suit == c.suit:
if have_suits(computer_teammate_hand, a.suit):
d = compare_cards(compare_cards(a, b), c)
d0 = check_least_high_card(d, computer_teammate_hand)
if d0 is not None:
comp_tm_set_play(False, d0, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif d0 is None:
d1 = check_lowest_value_card_of_suit(computer_teammate_hand, a.suit)
comp_tm_set_play(False, d1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_teammate_hand, a.suit) is False:
if len(comp_teammate_trumps) > 0:
d2 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, d2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
d3 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, d3, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif a.suit == b.suit and b.suit != c.suit:
if have_suits(computer_teammate_hand, a.suit):
d4 = check_least_high_card(compare_cards(a, b), computer_teammate_hand)
if d4 is not None:
comp_tm_set_play(False, d4, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif d4 is None:
d5 = check_lowest_value_card_of_suit(computer_teammate_hand, a.suit)
comp_tm_set_play(False, d5, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_teammate_hand, a.suit) is False:
if len(comp_teammate_trumps) > 0:
d6 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, d6, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
d7 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, d7, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif a.suit == c.suit and c.suit != b.suit:
if have_suits(computer_teammate_hand, a.suit):
d8 = check_least_high_card(compare_cards(a, c), computer_teammate_hand)
if d8 is not None:
comp_tm_set_play(False, d8, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif d8 is None:
d9 = check_lowest_value_card_of_suit(computer_teammate_hand, a.suit)
comp_tm_set_play(False, d9, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_teammate_hand, a.suit) is False:
if len(comp_teammate_trumps) > 0:
d10 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, d10, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
d11 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, d11, x_limit, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif a.suit != b.suit and b.suit == c.suit:
if have_suits(computer_teammate_hand, a.suit):
w = check_least_high_card(a, computer_teammate_hand)
if w is not None:
comp_tm_set_play(False, w, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif w is None:
w1 = check_lowest_value_card_of_suit(computer_teammate_hand, a.suit)
comp_tm_set_play(False, w1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_teammate_hand, a.suit) is False:
if len(comp_teammate_trumps) > 0:
w2 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, w2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
w3 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, w3, x_limit, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif a.suit != b.suit != c.suit:
if have_suits(computer_teammate_hand, a.suit):
d12 = check_least_high_card(a, computer_teammate_hand)
if d12 is not None:
comp_tm_set_play(False, d12, x_limit, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif d12 is None:
d13 = check_lowest_value_card_of_suit(computer_teammate_hand, a.suit)
comp_tm_set_play(False, d13, x_limit, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif have_suits(computer_teammate_hand, a.suit) is False:
if len(comp_teammate_trumps) > 0:
d14 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, d14, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
d15 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, d15, x_limit, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
if a.suit != trump and b.suit != trump and c.suit == trump:
if have_suits(computer_teammate_hand, a.suit):
e = check_lowest_value_card_of_suit(computer_teammate_hand, a.suit)
comp_tm_set_play(False, e, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_teammate_hand, a.suit) is False:
if len(comp_teammate_trumps) > 0:
e1 = check_least_high_card(c, comp_teammate_trumps)
if e1 is not None:
comp_tm_set_play(True, e1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif e1 is None:
e2 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, e2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
e3 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, e3, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit != trump and b.suit == trump and c.suit != trump:
if have_suits(computer_teammate_hand, a.suit):
f = check_lowest_value_card_of_suit(computer_teammate_hand, a.suit)
comp_tm_set_play(False, f, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_teammate_hand, a.suit) is False:
if len(comp_teammate_trumps) > 0:
f1 = check_least_high_card(b, comp_teammate_trumps)
if f1 is not None:
comp_tm_set_play(True, f1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif f1 is None:
f2 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, f2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
f3 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, f3, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit == trump and b.suit != trump and c.suit != trump:
if len(comp_teammate_trumps) > 0:
g = check_least_high_card(a, comp_teammate_trumps)
if g is not None:
comp_tm_set_play(True, g, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif g is None:
g1 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, g1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
g2 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, g2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit == trump and b.suit == trump and c.suit != trump:
if len(comp_teammate_trumps) > 0:
h = check_least_high_card(compare_cards(a, b), comp_teammate_trumps)
if h is not None:
comp_tm_set_play(True, h, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if h is None:
h1 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, h1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
h2 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, h2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit == trump and b.suit != trump and c.suit == trump:
if len(comp_teammate_trumps) > 0:
i = check_least_high_card(compare_cards(a, c), comp_teammate_trumps)
if i is not None:
comp_tm_set_play(True, i, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if i is None:
i1 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, i1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
i2 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, i2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit != trump and b.suit == trump and c.suit == trump:
if len(comp_teammate_trumps) > 0:
j = check_least_high_card(compare_cards(b, c), comp_teammate_trumps)
if j is not None:
comp_tm_set_play(True, j, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if j is None:
j1 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, j1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
j2 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, j2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit == trump and b.suit == trump and c.suit == trump:
if len(comp_teammate_trumps) > 0:
k = check_least_high_card(compare_cards(compare_cards(a, b), c), comp_teammate_trumps)
if k is not None:
comp_tm_set_play(True, k, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if k is None:
k1 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, k1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
k2 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, k2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif ct_pos == rnd and pt_pos == rnd and c_pos == rnd + 1 and p_pos == rnd + 1:
a = comp_played_hands[rnd]
b = player_played_hands[rnd]
if a.suit != trump and b.suit != trump:
if a.suit == b.suit:
if have_suits(computer_teammate_hand, a.suit):
c = compare_cards(a, b)
d = check_least_high_card(c, computer_teammate_hand)
if d is not None:
comp_tm_set_play(False, d, x_limit, cards, cards, cards + 1, card_pos, card_pos,
ply_tm_card_pos)
elif d is None:
e = check_lowest_value_card_of_suit(computer_teammate_hand, a.suit)
comp_tm_set_play(False, e, x_limit, cards, cards, cards + 1, card_pos, card_pos,
ply_tm_card_pos)
elif have_suits(computer_teammate_hand, a.suit) is False:
if len(comp_teammate_trumps) > 0:
a1 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, a1, x_limit, cards, cards, cards + 1, card_pos, card_pos,
ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
z1 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, z1, x_limit, cards, cards, cards + 1, card_pos, card_pos,
ply_tm_card_pos)
elif a.suit != b.suit:
if have_suits(computer_teammate_hand, a.suit):
b1 = check_least_high_card(a, computer_teammate_hand)
if b1 is not None:
comp_tm_set_play(False, b1, x_limit, cards, cards, cards + 1, card_pos, card_pos,
ply_tm_card_pos)
elif b1 is None:
b2 = check_lowest_value_card_of_suit(computer_teammate_hand, a.suit)
comp_tm_set_play(False, b2, x_limit, cards, cards, cards + 1, card_pos, card_pos,
ply_tm_card_pos)
elif have_suits(computer_teammate_hand, a.suit) is False:
if len(comp_teammate_trumps) > 0:
b3 = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, b3, x_limit, cards, cards, cards + 1, card_pos, card_pos,
ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
b4 = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, b4, x_limit, cards, cards, cards + 1, card_pos, card_pos,
ply_tm_card_pos)
elif a.suit != trump and b.suit == trump:
if have_suits(computer_teammate_hand, a.suit):
g = check_lowest_value_card_of_suit(computer_teammate_hand, a.suit)
comp_tm_set_play(False, g, x_limit, cards, cards, cards + 1, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_teammate_hand, a.suit) is False:
if len(comp_teammate_trumps) > 0:
h = check_least_high_card(b, comp_teammate_trumps)
if h is not None:
comp_tm_set_play(True, h, x_limit, cards, cards, cards + 1, card_pos, card_pos, ply_tm_card_pos)
elif h is None:
i = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, i, x_limit, cards, cards, cards + 1, card_pos, card_pos,
ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
j = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, j, x_limit, cards, cards, cards + 1, card_pos, card_pos, ply_tm_card_pos)
elif a.suit == trump and b.suit != trump:
if len(comp_teammate_trumps) > 0:
k = check_least_high_card(a, comp_teammate_trumps)
if k is not None:
comp_tm_set_play(True, k, x_limit, cards, cards, cards + 1, card_pos, card_pos, ply_tm_card_pos)
elif k is None:
m = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, m, x_limit, cards, cards, cards + 1, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
n = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, n, x_limit, cards, cards, cards + 1, card_pos, card_pos, ply_tm_card_pos)
elif a.suit == trump and b.suit == trump:
if len(comp_teammate_trumps) > 0:
p = check_least_high_card(compare_cards(a, b), comp_teammate_trumps)
if p is not None:
comp_tm_set_play(True, p, x_limit, cards, cards, cards + 1, card_pos, card_pos, ply_tm_card_pos)
elif p is None:
q = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, q, x_limit, cards, cards, cards + 1, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
r = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, r, x_limit, cards, cards, cards + 1, card_pos, card_pos, ply_tm_card_pos)
elif ct_pos == rnd and pt_pos == rnd and c_pos == rnd and p_pos == rnd + 1:
s = player_played_hands[rnd]
if s.suit != trump:
if have_suits(computer_teammate_hand, s.suit):
t = check_least_high_card(s, computer_teammate_hand)
if t is not None:
comp_tm_set_play(False, t, x_limit, cards + 1, cards, cards + 1, card_pos + 20, card_pos,
ply_tm_card_pos)
elif t is None:
t1 = check_lowest_value_card_of_suit(computer_teammate_hand, s.suit)
comp_tm_set_play(False, t1, x_limit, cards + 1, cards, cards + 1, card_pos + 20, card_pos,
ply_tm_card_pos)
elif have_suits(computer_teammate_hand, s.suit) is False:
if len(comp_teammate_trumps) > 0:
u = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, u, x_limit, cards + 1, cards, cards + 1, card_pos + 20, card_pos,
ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
v = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, v, x_limit, cards + 1, cards, cards + 1, card_pos + 20, card_pos,
ply_tm_card_pos)
elif s.suit == trump:
if len(comp_teammate_trumps) > 0:
w = check_least_high_card(s, comp_teammate_trumps)
if w is not None:
comp_tm_set_play(True, w, x_limit, cards + 1, cards, cards + 1, card_pos + 20, card_pos,
ply_tm_card_pos)
if w is None:
x = check_lowest_value_card(comp_teammate_trumps)
comp_tm_set_play(True, x, x_limit, cards + 1, cards, cards + 1, card_pos + 20, card_pos,
ply_tm_card_pos)
elif len(comp_teammate_trumps) == 0:
y = check_lowest_value_card(computer_teammate_hand)
comp_tm_set_play(False, y, x_limit, cards + 1, cards, cards + 1, card_pos + 20, card_pos,
ply_tm_card_pos)
elif ct_pos == rnd and pt_pos == rnd and c_pos == rnd and p_pos == rnd:
if len(computer_teammate_hand) == 0:
kk_1 = random.choice(comp_teammate_trumps)
comp_tm_set_play(True, kk_1, x_limit + 75, cards + 1, cards, cards + 1, card_pos + 20, card_pos,
ply_tm_card_pos)
if len(computer_teammate_hand) > 0:
k = random.choice(computer_teammate_hand)
comp_tm_set_play(False, k, x_limit + 75, cards + 1, cards, cards + 1, card_pos + 20, card_pos,
ply_tm_card_pos)
def player_tm_play(rnd, ply_tm_card_pos):
    """Choose and play a card for the player's teammate (AI seat) in trick *rnd*.

    Strategy (mirrors the other AI seats in this file): follow the led suit
    when possible, trying to beat the current best card with the cheapest
    winning card; otherwise discard the lowest card of the led suit. When
    void in the led suit, trump (over-trumping if a trump is already down),
    and when also out of trumps, dump the overall lowest card.

    Parameters:
        rnd: 1-based trick number; used to derive layout geometry, then
            decremented to 0-based for indexing the per-trick played lists.
        ply_tm_card_pos: position value passed straight through to
            player_tm_set_play for placing/animating the chosen card.

    NOTE(review): depends on module globals (trump, the four
    *_played_hands lists, player_teammate_hand, player_teammate_trumps)
    and on helpers defined elsewhere in this file — presumably
    player_teammate_hand excludes the cards in player_teammate_trumps,
    since both are searched separately; confirm against the dealing code.
    """
    # Screen-layout geometry for the shrinking hand fan.
    x_limit = 920 - (75 * rnd)
    cards = 13 - rnd
    card_pos = 390 - (20 * rnd)
    rnd -= 1  # switch to 0-based index into the played-hand lists
    # How many cards each seat has committed to the current trick so far.
    c_pos = len(comp_played_hands)
    p_pos = len(player_played_hands)
    ct_pos = len(comp_teammate_played_hands)
    pt_pos = len(player_teammate_played_hands)
    # ---- Case 1: teammate plays LAST — all three other cards are visible.
    if pt_pos == rnd and c_pos == rnd + 1 and p_pos == rnd + 1 and ct_pos == rnd + 1:
        a = comp_played_hands[rnd]           # computer's card (the lead)
        b = player_played_hands[rnd]         # player's card
        c = comp_teammate_played_hands[rnd]  # computer teammate's card
        if a.suit != trump and b.suit != trump and c.suit != trump:
            # No trump on the table; highest card of the led suit (a.suit) wins.
            if a.suit == b.suit == c.suit:
                if have_suits(player_teammate_hand, a.suit):
                    # Beat the best of the three with the cheapest winner.
                    d = compare_cards(compare_cards(a, b), c)
                    d0 = check_least_high_card(d, player_teammate_hand)
                    if d0 is not None:
                        player_tm_set_play(False, d0, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif d0 is None:
                        # Can't win: throw the lowest card of the led suit.
                        d1 = check_lowest_value_card_of_suit(player_teammate_hand, a.suit)
                        player_tm_set_play(False, d1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                elif have_suits(player_teammate_hand, a.suit) is False:
                    # Void in the led suit: ruff with the lowest trump, else dump.
                    if len(player_teammate_trumps) > 0:
                        d2 = check_lowest_value_card(player_teammate_trumps)
                        player_tm_set_play(True, d2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif len(player_teammate_trumps) == 0:
                        d3 = check_lowest_value_card(player_teammate_hand)
                        player_tm_set_play(False, d3, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
            elif a.suit == b.suit and b.suit != c.suit:
                # c is off-suit (and not trump), so only a vs b can win.
                if have_suits(player_teammate_hand, a.suit):
                    d4 = check_least_high_card(compare_cards(a, b), player_teammate_hand)
                    if d4 is not None:
                        player_tm_set_play(False, d4, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif d4 is None:
                        d5 = check_lowest_value_card_of_suit(player_teammate_hand, a.suit)
                        player_tm_set_play(False, d5, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                elif have_suits(player_teammate_hand, a.suit) is False:
                    if len(player_teammate_trumps) > 0:
                        d6 = check_lowest_value_card(player_teammate_trumps)
                        player_tm_set_play(True, d6, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif len(player_teammate_trumps) == 0:
                        d7 = check_lowest_value_card(player_teammate_hand)
                        player_tm_set_play(False, d7, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
            elif a.suit == c.suit and c.suit != b.suit:
                # b is off-suit, so only a vs c can win.
                if have_suits(player_teammate_hand, a.suit):
                    d8 = check_least_high_card(compare_cards(a, c), player_teammate_hand)
                    if d8 is not None:
                        player_tm_set_play(False, d8, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif d8 is None:
                        d9 = check_lowest_value_card_of_suit(player_teammate_hand, a.suit)
                        player_tm_set_play(False, d9, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                elif have_suits(player_teammate_hand, a.suit) is False:
                    if len(player_teammate_trumps) > 0:
                        d10 = check_lowest_value_card(player_teammate_trumps)
                        player_tm_set_play(True, d10, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif len(player_teammate_trumps) == 0:
                        d11 = check_lowest_value_card(player_teammate_hand)
                        player_tm_set_play(False, d11, x_limit, cards, cards, cards, card_pos, card_pos,
                                           ply_tm_card_pos)
            elif a.suit != b.suit and b.suit == c.suit:
                # Only the lead a can win (b and c share a different, non-trump suit).
                if have_suits(player_teammate_hand, a.suit):
                    w = check_least_high_card(a, player_teammate_hand)
                    if w is not None:
                        player_tm_set_play(False, w, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif w is None:
                        w1 = check_lowest_value_card_of_suit(player_teammate_hand, a.suit)
                        player_tm_set_play(False, w1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                elif have_suits(player_teammate_hand, a.suit) is False:
                    if len(player_teammate_trumps) > 0:
                        w2 = check_lowest_value_card(player_teammate_trumps)
                        player_tm_set_play(True, w2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif len(player_teammate_trumps) == 0:
                        w3 = check_lowest_value_card(player_teammate_hand)
                        player_tm_set_play(False, w3, x_limit, cards, cards, cards, card_pos, card_pos,
                                           ply_tm_card_pos)
            elif a.suit != b.suit != c.suit:
                # Remaining no-trump layouts: only the lead a can win.
                # (a.suit == c.suit was already handled by the elif above.)
                if have_suits(player_teammate_hand, a.suit):
                    d12 = check_least_high_card(a, player_teammate_hand)
                    if d12 is not None:
                        player_tm_set_play(False, d12, x_limit, cards, cards, cards, card_pos, card_pos,
                                           ply_tm_card_pos)
                    elif d12 is None:
                        d13 = check_lowest_value_card_of_suit(player_teammate_hand, a.suit)
                        player_tm_set_play(False, d13, x_limit, cards, cards, cards, card_pos, card_pos,
                                           ply_tm_card_pos)
                elif have_suits(player_teammate_hand, a.suit) is False:
                    if len(player_teammate_trumps) > 0:
                        d14 = check_lowest_value_card(player_teammate_trumps)
                        player_tm_set_play(True, d14, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif len(player_teammate_trumps) == 0:
                        d15 = check_lowest_value_card(player_teammate_hand)
                        player_tm_set_play(False, d15, x_limit, cards, cards, cards, card_pos, card_pos,
                                           ply_tm_card_pos)
        # Exactly one trump on the table: must follow the led suit if able,
        # otherwise try to over-trump the trump that was played.
        if a.suit != trump and b.suit != trump and c.suit == trump:
            if have_suits(player_teammate_hand, a.suit):
                e = check_lowest_value_card_of_suit(player_teammate_hand, a.suit)
                player_tm_set_play(False, e, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
            elif have_suits(player_teammate_hand, a.suit) is False:
                if len(player_teammate_trumps) > 0:
                    e1 = check_least_high_card(c, player_teammate_trumps)
                    if e1 is not None:
                        player_tm_set_play(True, e1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif e1 is None:
                        # NOTE(review): cannot over-trump c, so the lowest card of
                        # the WHOLE hand is discarded (flag False) — the parallel
                        # b-trumped branch below instead dumps the lowest trump
                        # (flag True). Confirm this asymmetry is intended.
                        e2 = check_lowest_value_card(player_teammate_hand)
                        player_tm_set_play(False, e2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                elif len(player_teammate_trumps) == 0:
                    e3 = check_lowest_value_card(player_teammate_hand)
                    player_tm_set_play(False, e3, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
        if a.suit != trump and b.suit == trump and c.suit != trump:
            if have_suits(player_teammate_hand, a.suit):
                f = check_lowest_value_card_of_suit(player_teammate_hand, a.suit)
                player_tm_set_play(False, f, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
            elif have_suits(player_teammate_hand, a.suit) is False:
                if len(player_teammate_trumps) > 0:
                    f1 = check_least_high_card(b, player_teammate_trumps)
                    if f1 is not None:
                        player_tm_set_play(True, f1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                    elif f1 is None:
                        f2 = check_lowest_value_card(player_teammate_trumps)
                        player_tm_set_play(True, f2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                elif len(player_teammate_trumps) == 0:
                    f3 = check_lowest_value_card(player_teammate_hand)
                    player_tm_set_play(False, f3, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
        # Trump was LED (a.suit == trump): teammate must play trump if held.
        if a.suit == trump and b.suit != trump and c.suit != trump:
            if len(player_teammate_trumps) > 0:
                g = check_least_high_card(a, player_teammate_trumps)
                if g is not None:
                    player_tm_set_play(True, g, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                elif g is None:
                    g1 = check_lowest_value_card(player_teammate_trumps)
                    player_tm_set_play(True, g1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
            elif len(player_teammate_trumps) == 0:
                g2 = check_lowest_value_card(player_teammate_hand)
                player_tm_set_play(False, g2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
        if a.suit == trump and b.suit == trump and c.suit != trump:
            if len(player_teammate_trumps) > 0:
                h = check_least_high_card(compare_cards(a, b), player_teammate_trumps)
                if h is not None:
                    player_tm_set_play(True, h, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                # plain `if` (not elif) — harmless since the two tests are exclusive
                if h is None:
                    h1 = check_lowest_value_card(player_teammate_trumps)
                    player_tm_set_play(True, h1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
            elif len(player_teammate_trumps) == 0:
                h2 = check_lowest_value_card(player_teammate_hand)
                player_tm_set_play(False, h2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
        if a.suit == trump and b.suit != trump and c.suit == trump:
            if len(player_teammate_trumps) > 0:
                i = check_least_high_card(compare_cards(a, c), player_teammate_trumps)
                if i is not None:
                    player_tm_set_play(True, i, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                if i is None:
                    i1 = check_lowest_value_card(player_teammate_trumps)
                    player_tm_set_play(True, i1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
            elif len(player_teammate_trumps) == 0:
                i2 = check_lowest_value_card(player_teammate_hand)
                player_tm_set_play(False, i2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
        if a.suit != trump and b.suit == trump and c.suit == trump:
            if len(player_teammate_trumps) > 0:
                j = check_least_high_card(compare_cards(b, c), player_teammate_trumps)
                if j is not None:
                    player_tm_set_play(True, j, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                if j is None:
                    j1 = check_lowest_value_card(player_teammate_trumps)
                    player_tm_set_play(True, j1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
            elif len(player_teammate_trumps) == 0:
                j2 = check_lowest_value_card(player_teammate_hand)
                player_tm_set_play(False, j2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
        if a.suit == trump and b.suit == trump and c.suit == trump:
            # All three cards are trumps: beat the best of them if possible.
            if len(player_teammate_trumps) > 0:
                k = check_least_high_card(compare_cards(compare_cards(a, b), c), player_teammate_trumps)
                if k is not None:
                    player_tm_set_play(True, k, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
                if k is None:
                    k1 = check_lowest_value_card(player_teammate_trumps)
                    player_tm_set_play(True, k1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
            elif len(player_teammate_trumps) == 0:
                k2 = check_lowest_value_card(player_teammate_hand)
                player_tm_set_play(False, k2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
    # ---- Case 2: teammate plays THIRD — player (the lead here) and the
    # computer's teammate have already played.
    elif pt_pos == rnd and c_pos == rnd and p_pos == rnd + 1 and ct_pos == rnd + 1:
        s = player_played_hands[rnd]         # player's card (the lead)
        h = comp_teammate_played_hands[rnd]  # computer teammate's card
        if s.suit != trump and h.suit != trump:
            if s.suit == h.suit:
                if have_suits(player_teammate_hand, s.suit):
                    m = check_least_high_card(compare_cards(s, h), player_teammate_hand)
                    if m is not None:
                        player_tm_set_play(False, m, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                           ply_tm_card_pos)
                    elif m is None:
                        m1 = check_lowest_value_card_of_suit(player_teammate_hand, s.suit)
                        player_tm_set_play(False, m1, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                           ply_tm_card_pos)
                elif have_suits(player_teammate_hand, s.suit) is False:
                    if len(player_teammate_trumps) > 0:
                        m2 = check_lowest_value_card(player_teammate_trumps)
                        player_tm_set_play(True, m2, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                           ply_tm_card_pos)
                    elif len(player_teammate_trumps) == 0:
                        m3 = check_lowest_value_card(player_teammate_hand)
                        player_tm_set_play(False, m3, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                           ply_tm_card_pos)
            elif s.suit != h.suit:
                # h is off-suit: only the lead s matters.
                if have_suits(player_teammate_hand, s.suit):
                    m4 = check_least_high_card(s, player_teammate_hand)
                    if m4 is not None:
                        player_tm_set_play(False, m4, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                           ply_tm_card_pos)
                    if m4 is None:
                        m5 = check_lowest_value_card_of_suit(player_teammate_hand, s.suit)
                        player_tm_set_play(False, m5, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                           ply_tm_card_pos)
                elif have_suits(player_teammate_hand, s.suit) is False:
                    if len(player_teammate_trumps) > 0:
                        m6 = check_lowest_value_card(player_teammate_trumps)
                        player_tm_set_play(True, m6, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                           ply_tm_card_pos)
                    elif len(player_teammate_trumps) == 0:
                        m7 = check_lowest_value_card(player_teammate_hand)
                        player_tm_set_play(False, m7, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                           ply_tm_card_pos)
        if s.suit != trump and h.suit == trump:
            # Opponent's partner trumped: follow suit if able, else over-trump.
            if have_suits(player_teammate_hand, s.suit):
                n = check_lowest_value_card_of_suit(player_teammate_hand, s.suit)
                player_tm_set_play(False, n, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos, ply_tm_card_pos)
            elif have_suits(player_teammate_hand, s.suit) is False:
                if len(player_teammate_trumps) > 0:
                    n1 = check_least_high_card(h, player_teammate_trumps)
                    if n1 is not None:
                        player_tm_set_play(True, n1, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                           ply_tm_card_pos)
                    if n1 is None:
                        n2 = check_lowest_value_card(player_teammate_trumps)
                        player_tm_set_play(True, n2, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                           ply_tm_card_pos)
                elif len(player_teammate_trumps) == 0:
                    n3 = check_lowest_value_card(player_teammate_hand)
                    player_tm_set_play(False, n3, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
        if s.suit == trump and h.suit != trump:
            # Trump was led: must play trump if held.
            if len(player_teammate_trumps) > 0:
                o = check_least_high_card(s, player_teammate_trumps)
                if o is not None:
                    player_tm_set_play(True, o, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
                elif o is None:
                    o1 = check_lowest_value_card(player_teammate_trumps)
                    player_tm_set_play(True, o1, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
            elif len(player_teammate_trumps) == 0:
                o2 = check_lowest_value_card(player_teammate_hand)
                player_tm_set_play(False, o2, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                   ply_tm_card_pos)
        if s.suit == trump and h.suit == trump:
            # Both cards down are trumps: beat the better of the two.
            if len(player_teammate_trumps) > 0:
                p = check_least_high_card(compare_cards(s, h), player_teammate_trumps)
                if p is not None:
                    player_tm_set_play(True, p, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
                elif p is None:
                    p1 = check_lowest_value_card(player_teammate_trumps)
                    player_tm_set_play(True, p1, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
            elif len(player_teammate_trumps) == 0:
                p2 = check_lowest_value_card(player_teammate_hand)
                player_tm_set_play(False, p2, x_limit, cards + 1, cards, cards, card_pos + 20, card_pos,
                                   ply_tm_card_pos)
    # ---- Case 3: teammate plays SECOND — only the computer's teammate
    # (the lead) has played.
    elif pt_pos == rnd and c_pos == rnd and p_pos == rnd and ct_pos == rnd + 1:
        s = comp_teammate_played_hands[rnd]  # the led card
        if s.suit != trump:
            if have_suits(player_teammate_hand, s.suit):
                t = check_least_high_card(s, player_teammate_hand)
                if t is not None:
                    player_tm_set_play(False, t, x_limit + 75, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
                elif t is None:
                    t1 = check_lowest_value_card_of_suit(player_teammate_hand, s.suit)
                    player_tm_set_play(False, t1, x_limit + 75, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
            elif have_suits(player_teammate_hand, s.suit) is False:
                if len(player_teammate_trumps) > 0:
                    u = check_lowest_value_card(player_teammate_trumps)
                    player_tm_set_play(True, u, x_limit + 75, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
                elif len(player_teammate_trumps) == 0:
                    v = check_lowest_value_card(player_teammate_hand)
                    player_tm_set_play(False, v, x_limit + 75, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
        elif s.suit == trump:
            if len(player_teammate_trumps) > 0:
                w = check_least_high_card(s, player_teammate_trumps)
                if w is not None:
                    player_tm_set_play(True, w, x_limit + 75, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
                if w is None:
                    x = check_lowest_value_card(player_teammate_trumps)
                    player_tm_set_play(True, x, x_limit + 75, cards + 1, cards, cards, card_pos + 20, card_pos,
                                       ply_tm_card_pos)
            elif len(player_teammate_trumps) == 0:
                y = check_lowest_value_card(player_teammate_hand)
                player_tm_set_play(False, y, x_limit + 75, cards + 1, cards, cards, card_pos + 20, card_pos,
                                   ply_tm_card_pos)
    # ---- Case 4: teammate LEADS the trick — play a random card.
    elif pt_pos == rnd and c_pos == rnd and p_pos == rnd and ct_pos == rnd:
        if len(player_teammate_hand) == 0:
            # Only trumps left: lead a random trump.
            kk_1 = random.choice(player_teammate_trumps)
            player_tm_set_play(True, kk_1, x_limit + 75, cards + 1, cards + 1, cards, card_pos + 20, card_pos + 20,
                               ply_tm_card_pos)
        if len(player_teammate_hand) > 0:
            k = random.choice(player_teammate_hand)
            player_tm_set_play(False, k, x_limit + 75, cards + 1, cards + 1, cards, card_pos + 20, card_pos + 20,
                               ply_tm_card_pos)
def comp_play(first_play, rnd, ply_tm_card_pos):
global comp_round_done
x_limit = 920 - (75 * rnd)
cards = 13 - rnd
card_pos = 390 - (20 * rnd)
rnd -= 1
c_pos = len(comp_played_hands)
p_pos = len(player_played_hands)
ct_pos = len(comp_teammate_played_hands)
pt_pos = len(player_teammate_played_hands)
if first_play:
clear_all_decks()
if len(computer_hand) == 0:
kk_1 = random.choice(comp_trumps)
comp_set_play(True, kk_1, x_limit + 75, cards, cards + 1, cards + 1, card_pos, card_pos + 20,
ply_tm_card_pos)
if len(computer_hand) > 0:
k = random.choice(computer_hand)
comp_set_play(False, k, x_limit + 75, cards, cards + 1, cards + 1, card_pos, card_pos + 20, ply_tm_card_pos)
elif not first_play:
if c_pos == rnd and p_pos == rnd + 1 and ct_pos == rnd + 1 and pt_pos == rnd + 1:
a = player_played_hands[rnd]
b = comp_teammate_played_hands[rnd]
c = player_teammate_played_hands[rnd]
if a.suit != trump and b.suit != trump and c.suit != trump:
if a.suit == b.suit == c.suit:
if have_suits(computer_hand, a.suit):
d = compare_cards(compare_cards(a, b), c)
d0 = check_least_high_card(d, computer_hand)
if d0 is not None:
comp_set_play(False, d0, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif d0 is None:
d1 = check_lowest_value_card_of_suit(computer_hand, a.suit)
comp_set_play(False, d1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_hand, a.suit) is False:
if len(comp_trumps) > 0:
d2 = check_lowest_value_card(comp_trumps)
comp_set_play(True, d2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
d3 = check_lowest_value_card(computer_hand)
comp_set_play(False, d3, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif a.suit == b.suit and b.suit != c.suit:
if have_suits(computer_hand, a.suit):
d4 = check_least_high_card(compare_cards(a, b), computer_hand)
if d4 is not None:
comp_set_play(False, d4, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif d4 is None:
d5 = check_lowest_value_card_of_suit(computer_hand, a.suit)
comp_set_play(False, d5, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_hand, a.suit) is False:
if len(comp_trumps) > 0:
d6 = check_lowest_value_card(comp_trumps)
comp_set_play(True, d6, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
d7 = check_lowest_value_card(computer_hand)
comp_set_play(False, d7, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif a.suit == c.suit and c.suit != b.suit:
if have_suits(computer_hand, a.suit):
d8 = check_least_high_card(compare_cards(a, c), computer_hand)
if d8 is not None:
comp_set_play(False, d8, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif d8 is None:
d9 = check_lowest_value_card_of_suit(computer_hand, a.suit)
comp_set_play(False, d9, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_hand, a.suit) is False:
if len(comp_trumps) > 0:
d10 = check_lowest_value_card(comp_trumps)
comp_set_play(True, d10, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
d11 = check_lowest_value_card(computer_hand)
comp_set_play(False, d11, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif a.suit != b.suit and b.suit == c.suit:
if have_suits(computer_hand, a.suit):
m = check_least_high_card(a, computer_hand)
if m is not None:
comp_set_play(False, m, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif m is None:
m1 = check_lowest_value_card_of_suit(computer_hand, a.suit)
comp_set_play(False, m1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_hand, a.suit) is False:
if len(comp_trumps) > 0:
m2 = check_lowest_value_card(comp_trumps)
comp_set_play(True, m2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
m3 = check_lowest_value_card(computer_hand)
comp_set_play(False, m3, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif a.suit != b.suit != c.suit:
if have_suits(computer_hand, a.suit):
d12 = check_least_high_card(a, computer_hand)
if d12 is not None:
comp_set_play(False, d12, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif d12 is None:
d13 = check_lowest_value_card_of_suit(computer_hand, a.suit)
comp_set_play(False, d13, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_hand, a.suit) is False:
if len(comp_trumps) > 0:
d14 = check_lowest_value_card(comp_trumps)
comp_set_play(True, d14, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
d15 = check_lowest_value_card(computer_hand)
comp_set_play(False, d15, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit != trump and b.suit != trump and c.suit == trump:
if have_suits(computer_hand, a.suit):
e = check_lowest_value_card_of_suit(computer_hand, a.suit)
comp_set_play(False, e, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_hand, a.suit) is False:
if len(comp_trumps) > 0:
e1 = check_least_high_card(c, comp_trumps)
if e1 is not None:
comp_set_play(True, e1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif e1 is None:
e2 = check_lowest_value_card(computer_hand)
comp_set_play(False, e2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
e3 = check_lowest_value_card(computer_hand)
comp_set_play(False, e3, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit != trump and b.suit == trump and c.suit != trump:
if have_suits(computer_hand, a.suit):
f = check_lowest_value_card_of_suit(computer_hand, a.suit)
comp_set_play(False, f, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif have_suits(computer_hand, a.suit) is False:
if len(comp_trumps) > 0:
f1 = check_least_high_card(b, comp_trumps)
if f1 is not None:
comp_set_play(True, f1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif f1 is None:
f2 = check_lowest_value_card(comp_trumps)
comp_set_play(True, f2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
f3 = check_lowest_value_card(computer_hand)
comp_set_play(False, f3, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit == trump and b.suit != trump and c.suit != trump:
if len(comp_trumps) > 0:
g = check_least_high_card(a, comp_trumps)
if g is not None:
comp_set_play(True, g, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif g is None:
g1 = check_lowest_value_card(comp_trumps)
comp_set_play(True, g1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
g2 = check_lowest_value_card(computer_hand)
comp_set_play(False, g2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit == trump and b.suit == trump and c.suit != trump:
if len(comp_trumps) > 0:
h = check_least_high_card(compare_cards(a, b), comp_trumps)
if h is not None:
comp_set_play(True, h, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if h is None:
h1 = check_lowest_value_card(comp_trumps)
comp_set_play(True, h1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
h2 = check_lowest_value_card(computer_hand)
comp_set_play(False, h2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit == trump and b.suit != trump and c.suit == trump:
if len(comp_trumps) > 0:
i = check_least_high_card(compare_cards(a, c), comp_trumps)
if i is not None:
comp_set_play(True, i, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if i is None:
i1 = check_lowest_value_card(comp_trumps)
comp_set_play(True, i1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
i2 = check_lowest_value_card(computer_hand)
comp_set_play(False, i2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit != trump and b.suit == trump and c.suit == trump:
if len(comp_trumps) > 0:
j = check_least_high_card(compare_cards(b, c), comp_trumps)
if j is not None:
comp_set_play(True, j, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if j is None:
j1 = check_lowest_value_card(comp_trumps)
comp_set_play(True, j1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
j2 = check_lowest_value_card(computer_hand)
comp_set_play(False, j2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if a.suit == trump and b.suit == trump and c.suit == trump:
if len(comp_trumps) > 0:
k = check_least_high_card(compare_cards(compare_cards(a, b), c), comp_trumps)
if k is not None:
comp_set_play(True, k, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
if k is None:
k1 = check_lowest_value_card(comp_trumps)
comp_set_play(True, k1, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif len(comp_trumps) == 0:
k2 = check_lowest_value_card(computer_hand)
comp_set_play(False, k2, x_limit, cards, cards, cards, card_pos, card_pos, ply_tm_card_pos)
elif c_pos == rnd and p_pos == rnd and ct_pos == rnd + 1 and pt_pos == rnd + 1:
s = comp_teammate_played_hands[rnd]
h = player_teammate_played_hands[rnd]
if s.suit != trump and h.suit != trump:
if s.suit == h.suit:
if have_suits(computer_hand, s.suit):
m = check_least_high_card(compare_cards(s, h), computer_hand)
if m is not None:
comp_set_play(False, m, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif m is None:
m1 = check_lowest_value_card_of_suit(computer_hand, s.suit)
comp_set_play(False, m1, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif have_suits(computer_hand, s.suit) is False:
if len(comp_trumps) > 0:
m2 = check_lowest_value_card(comp_trumps)
comp_set_play(True, m2, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif len(player_teammate_trumps) == 0:
m3 = check_lowest_value_card(computer_hand)
comp_set_play(False, m3, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif s.suit != h.suit:
if have_suits(computer_hand, s.suit):
m4 = check_least_high_card(s, computer_hand)
if m4 is not None:
comp_set_play(False, m4, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
if m4 is None:
m5 = check_lowest_value_card_of_suit(computer_hand, s.suit)
comp_set_play(False, m5, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif have_suits(computer_hand, s.suit) is False:
if len(comp_trumps) > 0:
m6 = check_lowest_value_card(comp_trumps)
comp_set_play(True, m6, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif len(comp_trumps) == 0:
m7 = check_lowest_value_card(computer_hand)
comp_set_play(False, m7, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
if s.suit != trump and h.suit == trump:
if have_suits(computer_hand, s.suit):
n = check_lowest_value_card_of_suit(computer_hand, s.suit)
comp_set_play(False, n, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif have_suits(computer_hand, s.suit) is False:
if len(comp_trumps) > 0:
n1 = check_least_high_card(h, comp_trumps)
if n1 is not None:
comp_set_play(True, n1, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
if n1 is None:
n2 = check_lowest_value_card(comp_trumps)
comp_set_play(True, n2, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif len(comp_trumps) == 0:
n3 = check_lowest_value_card(computer_hand)
comp_set_play(False, n3, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
if s.suit == trump and h.suit != trump:
if len(comp_trumps) > 0:
o = check_least_high_card(s, comp_trumps)
if o is not None:
comp_set_play(True, o, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif o is None:
o1 = check_lowest_value_card(comp_trumps)
comp_set_play(True, o1, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif len(comp_trumps) == 0:
o2 = check_lowest_value_card(computer_hand)
comp_set_play(False, o2, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
if s.suit == trump and h.suit == trump:
if len(comp_trumps) > 0:
p = check_least_high_card(compare_cards(s, h), comp_trumps)
if p is not None:
comp_set_play(True, p, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif p is None:
p1 = check_lowest_value_card(comp_trumps)
comp_set_play(True, p1, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif len(comp_trumps) == 0:
p2 = check_lowest_value_card(computer_hand)
comp_set_play(False, p2, x_limit + 75, cards, cards, cards, card_pos, card_pos,
ply_tm_card_pos)
elif c_pos == rnd and p_pos == rnd and ct_pos == rnd and pt_pos == rnd + 1:
s = player_teammate_played_hands[rnd]
if s.suit != trump:
if have_suits(computer_hand, s.suit):
t = check_least_high_card(s, computer_hand)
if t is not None:
comp_set_play(False, t, x_limit + 75, cards, cards + 1, cards, card_pos, card_pos + 20,
ply_tm_card_pos)
elif t is None:
t1 = check_lowest_value_card_of_suit(computer_hand, s.suit)
comp_set_play(False, t1, x_limit + 75, cards, cards + 1, cards, card_pos, card_pos + 20,
ply_tm_card_pos)
elif have_suits(computer_hand, s.suit) is False:
if len(comp_trumps) > 0:
u = check_lowest_value_card(comp_trumps)
comp_set_play(True, u, x_limit + 75, cards, cards + 1, cards, card_pos, card_pos + 20,
ply_tm_card_pos)
elif len(comp_trumps) == 0:
v = check_lowest_value_card(computer_hand)
comp_set_play(False, v, x_limit + 75, cards, cards + 1, cards, card_pos, card_pos + 20,
ply_tm_card_pos)
elif s.suit == trump:
if len(comp_trumps) > 0:
w = check_least_high_card(s, comp_trumps)
if w is not None:
comp_set_play(True, w, x_limit + 75, cards, cards + 1, cards, card_pos, card_pos + 20,
ply_tm_card_pos)
if w is None:
x = check_lowest_value_card(comp_trumps)
comp_set_play(True, x, x_limit + 75, cards, cards + 1, cards, card_pos, card_pos + 20,
ply_tm_card_pos)
elif len(comp_trumps) == 0:
y = check_lowest_value_card(computer_hand)
comp_set_play(False, y, x_limit + 75, cards, cards + 1, cards, card_pos, card_pos + 20,
ply_tm_card_pos)
elif c_pos == rnd and p_pos == rnd and ct_pos == rnd and pt_pos == rnd:
if len(computer_hand) == 0:
kk_1 = random.choice(comp_trumps)
comp_set_play(True, kk_1, x_limit + 75, cards, cards + 1, cards + 1, card_pos, card_pos + 20,
ply_tm_card_pos)
if len(computer_hand) > 0:
k = random.choice(computer_hand)
comp_set_play(False, k, x_limit + 75, cards, cards + 1, cards + 1, card_pos, card_pos + 20,
ply_tm_card_pos)
def check_won_hand(rnd):
    """Resolve the trick played in round ``rnd`` and credit the winning seat.

    The four played cards are compared under standard trick rules as
    implemented by the original branch table:

    * exactly one trump played  -> that card wins outright;
    * two or three trumps       -> the trumps are compared pairwise with
      ``compare_cards`` in seat order (a, b, c, d);
    * all four trumps           -> ``check_highest_value_card`` decides;
    * no trumps                 -> the highest card of the *lead* suit wins.

    The lead card is the card of whichever seat opened the trick: the trump
    selector when no trick has been won yet, otherwise the previous trick's
    winner.  The matching ``*_won_hands`` global counter is incremented and
    the winner's tag ('comp', 'player', 'comp_tm', 'player_tm') is returned.
    Returns None if no lead seat can be determined (mirrors the original
    fall-through behaviour).

    This replaces ~540 lines of four near-identical branch tables that
    differed only in which card was treated as the lead.
    """
    global comp_won_hands, player_won_hands, comp_tm_won_hands, player_tm_won_hands
    rnd -= 1  # callers pass a 1-based round number
    a = comp_played_hands[rnd]
    b = player_played_hands[rnd]
    c = comp_teammate_played_hands[rnd]
    d = player_teammate_played_hands[rnd]
    compare_list = [a, b, c, d]
    # Work out which seat led this trick (same conditions as the original
    # outer elif chain).
    if (comp_trump_selected and winner is None) or winner == 'comp':
        lead = a
    elif (player_trump_selected and winner is None) or winner == 'player':
        lead = b
    elif (player_trump_selected or comp_trump_selected) and winner == 'comp_tm':
        lead = c
    elif (player_trump_selected or comp_trump_selected) and winner == 'player_tm':
        lead = d
    else:
        return None
    trumps_played = [card for card in compare_list if card.suit == trump]
    if len(trumps_played) == 4:
        # All-trump trick: the original used check_highest_value_card here,
        # so keep that exact helper rather than folding compare_cards.
        best = check_highest_value_card(compare_list)
    elif trumps_played:
        # 1-3 trumps: fold compare_cards over them in seat order, matching
        # the original's nested compare_cards(compare_cards(x, y), z) calls.
        best = trumps_played[0]
        for card in trumps_played[1:]:
            best = compare_cards(best, card)
    else:
        # No trumps: highest card of the lead suit takes the trick.
        best = check_highest_value_card_of_suit(compare_list, lead.suit)
    if best == a:
        comp_won_hands += 1
        return 'comp'
    elif best == b:
        player_won_hands += 1
        return 'player'
    elif best == c:
        comp_tm_won_hands += 1
        return 'comp_tm'
    elif best == d:
        player_tm_won_hands += 1
        return 'player_tm'
def comp_win_cords():
    """Advance the pile position for the computer's won tricks.

    Moves the next stack 80px right and wraps back to x=90 on row y=325
    once past x=330.

    NOTE(review): the original had further ``elif x1 > 330 and y1 == ...``
    branches (rows 395/465/535) that could never fire because the plain
    ``x1 > 330`` test above them always matched first.  Multi-row wrapping
    was presumably intended — confirm the desired layout before restoring
    those branches.  Removing them does not change behaviour.
    """
    global x1, y1
    x1 += 80
    if x1 > 330:
        x1 = 90
        y1 = 325
def comp_tm_win_cords():
    """Advance the pile position for the computer teammate's won tricks.

    Moves the next stack 80px left and wraps back to x=820 on row y=325
    once past x=580 (mirror image of ``comp_win_cords``).

    NOTE(review): as in ``comp_win_cords``, the original's extra
    ``elif x3 < 580 and y3 == ...`` row branches were unreachable (the
    plain ``x3 < 580`` test always matched first) and have been removed;
    behaviour is unchanged.  Confirm whether multi-row wrapping was meant.
    """
    global x3, y3
    x3 -= 80
    if x3 < 580:
        x3 = 820
        y3 = 325
def player_win_cord():
    """Advance the pile position for the player's won tricks.

    Moves the next stack 80px right and wraps back to x=250 on row y=415
    once past x=570.

    NOTE(review): the original's ``elif x2 > 570 and y2 == 415`` second-row
    branch was unreachable (shadowed by the plain ``x2 > 570`` test) and
    has been removed; behaviour is unchanged.
    """
    global x2, y2
    x2 += 80
    if x2 > 570:
        x2 = 250
        y2 = 415
def player_tm_win_cord():
    """Advance the pile position for the player teammate's won tricks.

    Each stack moves 80px right; past x=570 it wraps back to the first
    row (250, 195), and past x=410 while on row y=200 it drops to the
    second row (250, 265).
    """
    global x4, y4
    x4 += 80
    if x4 > 570:
        # Wrap back to the start of the first row.
        x4, y4 = 250, 195
    elif x4 > 410 and y4 == 200:
        # Drop down to the second row.
        x4, y4 = 250, 265
def set_won_hand(rnd, hand_winner):
    """Render the four cards of the finished trick on the winner's pile.

    The cards are blitted fanned 10px apart at the winner's pile
    coordinates, then that pile's cursor is advanced.  The seat order of
    the fan depends on who selected trumps (comp selector: comp, player,
    comp teammate, player teammate; player selector: player, comp
    teammate, player teammate, comp) — exactly the original's layout.

    Replaces eight duplicated four-blit sequences with a table-driven
    dispatch; call order and pixel offsets are unchanged.
    """
    global x1, y1, x2, y2, x3, y3, x4, y4
    rnd -= 1  # callers pass a 1-based round number
    if comp_trump_selected:
        play_order = (comp_played_hands, player_played_hands,
                      comp_teammate_played_hands, player_teammate_played_hands)
    elif player_trump_selected:
        play_order = (player_played_hands, comp_teammate_played_hands,
                      player_teammate_played_hands, comp_played_hands)
    else:
        # Original did nothing when neither side had selected trumps.
        return
    # Pick the winner's pile coordinates and its cursor-advance function.
    if hand_winner == 'comp':
        base_x, base_y, advance = x1, y1, comp_win_cords
    elif hand_winner == 'player':
        base_x, base_y, advance = x2, y2, player_win_cord
    elif hand_winner == 'comp_tm':
        base_x, base_y, advance = x3, y3, comp_tm_win_cords
    elif hand_winner == 'player_tm':
        base_x, base_y, advance = x4, y4, player_tm_win_cord
    else:
        return
    for offset, hands in enumerate(play_order):
        match_cards(hands[rnd], base_x + 10 * offset, base_y)
    advance()
def each_player_round_done():
    """Return True once all four seats have played their card this trick.

    Reads the four module-level ``*_round_done`` flags; the original's
    ``global`` declaration was unnecessary for read-only access and the
    manual ``return True / return False`` collapses to one expression.
    """
    return bool(comp_round_done and player_round_done
                and comp_tm_round_done and player_tm_round_done)
def play_round(rnd):
    """Sequence one trick (round ``rnd``): each seat plays in turn order.

    The turn order depends on game state: on the opening trick the trump
    selector leads; on later tricks the previous trick's ``winner`` leads.
    The per-seat ``*_round_done`` flags (set inside the ``*_play`` helpers)
    gate each subsequent seat's play; once all four are done the trick is
    scored and cleared.

    NOTE(review): the statement order here is load-bearing — each ``if``
    chain re-checks the done flags because the ``*_play`` calls mutate
    them mid-function.  Do not reorder.
    """
    global round_status, winner
    # Layout parameters shrink as hands get smaller each round.
    # (Exact pixel meanings inferred from usage elsewhere — confirm.)
    x_limit = 920 - (75 * rnd)
    turn = 14 - rnd
    c1 = 14 - rnd
    c2 = 14 - rnd
    c3 = 14 - rnd
    c1_pos = 410 - (20 * rnd)
    c2_pos = 410 - (20 * rnd)
    c3_pos = 610 - (20 * rnd)
    # Opening trick: whoever selected trumps leads.
    if comp_trump_selected and comp_round_done is False and winner is None:
        comp_play(True, rnd, c3_pos)
    if player_trump_selected and player_round_done is False and winner is None:
        player_play(True, rnd, turn, x_limit, c1, c2, c3, c1_pos, c2_pos, c3_pos)
    if comp_round_done and player_round_done is False and winner is None:
        player_play(False, rnd, turn, x_limit, c1 - 1, c2, c3, c1_pos - 20, c2_pos, c3_pos)
        if player_round_done:
            comp_tm_play(rnd, c3_pos)
            if comp_tm_round_done:
                player_tm_play(rnd, c3_pos - 20)
    if player_round_done and comp_round_done is False and winner is None:
        if comp_tm_round_done is False:
            comp_tm_play(rnd, c3_pos)
        if comp_tm_round_done:
            player_tm_play(rnd, c3_pos - 20)
            if player_tm_round_done:
                comp_play(False, rnd, c3_pos - 20)
    # Later tricks: the previous winner leads, then play proceeds clockwise.
    if winner == 'comp' and each_player_round_done() is False:
        comp_play(False, rnd, c3_pos)
        if comp_round_done:
            player_play(False, rnd, turn, x_limit, c1 - 1, c2, c3, c1_pos - 20, c2_pos, c3_pos)
            if player_round_done:
                comp_tm_play(rnd, c3_pos)
                if comp_tm_round_done:
                    player_tm_play(rnd, c3_pos - 20)
    if winner == 'player' and each_player_round_done() is False:
        player_play(False, rnd, turn, x_limit, c1, c2, c3, c1_pos, c2_pos, c3_pos)
        if player_round_done:
            comp_tm_play(rnd, c3_pos)
            if comp_tm_round_done:
                player_tm_play(rnd, c3_pos - 20)
                if player_tm_round_done:
                    comp_play(False, rnd, c3_pos - 20)
    if winner == 'comp_tm' and each_player_round_done() is False:
        comp_tm_play(rnd, c3_pos)
        if comp_tm_round_done:
            player_tm_play(rnd, c3_pos - 20)
            if player_tm_round_done:
                comp_play(False, rnd, c3_pos - 20)
                if comp_round_done:
                    player_play(False, rnd, turn, x_limit, c1 - 1, c2 - 1, c3 - 1, c1_pos - 20, c2_pos - 20,
                                c3_pos - 20)
    if winner == 'player_tm' and each_player_round_done() is False:
        player_tm_play(rnd, c3_pos - 20)
        if player_tm_round_done:
            comp_play(False, rnd, c3_pos - 20)
            if comp_round_done:
                player_play(False, rnd, turn, x_limit, c1 - 1, c2, c3 - 1, c1_pos - 20, c2_pos, c3_pos - 20)
                if player_round_done:
                    comp_tm_play(rnd, c3_pos - 20)
    # All four cards down: score the trick, clear the table, mark the round.
    if player_tm_round_done and player_round_done and comp_round_done and comp_tm_round_done:
        winner = check_won_hand(rnd)
        set_won_hand(rnd, winner)
        play_area_clear()
        round_status['round' + str(rnd)] = True
def check_rounds_completed():
    """Set the global ``all_rounds_done`` flag.

    True only when every entry ``round1`` .. ``round13`` in
    ``round_status`` is exactly True.  Replaces the manual loop-and-break
    with the equivalent ``all()`` expression.
    """
    global all_rounds_done
    all_rounds_done = all(round_status['round' + str(j)] is True
                          for j in range(1, 14))
def winner_screen():
    """Show the end-of-game summary in a small 400x200 window.

    Displays each seat's trick count, declares the winning team (player
    team = player + player teammate; comp team = comp + comp teammate),
    and exits the main loop on Enter or window close (via the global
    ``run`` flag).  Called once per frame after the last round completes.
    """
    global run
    # Fresh small window for the results screen.
    win2 = pygame.display.set_mode((400, 200))
    win2.fill((0, 0, 0))
    # Decorative suit letters (same art as the intro screen).
    win2.blit(d_intro_suit, (0, 70))
    win2.blit(i_intro_suit, (100, 80))
    win2.blit(g_intro_suit, (200, 70))
    win2.blit(u_intro_suit, (300, 80))
    leave_game = i_font.render("Press Enter to Quit", True, (161, 17, 17))
    win2.blit(leave_game, (150, 180))
    # Per-seat trick tallies.
    a = comp_won_hands
    b = player_won_hands
    c = comp_tm_won_hands
    d = player_tm_won_hands
    total_player_won = b + d
    total_comp_won = a + c
    comp_wins = i_font.render("Comp " + str(a), True, (255, 255, 255))
    player_wins = i_font.render("Player " + str(b), True, (255, 255, 255))
    comp_tm_wins = i_font.render("Comp Teammate " + str(c), True, (255, 255, 255))
    player_tm_wins = i_font.render("Player Teammate " + str(d), True, (255, 255, 255))
    win2.blit(comp_wins, (5, 50))
    win2.blit(player_wins, (70, 50))
    win2.blit(comp_tm_wins, (140, 50))
    win2.blit(player_tm_wins, (270, 50))
    # Headline: winning team (or draw) by total tricks.
    winner_player = font.render(
        player_information[0].upper() + " TEAM WINS!", True, (255, 255, 255))
    winner_comp = font.render("COMP TEAM WINS!", True, (255, 255, 255))
    match_draw = font.render("ITS A DRAW!", True, (255, 255, 255))
    if total_player_won > total_comp_won:
        win2.blit(winner_player, (100, 20))
    elif total_player_won < total_comp_won:
        win2.blit(winner_comp, (120, 20))
    elif total_comp_won == total_player_won:
        win2.blit(match_draw, (120, 20))
    # Exit on window close or Enter; Space is deliberately a no-op.
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            run = False
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_RETURN:
                run = False
            if event.key == pygame.K_SPACE:
                pass
def start_play():
    """Drive one frame of game progress.

    Ensures trumps are chosen, plays whichever round is next due
    (round N waits for round N-1), deals a fresh trick after every round
    except the last, and switches to the winner screen once all 13
    rounds are done.
    """
    global all_rounds_done
    if trump_list_chosen is False:
        trump_list()
    # Round 1 has no predecessor to wait on.
    if round_status['round1'] is False:
        play_round(1)
        start_new_round()
    # Rounds 2-12 run once their predecessor has finished.
    for rnd in range(2, 13):
        prev_done = round_status['round' + str(rnd - 1)]
        if round_status['round' + str(rnd)] is False and prev_done:
            play_round(rnd)
            start_new_round()
    # The final round is not followed by a fresh deal.
    if round_status['round13'] is False and round_status['round12']:
        play_round(13)
    check_rounds_completed()
    if all_rounds_done:
        play_area_clear()
        winner_screen()
# NOTE(review): if ``check_for_quit`` is a function object rather than a
# flag, this truthiness test is always True and the call was probably
# intended (``check_for_quit()``) — confirm before changing.
run = not check_for_quit
# Starting coordinates for the falling intro suit letters.
d_x = 0
d_y = -100
i_x = 100
i_y = -150
g_x = 200
g_y = -100
u_x = 300
u_y = -150
game_intro = False
# Main game loop: first the animated intro screen, then one frame of
# gameplay per iteration until ``run`` is cleared.
while run:
    # Intro screen: suit letters fall until the player presses Enter.
    while game_intro is False:
        win2 = pygame.display.set_mode((400, 200))
        win2.fill((0, 0, 0))
        user_comm = i_font.render("Press Enter to begin game", True, (161, 17, 17))
        # ``Trumps`` decides who will select trumps for the match.
        if Trumps:
            intro_show_trump = i_font.render("You select trumps", True, (161, 17, 17))
            win2.blit(intro_show_trump, (260, 180))
        elif Trumps is False:
            intro_show_trump = i_font.render("Computer selects trumps", True, (161, 17, 17))
            win2.blit(intro_show_trump, (230, 180))
        win2.blit(user_comm, (20, 180))
        win2.blit(d_intro_suit, (d_x, d_y))
        win2.blit(i_intro_suit, (i_x, i_y))
        win2.blit(g_intro_suit, (g_x, g_y))
        win2.blit(u_intro_suit, (u_x, u_y))
        # Advance the falling letters; each wraps back above the window
        # when it reaches y == 70.
        d_y += 1
        i_y += 1
        g_y += 1
        u_y += 1
        if d_y == 70:
            d_y = -100
        if i_y == 70:
            i_y = -150
        if g_y == 70:
            g_y = -100
        if u_y == 70:
            u_y = -150
        pygame.display.update()
        for event in pygame.event.get():
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_RETURN:
                    game_intro = True
    # Gameplay frame: (re)initialize and advance the match state.
    if game_intro:
        initialize_game()
        start_play()
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            run = False
    pygame.display.update()
| 3.140625 | 3 |
plasticparser/grammar_parsers.py | Aplopio/plasticparser | 22 | 12759999 | <filename>plasticparser/grammar_parsers.py
# -*- coding: utf-8 -*-
import plasticparser
# Characters that Elasticsearch's query_string syntax treats as operators;
# the sanitize_* helpers below backslash-escape (subsets of) these.
RESERVED_CHARS = ('\\', '+', '-', '&&',
                  '||', '!', '(', ')',
                  '{', '}', '[', ']',
                  '^', '~', '*',
                  '?', '/', ':')
class Facets(object):
    """Wrapper carrying a pre-built facets DSL fragment to the final query."""

    def __init__(self, facets_dsl):
        # facets_dsl: dict in Elasticsearch facets format, stored verbatim.
        self.facets_dsl = facets_dsl

    def get_query(self):
        """Return the stored facets DSL unchanged."""
        return self.facets_dsl
class Highlight(object):
    """Wrapper building the ``highlight`` section of the final query."""

    def __init__(self, fields):
        # Wrap the field spec in the shape Elasticsearch expects.
        self.highlight_dsl = {"fields": fields}

    def get_query(self):
        """Return the highlight DSL dict."""
        return self.highlight_dsl
class Sort(object):
    """Wrapper carrying a pre-built ``sort`` specification."""

    def __init__(self, fields):
        # fields: sort spec in Elasticsearch format, stored verbatim.
        self.sort_dsl = fields

    def get_query(self):
        """Return the stored sort spec unchanged."""
        return self.sort_dsl
class Aggregations(object):
    """Wrapper carrying a pre-built ``aggregations`` DSL fragment."""

    def __init__(self, aggregations_dsl):
        # aggregations_dsl: dict in Elasticsearch aggregations format.
        self.aggregations_dsl = aggregations_dsl

    def get_query(self):
        """Return the stored aggregations DSL unchanged."""
        return self.aggregations_dsl
class Nested(object):
    """Wrapper carrying a pre-built nested-filter DSL fragment.

    Its payload is appended to the bool filter's ``must`` list.
    """

    def __init__(self, nested_dsl):
        # nested_dsl: dict in Elasticsearch nested-filter format.
        self.nested_dsl = nested_dsl

    def get_query(self):
        """Return the stored nested DSL unchanged."""
        return self.nested_dsl
class Type(object):
    """Wrapper carrying a document-type filter DSL fragment.

    Its payload is appended to the bool filter's ``must`` list.
    """

    def __init__(self, type_dsl):
        # type_dsl: dict like {"type": {"value": <name>}}.
        self.type_dsl = type_dsl

    def get_query(self):
        """Return the stored type DSL unchanged."""
        return self.type_dsl
class Query(object):
    """Wrapper holding the free query string for the query_string clause."""

    def __init__(self, query):
        # query: the raw query string (may carry surrounding whitespace).
        self.query = query

    def get_query(self):
        """Return the query string with surrounding whitespace stripped."""
        return self.query.strip()
def sanitize_value(value):
    """Backslash-escape every reserved query_string character except ``(``.

    Non-string values are returned untouched.  Note that ``)`` *is*
    escaped even though ``(`` is not, matching the original behaviour.
    The original's Python-2-only ``basestring`` check is replaced with a
    version-agnostic one (``str is bytes`` only on Python 2).
    """
    string_types = basestring if str is bytes else str
    if not isinstance(value, string_types):
        return value
    for char in RESERVED_CHARS:
        # Original used ``char not in "("`` — equivalent to != for these.
        if char != '(':
            value = value.replace(char, u'\\{}'.format(char))
    return value
def sanitize_facet_value(value):
    """Escape reserved query_string characters, keeping parentheses intact.

    Non-string values are returned untouched.  The ``"`` entry in the
    exclusion list is inert (``"`` is not in RESERVED_CHARS) but kept for
    fidelity with the original.  The Python-2-only ``basestring`` check
    is replaced with a version-agnostic one.
    """
    string_types = basestring if str is bytes else str
    if not isinstance(value, string_types):
        return value
    for char in RESERVED_CHARS:
        if char not in ('"', '(', ')'):
            value = value.replace(char, u'\\{}'.format(char))
    return value
def sanitize_free_text(value):
    """Escape reserved characters in free text, keeping ``(``, ``)``, ``:``.

    Non-string values are returned untouched.  The Python-2-only
    ``basestring`` check is replaced with a version-agnostic one.
    """
    string_types = basestring if str is bytes else str
    if not isinstance(value, string_types):
        return value
    for char in RESERVED_CHARS:
        if char not in ('(', ')', ':'):
            value = value.replace(char, u'\\{}'.format(char))
    return value
def parse_free_text(tokens):
    """Pyparsing action: return the first token with free-text escaping."""
    first_token = tokens[0]
    return sanitize_free_text(first_token)
def parse_compare_expression(tokens):
    """Pyparsing action: rebuild ``field<op>value`` with the value escaped."""
    field, operator, raw_value = tokens[0], tokens[1], tokens[2]
    return u"{}{}{}".format(field, operator, sanitize_value(raw_value))
def parse_facet_compare_expression(tokens):
    """Pyparsing action: rebuild a facet ``field<op>value`` with escaping."""
    field, operator, raw_value = tokens[0], tokens[1], tokens[2]
    return u"{}{}{}".format(field, operator, sanitize_facet_value(raw_value))
def parse_logical_expression(tokens):
    """Pyparsing action: join all parsed tokens into one space-separated string."""
    parts = tokens.asList()
    return u' '.join(parts)
def parse_paren_base_logical_expression(tokens):
    """Pyparsing action: rebuild a parenthesized expression ``(<inner>)``."""
    open_paren, inner, close_paren = tokens[0], tokens[1], tokens[2]
    return u'{}{}{}'.format(open_paren, inner, close_paren)
def default_parse_func(tokens):
    """Split parse tokens into DSL wrapper objects plus one Query.

    Known DSL objects (Nested, Facets, Highlight, Aggregations, Type) are
    passed through untouched; every remaining token is assumed to be a
    plain string and joined into a single Query object appended last.

    Fixes relative to the original implementation:
    * tokens were removed from the list *while iterating over it*, which
      skips the element that follows every removal;
    * the last isinstance check used the builtin ``type`` instead of the
      ``Type`` DSL class, so type tokens were never extracted.
    """
    dsl_classes = (Nested, Facets, Highlight, Aggregations, Type)
    return_list = []
    plain_tokens = []
    for token in tokens.asList():
        if isinstance(token, dsl_classes):
            return_list.append(token)
        else:
            plain_tokens.append(token)
    return_list.append(Query(' '.join(plain_tokens)))
    return return_list
parse_one_or_more_logical_expressions = parse_base_logical_expression = default_parse_func
def parse_type_expression(tokens):
    """Wrap a ``type:<value>`` token pair as a Type DSL object."""
    return Type({
        "type": {"value": tokens[1]}
    })
def parse_type_logical_facets_expression(tokens):
    """Assemble the complete Elasticsearch query DSL from parsed tokens.

    Nested and Type tokens go into the bool filter's must list; the
    free-text Query, Facets, Highlight, Sort and Aggregations tokens fill
    their respective top-level sections when present.
    """
    must_list = []
    should_list = []
    must_not_list = []
    facets = {}
    aggs = {}
    highlights = None
    sort = None
    # BUGFIX: ``query`` was never initialised, so a token list without a
    # Query object raised NameError at the ``if query:`` check below.
    query = None
    for token in tokens.asList():
        if isinstance(token, Nested):
            nested = token.get_query()
            must_list.append(nested)
        if isinstance(token, Query):
            query = token.get_query()
        if isinstance(token, Facets):
            facets = token.get_query()
        if isinstance(token, Highlight):
            highlights = token.get_query()
        if isinstance(token, Sort):
            sort = token.get_query()
        if isinstance(token, Aggregations):
            aggs = token.get_query()
        if isinstance(token, Type):
            # Renamed from ``type`` to avoid shadowing the builtin.
            type_filter = token.get_query()
            must_list.append(type_filter)
    query_dsl = {
        "query": {
            "filtered": {
                "filter": {
                    "bool": {
                        "must": must_list,
                        "should": should_list,
                        "must_not": must_not_list
                    }
                }
            }
        }
    }
    if highlights is not None:
        query_dsl['highlight'] = highlights
    if sort is not None:
        query_dsl['sort'] = sort
    if facets:
        query_dsl['facets'] = facets
    if aggs:
        query_dsl['aggregations'] = aggs
        # `size` is added in version 2.0
        # `size` is used to return only counts without hits
        query_dsl['size'] = 0
    if query:
        query_dsl["query"]["filtered"]["query"] = {
            "query_string": {
                "query": query,
                "default_operator": getattr(
                    plasticparser, 'DEFAULT_OPERATOR', 'and')
            }
        }
    return query_dsl
def parse_single_facet_expression(tokens):
    """Build the facet definition for a single ``facets:[...]`` entry.

    tokens[0] is the facet key; an optional tokens[1] is a filter
    expression applied as a facet_filter.
    """
    facet_key = tokens[0]
    has_filter = len(tokens) > 1
    is_nested = "." in facet_key
    field = facet_key
    if is_nested:
        # Dotted key -> nested document field: facet on the non-ngram
        # sub-field and remember the nested path for the filter case.
        parts = facet_key.split(".")
        nested_field = u".".join(parts[:-1])
        field = "{}_nonngram".format(field)
    facet_body = {
        "terms": {
            "field": field,
            "size": getattr(plasticparser, 'FACETS_QUERY_SIZE', 20),
        },
    }
    if has_filter:
        facet_body["facet_filter"] = {
            "query": {
                "query_string": {"query": tokens[1], "default_operator": "and"}
            }
        }
        if is_nested:
            facet_body['nested'] = nested_field
    return {facet_key: facet_body}
def parse_highlight_field_expression(tokens):
    """Parse a single highlight field from the query.

    eg:
        query: highlight[field1, field2]
        parsed output (per field): {"field1": {}}

    TODO: add support for highlighting options provided by ElasticSearch
    """
    field_name = tokens[0]
    return {field_name: {}}
def parse_sort_field_option(tokens):
    """Fold sort-field options into (possibly nested) (key, value) pairs."""
    if len(tokens) == 1:
        # Base case: a single (key, value) pair.
        return tokens[0][0], tokens[0][1]
    # Recursive case: first element is a key whose value is another option.
    return tokens[0], parse_sort_field_option(tokens[1])
def parse_sort_field_expression(tokens):
    """Parse a single sort field (with optional options) from the query.

    eg:
        query: sort[field1, field2]
        parsed output:
        {"field1": {}} and {"field2": {}}

    A leading ``-`` token flips the order to descending.
    """
    toks = tokens.asList()
    descending = "-" in toks
    if descending:
        toks.pop(0)

    def nested_to_dict(node):
        # Turn recursive (key, value) pairs into nested option dicts.
        if type(node) == tuple:
            if type(node[1]) == tuple:
                return {node[0]: nested_to_dict(node[1])}
            return {node[0]: node[1]}
        return node

    field = toks[0]
    if len(toks) > 1:
        options = {pair[0]: nested_to_dict(pair[1]) for pair in toks[1]}
    else:
        options = {}
    options['order'] = 'desc' if descending else 'asc'
    return {field: options}
def parse_single_aggs_expression(tokens):
    """
    Parses single aggregation query. Following is example input and output:
    INPUT
    type:candidates (name:"<NAME>" starred:true) (python or java) facets:[location]
    OUTPUT
    {
    ...
    "aggregations": {
        "location": {
            "aggregations": {
                "location": {
                    "terms": {
                        "field": "location_nonngram",
                        "size": 20
                    }
                }
            }
        }
    }
    ...
    }
    """
    # tokens[0] is the aggregation key; optional tokens[1] is a filter.
    aggs_key = tokens[0]
    filters = {
        aggs_key: {
            "aggregations": {
                aggs_key: {}
            }
        }
    }
    field = aggs_key
    if "." in aggs_key:
        # Dotted key -> nested document field: aggregate on the non-ngram
        # sub-field and remember the nested path for later.
        nested_keys = aggs_key.split(".")
        nested_field = u".".join(nested_keys[:-1])
        field = "{}_nonngram".format(field)
    # Size falls back to 20 unless the plasticparser module overrides it.
    filters[aggs_key]["aggregations"][aggs_key]["terms"] = {
        "field": field, "size": getattr(
            plasticparser, 'FACETS_QUERY_SIZE', 20)
    }
    if len(tokens) > 1:
        # Apply the extra expression as a filter sub-aggregation.
        filters[aggs_key]["aggregations"][aggs_key]["aggregations"] = {
            aggs_key: {'filter': {
                "query": {
                    "query_string": {
                        "query": tokens[1], "default_operator": "and"
                    }
                }
            }}
        }
    if len(tokens) > 1 and "." in aggs_key:
        filters[aggs_key]['nested'] = {'path': nested_field}
    return filters
def parse_base_facets_expression(tokens):
    """Merge the individual facet dicts into a single Facets object."""
    merged = {}
    for facet_dict in tokens.asList():
        merged.update(facet_dict)
    return Facets(merged)
def parse_highlight_expression(tokens):
    """Generates query DSL from parsed single highlight fields from query."""
    fields = {}
    for field_dict in tokens.asList():
        fields.update(field_dict)
    return Highlight(fields)
def parse_sort_expression(tokens):
    """Generates query DSL from parsed single sort fields from query."""
    fields = {}
    for sort_dict in tokens.asList():
        fields.update(sort_dict)
    return Sort(fields)
def parse_base_aggs_expression(tokens):
    """Merge the individual aggregation dicts into an Aggregations object."""
    return Aggregations({k: v for d in tokens.asList() for k, v in d.items()})
def join_words(tokens):
    """Join parsed word tokens into one space-separated string."""
    return u' '.join(tokens.asList())
def join_brackets(tokens):
    """Concatenate bracket tokens with no separator."""
    return u''.join(tokens.asList())
def parse_one_or_more_facets_expression(tokens):
    """Join facet expression tokens into a single space-separated string."""
    return u' '.join(tokens)
def parse_one_or_more_aggs_expression(tokens):
    """Join aggregation expression tokens into a single space-separated string."""
    return u' '.join(tokens)
def parse_base_nested_expression(tokens):
    """Unwrap the single token produced by the nested-expression grammar."""
    return tokens[0]
def parse_single_nested_expression(tokens):
    """Wrap a (path, query) token pair as a Nested DSL object."""
    path, nested_query = tokens[0], tokens[1]
    query_string = {
        "query": nested_query,
        "default_operator": "and",
    }
    return Nested({
        "nested": {
            "path": path,
            "query": {"query_string": query_string},
        }
    })
| 2.8125 | 3 |
tests/pipeline_test.py | isi-vista/adam | 8 | 12760000 | from vistautils.iter_utils import only
from adam.language import TokenSequenceLinguisticDescription
from adam.learner import LearningExample, MemorizingLanguageLearner
from adam.perception import (
BagOfFeaturesPerceptualRepresentationFrame,
PerceptualRepresentation,
)
def test_pipeline():
    """End-to-end check of the observe/describe loop on a memorizing learner."""
    red_truck = ("red", "truck")
    training_examples = [
        LearningExample(
            perception=PerceptualRepresentation(
                [BagOfFeaturesPerceptualRepresentationFrame(red_truck)]
            ),
            linguistic_description=TokenSequenceLinguisticDescription(red_truck),
        )
    ]

    learner: MemorizingLanguageLearner[
        BagOfFeaturesPerceptualRepresentationFrame, TokenSequenceLinguisticDescription
    ] = MemorizingLanguageLearner()
    for training_example in training_examples:
        learner.observe(training_example)

    def describe_tokens(tokens):
        # Helper: run the learner on a single bag-of-features frame.
        return learner.describe(
            PerceptualRepresentation(
                [BagOfFeaturesPerceptualRepresentationFrame(tokens)]
            )
        )

    # shouldn't be able to describe "red" or "truck" alone
    assert not describe_tokens(("red",))
    assert not describe_tokens(("truck",))
    # but should be able to describe "red truck"
    descriptions = describe_tokens(red_truck)
    assert len(descriptions) == 1
    assert only(descriptions).as_token_sequence() == red_truck
| 2.671875 | 3 |
app/main.py | VikashKothary/market-information-service | 0 | 12760001 | <gh_stars>0
#!/usr/bin/env python3
"""
main.py - Run the complete application
"""
from app import app, db
from views import views
from phone import phone
from chatbot import chatbot
from listenbot import listenbot
from models import *
# Create the database tables before any request handlers can run.
db.create_tables()
# Mount each feature area under its own URL prefix.
app.register_blueprint(views)
app.register_blueprint(phone, url_prefix='/phone')
app.register_blueprint(chatbot, url_prefix='/chat')
app.register_blueprint(listenbot, url_prefix='/listen')
def run():
    """Start the Flask development server on all interfaces, port 5000."""
    # NOTE(review): debug=True enables the interactive debugger — confirm
    # this entry point is for development only.
    app.run(host='0.0.0.0', port=5000, debug=True)
if __name__ == '__main__':
    run()
f5/bigip/tm/sys/test/functional/test_sshd.py | nghia-tran/f5-common-python | 272 | 12760002 | <reponame>nghia-tran/f5-common-python<filename>f5/bigip/tm/sys/test/functional/test_sshd.py<gh_stars>100-1000
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
V11_SUPPORTED = ['11.5.4', '11.6.0', '11.6.1', '11.6.2']
V12_SUPPORTED = ['12.0.0', '12.1.0']
def setup_sshd_test(request, mgmt_root):
    """Load the sshd config resource and register a teardown that restores defaults."""
    sshd = mgmt_root.tm.sys.sshd.load()

    def teardown():
        # Restore factory defaults so subsequent tests see a clean config.
        sshd.allow = ['ALL']
        sshd.banner = 'disabled'
        sshd.bannerText = ''
        sshd.inactivityTimeout = 0
        sshd.logLevel = 'info'
        sshd.login = 'enabled'
        if pytest.config.getoption('--release') in V12_SUPPORTED:
            # ``port`` only exists on v12+ TMOS.
            sshd.port = 22
        sshd.update()

    request.addfinalizer(teardown)
    return sshd
@pytest.mark.skipif(pytest.config.getoption('--release') not in V11_SUPPORTED,
                    reason='Needs v11 TMOS to pass')
class TestSshd11(object):
    """Functional tests for the sshd config resource on v11 TMOS.

    Each update test mutates one attribute through a first handle (ssh1),
    verifies a second, stale handle (ssh2) does not see the change until
    it is refreshed, then refreshes and re-checks.
    """
    def test_load(self, request, mgmt_root):
        # Two independent loads of the same resource must agree.
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        assert ssh1.allow == ssh2.allow
        assert ssh1.banner == ssh2.banner
        assert ssh1.inactivityTimeout == ssh2.inactivityTimeout
        assert ssh1.logLevel == ssh2.logLevel
        assert ssh1.login == ssh2.login
    def test_update_allow(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.allow = ['192.168.1.1']
        ssh1.update()
        assert ['192.168.1.1'] == ssh1.allow
        assert ['192.168.1.1'] != ssh2.allow
        # Refresh
        ssh2.refresh()
        assert ['192.168.1.1'] == ssh2.allow
    def test_update_banner(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        banners = ['enabled', 'disabled']
        for banner in banners:
            ssh1.banner = banner
            ssh1.update()
            assert banner == ssh1.banner
            assert banner != ssh2.banner
            # Refresh
            ssh2.refresh()
            assert banner == ssh2.banner
    def test_update_bannerText(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.bannerText = 'foo banner'
        ssh1.update()
        assert 'foo banner' == ssh1.bannerText
        # bannerText is absent from the resource until it has been set.
        assert not hasattr(ssh2, 'bannerText')
        # Refresh
        ssh2.refresh()
        assert 'foo banner' == ssh2.bannerText
    def test_update_inactivityTimeout(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.inactivityTimeout = 10
        ssh1.update()
        assert 10 == ssh1.inactivityTimeout
        assert 10 != ssh2.inactivityTimeout
        # Refresh
        ssh2.refresh()
        assert 10 == ssh2.inactivityTimeout
    def test_update_logLevel(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        levels = ['debug', 'debug1', 'debug2', 'debug3', 'error', 'fatal',
                  'info', 'quiet', 'verbose']
        for level in levels:
            ssh1.logLevel = level
            ssh1.update()
            assert level == ssh1.logLevel
            assert level != ssh2.logLevel
            # Refresh
            ssh2.refresh()
            assert level == ssh2.logLevel
    def test_update_login(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        logins = ['disabled', 'enabled']
        for login in logins:
            ssh1.login = login
            ssh1.update()
            assert login == ssh1.login
            assert login != ssh2.login
            # Refresh
            ssh2.refresh()
            assert login == ssh2.login
@pytest.mark.skipif(pytest.config.getoption('--release') not in V12_SUPPORTED,
                    reason='Needs v12 TMOS to pass')
class TestSshd12(object):
    """Functional tests for the sshd config resource on v12 TMOS.

    Mirrors TestSshd11 (stale-handle update/refresh pattern) and adds the
    v12-only ``port`` attribute.
    """
    def test_load(self, request, mgmt_root):
        # Two independent loads of the same resource must agree.
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        assert ssh1.allow == ssh2.allow
        assert ssh1.banner == ssh2.banner
        assert ssh1.inactivityTimeout == ssh2.inactivityTimeout
        assert ssh1.logLevel == ssh2.logLevel
        assert ssh1.login == ssh2.login
        assert ssh1.port == ssh2.port
    def test_update_allow(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.allow = ['192.168.1.1']
        ssh1.update()
        assert ['192.168.1.1'] == ssh1.allow
        assert ['192.168.1.1'] != ssh2.allow
        # Refresh
        ssh2.refresh()
        assert ['192.168.1.1'] == ssh2.allow
    def test_update_banner(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        banners = ['enabled', 'disabled']
        for banner in banners:
            ssh1.banner = banner
            ssh1.update()
            assert banner == ssh1.banner
            assert banner != ssh2.banner
            # Refresh
            ssh2.refresh()
            assert banner == ssh2.banner
    def test_update_bannerText(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.bannerText = 'foo banner'
        ssh1.update()
        assert 'foo banner' == ssh1.bannerText
        # bannerText is absent from the resource until it has been set.
        assert not hasattr(ssh2, 'bannerText')
        # Refresh
        ssh2.refresh()
        assert 'foo banner' == ssh2.bannerText
    def test_update_inactivityTimeout(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.inactivityTimeout = 10
        ssh1.update()
        assert 10 == ssh1.inactivityTimeout
        assert 10 != ssh2.inactivityTimeout
        # Refresh
        ssh2.refresh()
        assert 10 == ssh2.inactivityTimeout
    def test_update_logLevel(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        levels = ['debug', 'debug1', 'debug2', 'debug3', 'error', 'fatal',
                  'info', 'quiet', 'verbose']
        for level in levels:
            ssh1.logLevel = level
            ssh1.update()
            assert level == ssh1.logLevel
            assert level != ssh2.logLevel
            # Refresh
            ssh2.refresh()
            assert level == ssh2.logLevel
    def test_update_login(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        logins = ['disabled', 'enabled']
        for login in logins:
            ssh1.login = login
            ssh1.update()
            assert login == ssh1.login
            assert login != ssh2.login
            # Refresh
            ssh2.refresh()
            assert login == ssh2.login
    def test_update_port(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.port = 1234
        ssh1.update()
        assert 1234 == ssh1.port
        assert 1234 != ssh2.port
        # Refresh
        ssh2.refresh()
        assert 1234 == ssh2.port
| 1.921875 | 2 |
lxserv/tagger_pTagSelectionFCL.py | 9bstudios/mecco_tagger | 0 | 12760003 | # python
import lx, lxifc, lxu.command, modo, tagger, random
CMD_NAME = tagger.CMD_PTAG_SELECTION_FCL
global_tags = None
global_poly_count = 0
def list_commands():
    """Build the form command list (FCL): one selection command per tag
    found on the currently selected polygons.

    Returns a list of command strings, or a single no-op entry when there
    is nothing useful to show (no polygons selected, too many polygons to
    inspect, no tags found, or too many tags to fit the form).
    """
    timer = tagger.DebugTimer()
    global global_tags
    global global_poly_count

    fcl = []
    # One set per tag type: [materials, parts, selection sets].
    global_tags = [
        set(),
        set(),
        set()
    ]
    global_poly_count = 0

    # Reading the mesh populates global_tags / global_poly_count as a side
    # effect (see MeshEditorClass.mesh_read_action below).
    mesh_editor = MeshEditorClass()
    mesh_editor.do_mesh_read()

    selmode = tagger.selection.get_mode()

    if global_poly_count == 0 or selmode not in ['polygon', 'edge', 'vertex']:
        fcl.append("%s {%s}" % (tagger.CMD_NOOP, tagger.LABEL_NO_POLYS))
        timer.end()
        return fcl
    elif global_poly_count > tagger.MAX_FCL_POLY_INSPECT:
        fcl.append("%s {%s}" % (tagger.CMD_NOOP, tagger.LABEL_MAX_POLY))
        timer.end()
        return fcl

    total_tags = sum(len(tags) for tags in global_tags)
    if total_tags == 0:
        fcl.append("%s {%s}" % (tagger.CMD_NOOP, tagger.LABEL_NO_TAGS))
        timer.end()
        return fcl

    # BUGFIX: the original compared len(global_tags) — always 3, one set
    # per tag type — against MAX_FCL. The intended check is the total
    # number of tags, i.e. how many FCL entries would be generated.
    if total_tags > tagger.MAX_FCL:
        fcl.append("%s {%s}" % (tagger.CMD_NOOP, tagger.LABEL_MAX_FCL))
        timer.end()
        return fcl

    for n in range(len(global_tags)):
        if not global_tags[n]:
            continue
        for tag in sorted(global_tags[n]):
            tagType = [tagger.MATERIAL, tagger.PART, tagger.PICK][n]
            if tagType == tagger.MATERIAL:
                command = tagger.CMD_SELECT_ALL_BY_MATERIAL
            elif tagType == tagger.PART:
                command = tagger.CMD_SELECT_ALL_BY_PART
            elif tagType == tagger.PICK:
                command = tagger.CMD_SELECT_ALL_BY_SET
            fcl.append("%s {%s}" % (command, tag))

    timer.end()
    return fcl
class CommandClass(tagger.CommanderClass):
    """MODO command exposing the per-tag selection FCL as a query argument."""
    #_commander_default_values = []

    def commander_arguments(self):
        # Single query argument whose values are rebuilt by list_commands().
        return [
            {
                'name': tagger.QUERY,
                'label': tagger.LABEL_QUERY,
                'datatype': 'integer',
                'default': '',
                'values_list_type': 'fcl',
                'values_list': list_commands,
                'flags': ['query'],
            }
        ]

    def commander_notifiers(self):
        # Rebuild the FCL whenever the polygon/item selection changes.
        return [("select.event", "polygon +ldt"),("select.event", "item +ldt"), ("tagger.notifier", "")]
# Register the command with MODO under the tagger FCL command name.
lx.bless(CommandClass, CMD_NAME)
class MeshEditorClass(tagger.MeshEditorClass):
    """Mesh reader that harvests material/part/pick tags from selected polys."""
    def mesh_read_action(self):
        # Accumulates into the module-level global_tags / global_poly_count
        # so list_commands() can consume the results after do_mesh_read().
        global global_tags
        global global_poly_count
        stringTag = lx.object.StringTag()
        stringTag.set(self.polygon_accessor)
        selected_polys = self.get_selected_polys()
        for poly in selected_polys:
            global_poly_count += 1
            if global_poly_count > tagger.MAX_FCL_POLY_INSPECT:
                # Too many polygons to inspect interactively; bail out early.
                break
            # The accessor must select the polygon before tags can be read.
            self.polygon_accessor.Select(poly)
            material = stringTag.Get(lx.symbol.i_POLYTAG_MATERIAL)
            if material:
                global_tags[0].add(material)
            part = stringTag.Get(lx.symbol.i_POLYTAG_PART)
            if part:
                global_tags[1].add(part)
            pick = stringTag.Get(lx.symbol.i_POLYTAG_PICK)
            if pick:
                # Pick tags are a semicolon-delimited list of selection sets.
                global_tags[2].update(pick.split(";"))
| 2.203125 | 2 |
trove/common/template.py | citrix-openstack-build/trove | 0 | 12760004 | <reponame>citrix-openstack-build/trove<gh_stars>0
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import jinja2
# Template environment: site overrides in /etc/trove/templates take
# precedence over the templates bundled with the trove package.
ENV = jinja2.Environment(loader=jinja2.ChoiceLoader([
    jinja2.FileSystemLoader("/etc/trove/templates"),
    jinja2.PackageLoader("trove", "templates")
]))
class SingleInstanceConfigTemplate(object):
    """Renders the per-datastore configuration file for a guest instance.

    The jinja template is selected by database type (e.g.
    ``mysql.config.template``) and rendered with the instance's flavor
    details plus a stable server id derived from the instance id.
    """

    def __init__(self, service_type, flavor_dict, instance_id):
        """Look up the jinja template for *service_type*.

        :param service_type: the database type (selects the template file)
        :param flavor_dict: flavor details made available to the template
        :param instance_id: trove instance id (seeds the server id)
        """
        self.flavor_dict = flavor_dict
        self.template = ENV.get_template("%s.config.template" % service_type)
        self.instance_id = instance_id

    def render(self):
        """Render the template; cache the text on self and return it."""
        self.config_contents = self.template.render(
            flavor=self.flavor_dict,
            server_id=self._calculate_unique_id())
        return self.config_contents

    def _calculate_unique_id(self):
        """Derive a positive id in [0, 2**31) from the instance id."""
        return abs(hash(self.instance_id) % (2 ** 31))
class HeatTemplate(object):
    """Static Heat (CloudFormation-style) template used to provision a
    guest instance with an attached data volume.

    The template expects KeyName, Flavor, VolumeSize, ServiceType,
    InstanceId and AvailabilityZone parameters; cfn-init writes
    /etc/guest_info on the instance before the guest agent starts.
    """
    template_contents = """HeatTemplateFormatVersion: '2012-12-12'
Description: Instance creation
Parameters:
  KeyName: {Type: String}
  Flavor: {Type: String}
  VolumeSize: {Type: Number}
  ServiceType: {Type: String}
  InstanceId: {Type: String}
  AvailabilityZone : {Type: String}
Resources:
  BaseInstance:
    Type: AWS::EC2::Instance
    Metadata:
      AWS::CloudFormation::Init:
        config:
          files:
            /etc/guest_info:
              content:
                Fn::Join:
                - ''
                - ["[DEFAULT]\\nguest_id=", {Ref: InstanceId},
                  "\\nservice_type=", {Ref: ServiceType}]
              mode: '000644'
              owner: root
              group: root
    Properties:
      ImageId:
        Fn::Join:
        - ''
        - ["ubuntu_", {Ref: ServiceType}]
      InstanceType: {Ref: Flavor}
      KeyName: {Ref: KeyName}
      AvailabilityZone: {Ref: AvailabilityZone}
      UserData:
        Fn::Base64:
          Fn::Join:
          - ''
          - ["#!/bin/bash -v\\n",
            "/opt/aws/bin/cfn-init\\n",
            "sudo service trove-guest start\\n"]
  DataVolume:
    Type: AWS::EC2::Volume
    Properties:
      Size: {Ref: VolumeSize}
      AvailabilityZone: {Ref: AvailabilityZone}
      Tags:
      - {Key: Usage, Value: Test}
  MountPoint:
    Type: AWS::EC2::VolumeAttachment
    Properties:
      InstanceId: {Ref: BaseInstance}
      VolumeId: {Ref: DataVolume}
      Device: /dev/vdb"""
    def template(self):
        """Return the raw Heat template text."""
        return self.template_contents
| 2.234375 | 2 |
examples/03_connectivity/plot_compare_resting_state_decomposition.py | Titan-C/nilearn | 1 | 12760005 | <reponame>Titan-C/nilearn
"""
Dictionary Learning and ICA for doing group analysis of resting-state fMRI
==========================================================================
This example applies dictionary learning and ICA to resting-state data,
visualizing resulting components using atlas plotting tools.
Dictionary learning is a sparsity based decomposition method for extracting
spatial maps. It extracts maps that are naturally sparse and usually cleaner
than ICA
* <NAME> et al.
Multi-subject dictionary learning to segment an atlas of brain spontaneous
activity
Information Processing in Medical Imaging, 2011, pp. 562-573, Lecture Notes
in Computer Science
Available on https://hal.inria.fr/inria-00588898/en/
"""
###############################################################################
# Load ADHD rest dataset
# NOTE(review): fetch_adhd downloads data on first use — network required.
from nilearn import datasets

adhd_dataset = datasets.fetch_adhd(n_subjects=30)
func_filenames = adhd_dataset.func  # list of 4D nifti files for each subject

# print basic information on the dataset
print('First functional nifti image (4D) is at: %s' %
      adhd_dataset.func[0])  # 4D data

###############################################################################
# Create two decomposition estimators
from nilearn.decomposition import DictLearning, CanICA

n_components = 40

###############################################################################
# Dictionary learning
dict_learning = DictLearning(n_components=n_components,
                             memory="nilearn_cache", memory_level=2,
                             verbose=1,
                             random_state=0,
                             n_epochs=1)
###############################################################################
# CanICA
canica = CanICA(n_components=n_components,
                memory="nilearn_cache", memory_level=2,
                threshold=3.,
                n_init=1,
                verbose=1)

###############################################################################
# Fit both estimators and write each component image to disk.
estimators = [dict_learning, canica]
names = {dict_learning: 'DictionaryLearning', canica: 'CanICA'}
components_imgs = []

for estimator in estimators:
    print('[Example] Learning maps using %s model' % names[estimator])
    estimator.fit(func_filenames)
    print('[Example] Saving results')
    # Decomposition estimator embeds their own masker
    masker = estimator.masker_
    # Drop output maps to a Nifti file
    components_img = masker.inverse_transform(estimator.components_)
    components_img.to_filename('%s_resting_state.nii.gz' %
                               names[estimator])
    components_imgs.append(components_img)

###############################################################################
# Visualize the results
from nilearn.plotting import (plot_prob_atlas, find_xyz_cut_coords, show,
                              plot_stat_map)
from nilearn.image import index_img

# Selecting specific maps to display: maps were manually chosen to be similar
indices = {dict_learning: 1, canica: 31}
# We select relevant cut coordinates for displaying
cut_component = index_img(components_imgs[0], indices[dict_learning])
cut_coords = find_xyz_cut_coords(cut_component)
for estimator, components in zip(estimators, components_imgs):
    # 4D plotting
    plot_prob_atlas(components, view_type="filled_contours",
                    title="%s" % names[estimator],
                    cut_coords=cut_coords, colorbar=False)
    # 3D plotting
    plot_stat_map(index_img(components, indices[estimator]),
                  title="%s" % names[estimator],
                  cut_coords=cut_coords, colorbar=False)
show()
| 2.5 | 2 |
src/controls/size_control.py | furbrain/CVExplorer | 0 | 12760006 | # noinspection PyPep8Naming
from controls.composite import CompositeControl
class SizeControl(CompositeControl):
    """Composite control exposing a Width/Height pair (both ints, default 3)."""

    @classmethod
    def get_fields(cls):
        """Return the parameter templates describing this control's fields."""
        # Imported here rather than at module level — presumably to avoid a
        # circular import with the functions module; confirm before hoisting.
        from functions import ParameterTemplate
        return [
            ParameterTemplate(name, "int", default=3)
            for name in ("Width", "Height")
        ]
| 2.015625 | 2 |
rackspace/heat_store/catalog/__init__.py | rohithkumar-rackspace/rcbops | 0 | 12760007 | <gh_stars>0
from .solution import Solution, Catalog
| 1.007813 | 1 |
BoxesDots.py | lmunro0402/ShallowBlue | 0 | 12760008 | # Dots & Boxes Class
#
# Author: <NAME>
import copy
from Minimax import Minimax
from Player import Player
import utils as UTIL
class BoxesDots:
    """Dots & Boxes game engine (Python 2).

    game_state layout: 2*dim+1 rows alternating horizontal edge rows
    (length dim) and vertical edge rows (length dim+1); a cell is 1 once
    that edge has been drawn.
    """
    def __init__(self, dim, players):
        """ Only square games allowed. dim is the board size, players a
        2-element sequence of Player/Minimax objects."""
        self.dim = dim
        assert self.dim < 10, "Less than 10 please"
        # Number of completed boxes claimed so far.
        self.usedBoxes = 0
        self.players = players
        self.game_state = []
        for i in range(self.dim):
            # Horizontal edge row, then the vertical edge row below it.
            self.game_state.append([0]*dim)
            self.game_state.append([0]*(dim+1))
        # Bottom horizontal edge row.
        self.game_state.append([0]*dim)
        self.old_state = []
    def reset(self):
        # Clear player scores and every edge for a fresh game.
        self.players[0].reset()
        self.players[1].reset()
        self. usedBoxes = 0
        for i, row in enumerate(self.game_state):
            for x in range(len(row)):
                self.game_state[i][x] = 0
    # ------------------ Funcs for minimax -----------------
    def get_depth(self, base): # IMPROVE THIS WITH THINKING
        """Choose a minimax search depth from the remaining move count."""
        moves_made = sum(UTIL.clean_game_state(self.game_state))
        num_moves = 2*(self.dim**2+self.dim)
        available_moves = num_moves - moves_made
        # "SET<d>" bases pin the depth explicitly.
        if list(base)[:3] == ["S", "E", "T"]: # this can be better!!!!! WAY BETTER!!!
            depth = int(list(base)[3])
        elif available_moves-2 > self.dim*(self.dim+1):
            # Early game: keep the search shallow.
            depth = 2
        else:
            depth = int(base)+2
        return depth
    def getDim(self):
        # Board dimension accessor.
        return self.dim
    def add_players(self, players):
        # Replace the player pair (used when rebuilding a game).
        self.players = players
    def turn(self, player):
        """Ask *player* for a move, apply it and update the scores."""
        self.old_state = copy.deepcopy(self.game_state) # BREAKING CONNECTION
        # Minimax players also receive a search depth.
        if isinstance(player, Minimax):
            move = player.getMove(self.game_state, self.get_depth(player.base)).split(" ")
        else:
            move = player.getMove(self.game_state).split(" ")
        while not self.valid_move(move):
            print 'Invalid Move'
            move = player.getMove(self.game_state).split(" ")
        move = [int(x) for x in move]
        player.last_move = move
        self.move(move[0], move[1])
        self.update_scores(player)
    def move(self, row, index):
        # Mark the chosen edge as drawn.
        self.game_state[row][index] = 1
    #------------------------------ Checks ----------------------------------
    def valid_move(self, move):
        """Return True when *move* (pair of string ints) is a free, in-range edge."""
        try:
            move = [int(x) for x in move]
            row = move[0]
            index = move[1]
            # Even rows hold dim horizontal edges, odd rows dim+1 vertical ones.
            if (row%2 == 0 and index > self.dim-1) or\
             (row%2 == 1 and index > self.dim) or (row > self.dim*2):
                return False
            elif self.game_state[row][index] == 1:
                return False
            return True
        except:
            return False
    # ---------------------------- Scoring -----------------------------------
    def game_status(self):
        # True while any box remains unclaimed.
        return (self.dim**2) != self.usedBoxes
    def update_scores(self, player):
        """Credit *player* with any boxes completed by the last move."""
        count = sum(self.check_boxes())
        if count != self.usedBoxes:
            diff = abs(self.usedBoxes-count)
            # A single edge can close at most two boxes.
            if diff == 1:
                player.plusOne()
            else:
                player.plusOne()
                player.plusOne()
        self.usedBoxes = count
    def get_boxes(self):
        "'Converts game_state into list of each box, contains repeats.'"
        boxes = []
        box_scores = []
        for i in range(0, self.dim*2, 2):
            # Go by rows
            for j in range(self.dim):
                # Each box: [top, left, right, bottom] edge states.
                boxes.append([self.game_state[i][j], self.game_state[i+1][j], \
                 self.game_state[i+1][j+1], self.game_state[i+2][j]])
        return boxes
    def check_boxes(self):
        # 1 for each completed box (all four edges drawn), else 0.
        boxes = self.get_boxes()
        box_scores = [sum(x)//4 for x in boxes]
        return box_scores
    # ------------------------- Display methods --------------------------------
    def display_moves(self):
        # Raw edge-state rows, mainly for debugging.
        return self.game_state
    def display_game(self):
        """Print an ASCII rendering of the current board."""
        buffer = [] #what is this
        hLine = "+---"
        hEmpty = "+   "
        vLine = "|   "
        vEmpty = "    "
        # Top row
        for i in range(self.dim):
            if self.game_state[0][i] == 1:
                buffer.append(hLine)
            else: buffer.append(hEmpty)
        buffer.append("+\n")
        # Middle rows
        for i in range(1, self.dim*2, 2):
            # Make horizontal passes
            for j in range(self.dim+1):
                if self.game_state[i][j] == 1:
                    buffer.append(vLine)
                else: buffer.append(vEmpty)
            buffer.append("\n")
            # Vertical passes
            for j in range(self.dim):
                if self.game_state[i+1][j] == 1:
                    buffer.append(hLine)
                else: buffer.append(hEmpty)
            buffer.append("+\n")
        print "".join(buffer)
    def show_results(self):
        """Print both scores and announce the winner (or a tie)."""
        # print "---GAME RESULTS---"
        print self.players[0].getName() + " score is " + str(self.players[0].getScore())
        print self.players[1].getName() + " score is " + str(self.players[1].getScore())
        if self.players[0].getScore() == self.players[1].getScore():
            print "Tie"
        elif self.players[0].getScore() > self.players[1].getScore():
            print "Winner is " + self.players[0].getName()
        else:
            print "Winner is " + self.players[1].getName()
src/neural_network_scripts/models/RGN.py | lpbsscientist/targettrack | 0 | 12760009 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Author: <NAME>
@Contact: <EMAIL>
@File: model.py
@Time: 2018/10/13 6:35 PM
Modified by
@Author: <NAME>
@Contact: <EMAIL>
@Time: 2020/3/9 9:32 PM
"""
import os
import sys
import copy
import math
import numpy as np
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F
def knn(x, k):
    """Return the indices of the k nearest neighbours of every point.

    x: (batch, dims, num_points) tensor; distance is squared Euclidean.
    Returns a (batch, num_points, k) LongTensor of neighbour indices
    (each point is its own nearest neighbour at distance 0).
    """
    # -|xi - xj|^2 = -|xi|^2 + 2<xi, xj> - |xj|^2, computed batch-wise.
    sq_norms = torch.sum(x ** 2, dim=1, keepdim=True)
    cross = torch.matmul(x.transpose(2, 1), x)
    neg_sq_dist = 2 * cross - sq_norms - sq_norms.transpose(2, 1)
    # topk over the negated distances picks the k closest points.
    return neg_sq_dist.topk(k=k, dim=2)[1]
def get_graph_feature(x, k=20, spatial_dim=None):
    """Build edge features [x_j - x_i, x_i] over each point's k-NN graph.

    x: (batch, dims, num_points). If spatial_dim is given, neighbours are
    found using only the first spatial_dim channels.
    Returns a (batch, 2*dims, num_points, k) tensor.
    """
    batch_size, num_dims, num_points = x.size(0), x.size(1), x.size(2)

    # Neighbour indices, (batch, num_points, k).
    if spatial_dim is None:
        idx = knn(x, k=k)
    else:
        idx = knn(x[:, :spatial_dim], k=k)

    # Offset per-batch indices so they address a flattened (B*N, dims) view.
    offsets = torch.arange(0, batch_size, device=x.device).view(-1, 1, 1) * num_points
    flat_idx = (idx + offsets).view(-1)

    points = x.transpose(2, 1).contiguous()  # (batch, num_points, num_dims)
    neighbours = points.view(batch_size * num_points, -1)[flat_idx, :]
    neighbours = neighbours.view(batch_size, num_points, k, num_dims)

    centers = points.view(batch_size, num_points, 1, num_dims).repeat(1, 1, k, 1)
    feature = torch.cat((neighbours - centers, centers), dim=3)
    return feature.permute(0, 3, 1, 2).contiguous()  # (batch, 2*num_dims, num_points, k)
class Net(nn.Module):
    """DGCNN-style per-point segmentation network.

    Three EdgeConv stages (get_graph_feature + shared 1x1 convs + max over
    the k neighbours) feed a global max-pooled embedding that is broadcast
    back and concatenated with the per-point features before the
    classification head.  Input: (batch, num_in_feat, num_points);
    output: (batch, num_classes, num_points).
    """
    def __init__(self,spatial_dim=3,num_in_feat=3,k=20,dim_embed=1024,num_classes=10):
        # spatial_dim: leading channels used for the k-NN search;
        # num_in_feat: total input channels; k: neighbourhood size;
        # dim_embed: global embedding width; num_classes: output channels.
        super(Net, self).__init__()
        self.spatial_dim=spatial_dim
        self.num_in_feat=num_in_feat
        self.num_classes=num_classes
        self.k = k
        self.dim_embed=dim_embed
        self.bn1 = nn.BatchNorm2d(64)
        self.bn2 = nn.BatchNorm2d(64)
        self.bn3 = nn.BatchNorm2d(64)
        self.bn4 = nn.BatchNorm2d(64)
        self.bn5 = nn.BatchNorm2d(64)
        self.bn6 = nn.BatchNorm1d(self.dim_embed)
        self.bn7 = nn.BatchNorm1d(512)
        self.bn8 = nn.BatchNorm1d(256)
        self.conv1 = nn.Sequential(nn.Conv2d(2*self.num_in_feat, 64, kernel_size=1, bias=False),
                                   self.bn1,
                                   nn.LeakyReLU(negative_slope=0.2))
        self.conv2 = nn.Sequential(nn.Conv2d(64, 64, kernel_size=1, bias=False),
                                   self.bn2,
                                   nn.LeakyReLU(negative_slope=0.2))
        self.conv3 = nn.Sequential(nn.Conv2d(64*2, 64, kernel_size=1, bias=False),
                                   self.bn3,
                                   nn.LeakyReLU(negative_slope=0.2))
        self.conv4 = nn.Sequential(nn.Conv2d(64, 64, kernel_size=1, bias=False),
                                   self.bn4,
                                   nn.LeakyReLU(negative_slope=0.2))
        self.conv5 = nn.Sequential(nn.Conv2d(64*2, 64, kernel_size=1, bias=False),
                                   self.bn5,
                                   nn.LeakyReLU(negative_slope=0.2))
        self.conv6 = nn.Sequential(nn.Conv1d(64*3,self.dim_embed, kernel_size=1, bias=False),
                                   self.bn6,
                                   nn.LeakyReLU(negative_slope=0.2))
        self.conv7 = nn.Sequential(nn.Conv1d(self.dim_embed+64*3, 512, kernel_size=1, bias=False),
                                   self.bn7,
                                   nn.LeakyReLU(negative_slope=0.2))
        self.conv8 = nn.Sequential(nn.Conv1d(512, 256, kernel_size=1, bias=False),
                                   self.bn8,
                                   nn.LeakyReLU(negative_slope=0.2))
        self.conv9 = nn.Conv1d(256,self.num_classes, kernel_size=1, bias=False)

    def forward(self, x):
        """Run the network; x is (batch, num_in_feat, num_points)."""
        batch_size = x.size(0)
        num_points = x.size(2)
        x = get_graph_feature(x, k=self.k,spatial_dim=self.spatial_dim)      # (batch_size, 9, num_points) -> (batch_size, 9*2, num_points, k)
        x = self.conv1(x)                       # (batch_size, 9*2, num_points, k) -> (batch_size, 64, num_points, k)
        x = self.conv2(x)                       # (batch_size, 64, num_points, k) -> (batch_size, 64, num_points, k)
        x1 = x.max(dim=-1, keepdim=False)[0]    # (batch_size, 64, num_points, k) -> (batch_size, 64, num_points)
        x = get_graph_feature(x1, k=self.k)     # (batch_size, 64, num_points) -> (batch_size, 64*2, num_points, k)
        x = self.conv3(x)                       # (batch_size, 64*2, num_points, k) -> (batch_size, 64, num_points, k)
        x = self.conv4(x)                       # (batch_size, 64, num_points, k) -> (batch_size, 64, num_points, k)
        x2 = x.max(dim=-1, keepdim=False)[0]    # (batch_size, 64, num_points, k) -> (batch_size, 64, num_points)
        x = get_graph_feature(x2, k=self.k)     # (batch_size, 64, num_points) -> (batch_size, 64*2, num_points, k)
        x = self.conv5(x)                       # (batch_size, 64*2, num_points, k) -> (batch_size, 64, num_points, k)
        x3 = x.max(dim=-1, keepdim=False)[0]    # (batch_size, 64, num_points, k) -> (batch_size, 64, num_points)
        x = torch.cat((x1, x2, x3), dim=1)      # (batch_size, 64*3, num_points)
        x = self.conv6(x)                       # (batch_size, 64*3, num_points) -> (batch_size, self.dim_embed, num_points)
        x = x.max(dim=-1, keepdim=True)[0]      # (batch_size, self.dim_embed, num_points) -> (batch_size, self.dim_embed, 1)
        # Broadcast the global embedding back to every point.
        x = x.repeat(1, 1, num_points)          # (batch_size, 1024, num_points)
        x = torch.cat((x, x1, x2, x3), dim=1)   # (batch_size, 1024+64*3, num_points)
        x = self.conv7(x)                       # (batch_size, 1024+64*3, num_points) -> (batch_size, 512, num_points)
        x = self.conv8(x)                       # (batch_size, 512, num_points) -> (batch_size, 256, num_points)
        x = self.conv9(x)                       # (batch_size, 256, num_points) -> (batch_size, num_classes, num_points)
        return x
| 2.375 | 2 |
tutorials/tutorial_code/custom_debugging_info/custom_callback.py | Ming-blue/docs | 1 | 12760010 | <reponame>Ming-blue/docs
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
callback function
"""
import time
from mindspore.train.callback import Callback
from mindspore import save_checkpoint
# stop training at runtime*60 second
class StopAtTime(Callback):
    """Callback that stops training once a wall-clock budget is exceeded.

    Args:
        run_time (float): training time budget, in minutes.

    Example:
        >>> StopAtTime(1)
    """
    def __init__(self, run_time):
        super(StopAtTime, self).__init__()
        self.run_time = run_time * 60  # minutes -> seconds

    def begin(self, run_context):
        # Remember when training started so step_end can measure elapsed time.
        params = run_context.original_args()
        params.init_time = time.time()

    def step_end(self, run_context):
        params = run_context.original_args()
        epoch = params.cur_epoch_num
        step = params.cur_step_num
        loss = params.net_outputs
        if (time.time() - params.init_time) > self.run_time:
            print("epoch: ", epoch, " step: ", step, " loss: ", loss)
            run_context.request_stop()

    def end(self, run_context):
        params = run_context.original_args()
        print(params.list_callback)
class SaveCallback(Callback):
    """Checkpoint callback that keeps only the best-accuracy model.

    Evaluates the model on `eval_dataset` at every step end and writes a
    checkpoint (named after the accuracy) whenever accuracy improves.
    """
    def __init__(self, model, eval_dataset):
        super(SaveCallback, self).__init__()
        self.model = model
        self.eval_dataset = eval_dataset
        self.acc = 0.5  # minimum accuracy before anything is saved

    def step_end(self, run_context):
        params = run_context.original_args()
        metrics = self.model.eval(self.eval_dataset)
        if metrics['accuracy'] > self.acc:
            self.acc = metrics['accuracy']
            ckpt_name = str(self.acc) + ".ckpt"
            save_checkpoint(save_obj=params.train_network, ckpt_file_name=ckpt_name)
            print("Save the maximum accuracy checkpoint,the accuracy is", self.acc)
| 2.390625 | 2 |
figures/plot_pareto.py | wdlynch/symbolic_experiments | 0 | 12760011 | <reponame>wdlynch/symbolic_experiments<gh_stars>0
# this script generates kernel density estimation plots of sensitivity indices
# for results according to cluster (hue), and category, neighborhood, and lag of input
import os
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib import rcParams, cycler, colors
# Global matplotlib typography configuration; the LaTeX settings are kept
# (commented out) for reproducing the original publication rendering.
params = {
    # 'text.latex.preamble': ['\\usepackage{gensymb}'],
    # 'text.usetex': True,
    'font.family': 'Helvetica',
}
rcParams.update(params)
from matplotlib.lines import Line2D
def main():
    """Draw the two-panel complexity-vs-error ("metric space") figure.

    Reads ``model_sensitivities.csv`` from the working directory, plots a
    2D histogram of training error over all models with per-cluster test
    error scatter on top (regression in panel A, classification in panel
    B), and saves the result to ``metric_space.pdf``.
    """
    sns.set_context("paper", font_scale=1.3, rc={"lines.linewidth": 0.5})
    sns.set_style('white')
    sns.set_palette("cividis")
    data = pd.read_csv('model_sensitivities.csv', header=0, index_col=0)
    subset = ['experiment', 'model_id', 'training_error', 'complexity',
              'test_error', 'cluster']
    orders = {'cluster': [0, 1, 2, 3, 4]}
    data = data[subset]
    fig, axs = plt.subplots(2, 1, figsize=(5, 7))
    # Use an explicit .copy() so adding the display columns below does not
    # write through a view of `data` (avoids pandas' SettingWithCopyWarning
    # and its undefined chained-assignment behaviour).
    data_reg = data[data['experiment'] == 'regression'].copy()
    data_reg['Complexity'] = data_reg['complexity']
    data_reg['Cluster'] = data_reg['cluster']
    data_reg['Training Error'] = data_reg['training_error']
    data_reg['Test Error'] = data_reg['test_error']
    cmap = sns.cubehelix_palette(start=1.5, rot=0.0, dark=0.5, light=0.99, as_cmap=True)
    # Panel A background: density of all regression models (log-scaled counts).
    axs[0].hist2d(data_reg['Complexity'].values,
                  data_reg['Training Error'].values,
                  norm=colors.LogNorm(),
                  bins=30, alpha=0.5, cmap=cmap)
    alpha = 0.9
    colors_reg = ['slategray', 'midnightblue', 'darkred', 'Blue', 'slategray']
    for cluster in orders['cluster']:
        x = data_reg[data_reg['Cluster'] == cluster]['Complexity'].values
        y = data_reg[data_reg['Cluster'] == cluster]['Test Error'].values
        axs[0].scatter(x=x, y=y, facecolors=colors_reg[cluster],
                       edgecolors=colors_reg[cluster], marker='x',
                       alpha=alpha, s=4)
    data_class = data[data['experiment'] == 'classification'].copy()
    data_class['Complexity'] = data_class['complexity']
    data_class['Cluster'] = data_class['cluster']
    data_class['Training Error'] = data_class['training_error']
    data_class['Test Error'] = data_class['test_error']
    axs[1].hist2d(data_class['Complexity'].values,
                  data_class['Training Error'].values,
                  norm=colors.LogNorm(),
                  bins=30, alpha=0.5, cmap=cmap)
    colors_class = ['slategray', 'Red', 'Blue', 'midnightblue', 'slategray']
    for cluster in orders['cluster']:
        x = data_class[data_class['Cluster'] == cluster]['Complexity'].values
        y = data_class[data_class['Cluster'] == cluster]['Test Error'].values
        axs[1].scatter(x=x, y=y, marker='x', facecolors=colors_class[cluster],
                       edgecolors=colors_class[cluster], alpha=alpha, s=4)
    axs[0].text(x=0.95, y=0.95, s='(A) - Regression Models', ha='right',
                va='top', transform=axs[0].transAxes)
    axs[1].text(x=0.95, y=0.95, s='(B) - Classification Models', ha='right',
                va='top', transform=axs[1].transAxes)
    labels_reg = ['Parsimonious', 'Equifinal', 'Overfit', 'Other']
    labels_class = ['Dominated', 'Overfit', 'Parsimonious', 'Other']
    # Legend order is hand-picked per panel; cluster-1 indexes the labels.
    legend_elements_reg = [Line2D([0], [0], marker='x', color=colors_reg[cluster],
                                  label=labels_reg[cluster - 1],
                                  markerfacecolor='none', markersize=10)
                           for cluster in [1, 3, 2, 4]]
    legend_elements_class = [Line2D([0], [0], marker='x', color=colors_class[cluster],
                                    label=labels_class[cluster - 1],
                                    markerfacecolor='none', markersize=10)
                             for cluster in [3, 2, 1, 4]]
    axs[0].legend(frameon=False, loc=7, handles=legend_elements_reg)
    axs[1].legend(frameon=False, loc=7, handles=legend_elements_class)
    axs[0].set_ylabel('Test MSE')
    axs[1].set_ylabel('Test Misclassification Rate')
    axs[1].set_xlabel('Complexity')
    fig.tight_layout(pad=1.0)
    for i, ax in enumerate(axs.flatten()):
        ax.margins(x=0.1, y=0.1)
        if i == 0:
            ax.set_ylim(bottom=0.8, top=1.05)
        else:
            ax.set_ylim(bottom=0.3, top=0.8)
        ax.set_xlim(left=0, right=0.85)
    # NOTE: the trailing exit() call was removed so main() can be imported
    # and called without terminating the interpreter; unused locals
    # (marker_size, dir_path, data_cols) were also dropped.
    plt.savefig('metric_space.pdf', format='pdf', bbox_inches='tight',
                dpi=600, transparent=True)


if __name__ == '__main__':
    main()
proj files/walk.py | ZhenoSan/DistributedFileSystem | 1 | 12760012 | import os
fileDirectoryRoot = 'root'
localfilelist=[]
def generateList():
    """Rebuild ``localfilelist`` from a walk of ``fileDirectoryRoot``.

    Files are stored as '/relative/path' and directories with a trailing
    '/'; every collected entry is printed afterwards.
    """
    del localfilelist[:]
    for root, dirs, files in os.walk(fileDirectoryRoot):
        rel = os.path.relpath(root, fileDirectoryRoot)
        # The walk root itself gets an empty prefix; subdirectories get
        # a leading slash.
        prefix = '' if rel == '.' else '/' + rel
        for filename in files:
            localfilelist.append(prefix + '/' + filename)
        for dirname in dirs:
            localfilelist.append(prefix + '/' + dirname + '/')
    for entry in localfilelist:
        print(entry)


generateList()
| 3.15625 | 3 |
ida_export/preselect/utils.py | RUB-SysSec/tropyhunter | 1 | 12760013 | import idautils
import idaapi
import idc
def instructions(start_ea, end_ea):
    """Return a list of the instruction addresses in [start_ea, end_ea]."""
    return [head for head in idautils.Heads(start_ea, end_ea)]
def basic_blocks(func_addr):
    """Yield (start, end) address pairs for every basic block of the
    function containing ``func_addr``."""
    func = idaapi.get_func(func_addr)
    for block in idaapi.FlowChart(func):
        yield block.startEA, block.endEA
def func_instructions(func_addr):
    """Yield the address of every instruction in the given function."""
    for block_start, block_end in basic_blocks(func_addr):
        for address in instructions(block_start, block_end):
            yield address
def func_last_instr(func_addr):
    """
    Returns the address of the last instruction of the given function.

    Raises IndexError for a function with no instructions (same as the
    previous list-indexing implementation).
    """
    from collections import deque
    # A maxlen-1 deque keeps only the most recent address, instead of
    # materializing the whole instruction list just to read its tail.
    return deque(func_instructions(func_addr), maxlen=1)[0]
def func_mnemonics(func_addr):
    """Yield the mnemonic of every instruction in the given function."""
    for block_start, block_end in basic_blocks(func_addr):
        for mnemonic in mnemonics(block_start, block_end):
            yield mnemonic
def mnemonics(start_ea, end_ea):
    """Yield the mnemonic of every instruction in [start_ea, end_ea]."""
    for address in instructions(start_ea, end_ea):
        yield idc.GetMnem(address)
def count_mnemonics(start_ea, end_ea, needle_mnemonics):
    """
    Counts how many times the mnemonics in needle_mnemonics occur in the
    given address range (including).
    """
    # Sum over a generator instead of maintaining a manual counter.
    return sum(1 for mnemonic in mnemonics(start_ea, end_ea)
               if mnemonic in needle_mnemonics)
| 3.1875 | 3 |
terrascript/oneandone/r.py | amlodzianowski/python-terrascript | 0 | 12760014 | # terrascript/oneandone/r.py
import terrascript
# One placeholder Resource subclass per 1&1 (oneandone) Terraform resource
# type.  Terrascript derives the Terraform resource name from the class
# name, so the bodies are intentionally empty.
class oneandone_server(terrascript.Resource):
    pass
class oneandone_firewall_policy(terrascript.Resource):
    pass
class oneandone_private_network(terrascript.Resource):
    pass
class oneandone_public_ip(terrascript.Resource):
    pass
class oneandone_shared_storage(terrascript.Resource):
    pass
class oneandone_monitoring_policy(terrascript.Resource):
    pass
class oneandone_loadbalancer(terrascript.Resource):
    pass
class oneandone_vpn(terrascript.Resource):
    pass
class oneandone_ssh_key(terrascript.Resource):
    pass
class oneandone_block_storage(terrascript.Resource):
    pass
class oneandone_image(terrascript.Resource):
    pass
class oneandone_baremetal(terrascript.Resource):
    pass
rigl/experimental/jax/prune_test.py | vishalbelsare/rigl | 276 | 12760015 | # coding=utf-8
# Copyright 2021 RigL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for weight_symmetry.prune."""
import glob
from os import path
from absl.testing import absltest
from absl.testing import flagsaver
from rigl.experimental.jax import prune
class PruneTest(absltest.TestCase):
  """End-to-end tests for the training/pruning driver in `prune`."""

  def _run_and_check_events(self, **eval_flags):
    """Run prune.main with *eval_flags* plus a fresh experiment dir.

    Asserts that exactly one TensorFlow event file
    (``events.out.tfevents.*``) was written into a run subdirectory of the
    experiment directory.
    """
    experiment_dir = self.create_tempdir().full_path
    eval_flags['experiment_dir'] = experiment_dir
    with flagsaver.flagsaver(**eval_flags):
      prune.main([])
    outfile = path.join(experiment_dir, '*', 'events.out.tfevents.*')
    files = glob.glob(outfile)
    self.assertLen(files, 1)
    self.assertTrue(path.exists(files[0]))

  def test_prune_fixed_schedule(self):
    """Tests training/pruning driver with a fixed global sparsity."""
    self._run_and_check_events(epochs=1, pruning_rate=0.95)

  def test_prune_global_pruning_schedule(self):
    """Tests training/pruning driver with a global sparsity schedule."""
    self._run_and_check_events(
        epochs=10, pruning_schedule='[(5, 0.33), (7, 0.66), (9, 0.95)]')

  def test_prune_local_pruning_schedule(self):
    """Tests training/pruning driver with a single layer sparsity schedule."""
    self._run_and_check_events(
        epochs=10, pruning_schedule='{1:[(5, 0.33), (7, 0.66), (9, 0.95)]}')


if __name__ == '__main__':
  absltest.main()
| 1.914063 | 2 |
supertools/subclass_detector.py | zagaran/instant-census | 1 | 12760016 | from utils.database import CHILD_TEMPLATE, DatabaseObject, DatabaseCollection
def all_children(cls):
    """Recursively collect every direct and indirect subclass of *cls*."""
    # Imported for their side effects only: importing the app and cron
    # packages pulls the whole codebase into scope so every subclass has
    # actually been defined before __subclasses__() is consulted.
    import app  # noqa: F401
    import cron  # noqa: F401

    def descend(klass):
        direct = klass.__subclasses__()
        if not direct:
            return []
        indirect = [deep for child in direct for deep in descend(child)]
        return direct + indirect

    return descend(cls)
def all_databases():
    """Return every concrete DatabaseObject subclass (template excluded)."""
    databases = []
    for child in all_children(DatabaseObject):
        if child.PATH != CHILD_TEMPLATE:
            databases.append(child)
    return databases

def all_collections():
    """Return every concrete DatabaseCollection subclass (template excluded)."""
    collections = []
    for child in all_children(DatabaseCollection):
        if child.__objtype__.__db__ != CHILD_TEMPLATE:
            collections.append(child)
    return collections

def all_database_paths():
    """Return the set of collection paths used by the concrete databases."""
    return {child.PATH for child in all_databases()}
| 2.28125 | 2 |
dwh_analytic/dags/data_warehouse_prod/schema/dim_field.py | dnguyenngoc/analytic | 0 | 12760017 | class DimFieldModel:
def __init__(
self,
*kwargs,
field_key: str,
project_id: str,
name: str,
control_type: str,
default_value: str = None,
counted_character: bool,
counted_character_date_from_key: int,
counted_character_time_from_key: int,
counted_character_date_to_key: int,
counted_character_time_to_key: int,
counted_character_from_timestamp: str,
counted_character_to_timestamp: str,
is_sub_field: bool = False,
):
self.field_key = field_key
self.project_id = project_id
self.name = name
self.control_type = control_type
self.default_value = default_value
self.counted_character = counted_character
self.counted_character_date_from_key = counted_character_date_from_key
self.counted_character_time_from_key = counted_character_time_from_key
self.counted_character_date_to_key = counted_character_date_to_key
self.counted_character_time_to_key = counted_character_time_to_key
self.counted_character_from_timestamp = counted_character_from_timestamp
self.counted_character_to_timestamp = counted_character_to_timestamp
self.is_sub_field = is_sub_field | 2.234375 | 2 |
roseasy/main.py | ckrivacic/roseasy | 1 | 12760018 | <gh_stars>1-10
#!/usr/bin/env python2
"""\
RosEasy is a workspace and script manager to help organize PyRosetta projects.
It interfaces with PyRosetta and has helpful default design scripts, as
well as Python classes that make interfacing with C++ Rosetta classes
simpler.
Usage:
roseasy <command> [<args>...]
roseasy --version
roseasy --help
Arguments:
<command>
The name of the command you want to run. You only need to specify
enough of the name to be unique.
{command_table}
<args>...
The necessary arguments depend on the command being run. For more
information, pass the '--help' flag to the command you want to run.
Options:
-v, --version
Display the version of PIP that's installed.
-h, --help
Display this help message.
RosEasy's design pipeline has the following steps:
1. Define your project. This entails creating an input PDB file and preparing
it for use with rosetta, creating a restraints file that specifies your
desired geometry, creating a resfile that specifies which residues are
allowed to design, and creating a loop file that specifies where backbone
flexibility will be considered.
$ roseasy setup_workspace ...
2. Submit a design or refinement job to the cluster, or generate fragments.
$ pull_into_place setup_model_fragments ...
$ pull_into_place submit ...
3. [NOT YET IMPLEMENTED] Filter out models that don't meet your quality criteria.
$ pull_into_place pick_models ...
"""
import sys, re
from klab import scripting
import docopt
from . import __version__
def make_command_table(entry_points):
    """
    Return a nicely formatted table of all the PIP commands installed on this
    system to incorporate into the help text.  The table will have two columns.
    The first will list the commands that comprise the main pipeline and the
    second will list all the other miscellaneous helper functions.
    """
    import itertools

    # Split every command installed on the system into two categories: those
    # that are part of the main pipeline and those that are just utilities or
    # helpers.  Pipeline scripts start with numbers, helper scripts don't.
    pipeline_commands = []
    helper_commands = []
    for command in sorted(entry_points):
        if re.match(r'\d+_', command):
            pipeline_commands.append(command)
        else:
            helper_commands.append(command)

    # Figure out what the longest pipeline command is, so we know how much to
    # indent the helper commands.  Use a minimum width of 1 so the str.format
    # width spec below is always valid even with no pipeline commands.
    column_width = max(
            (len(command) for command in pipeline_commands), default=1)

    # Make the table.  Pad the first column to `column_width` so the helper
    # column lines up; the padding spec had been dropped, which misaligned
    # the columns and left the width argument to format() unused.
    rows = []
    columns = itertools.zip_longest(
            pipeline_commands, helper_commands, fillvalue='')
    for commands in columns:
        row = ' {0[0]:{1}} {0[1]}'.format(commands, column_width)
        rows.append(row)

    return '\n'.join(rows)
def did_you_mean(unknown_command, entry_points):
    """Return the known command most similar to what the user typed.

    Used to suggest a correct command when the user types an illegal one.
    """
    from difflib import SequenceMatcher

    def score(name):
        return SequenceMatcher(None, name, unknown_command).ratio()

    # max() returns the first of the best-scoring names, matching the old
    # stable reverse-sort-then-index behaviour.
    return max(entry_points, key=score)
@scripting.catch_and_print_errors()
def main():
    """Command-line entry point: resolve and dispatch a roseasy subcommand.

    Commands are discovered via the 'roseasy.commands' entry-point group,
    matched by unique prefix against what the user typed, and executed with
    the remaining CLI arguments.
    """
    from pkg_resources import iter_entry_points, DistributionNotFound

    # Load every PIP command installed on this system. This is cool because by
    # using ``pkg_resources``, other packages can add commands to PIP!
    entry_points = {}
    for entry_point in iter_entry_points(group='roseasy.commands'):
        entry_points[entry_point.name] = entry_point

    # Read the command the user typed on the command line.
    command_table = make_command_table(entry_points)
    arguments = docopt.docopt(
            __doc__.format(**locals()),
            version=__version__,
            options_first=True,
    )
    command_name = arguments['<command>']

    # Find all the commands that match what the user typed.
    # (prefix match, so the user only has to type a unique prefix)
    matching_entry_points = [
            name for name in entry_points
            if name.startswith(command_name)]

    # If no commands match, print out an error and suggest a command the user
    # might have been trying to type.
    if len(matching_entry_points) == 0:
        scripting.print_error_and_die("""\
Unknown command '{0}'. Did you mean:
    $ roseasy {1} {2}
""", command_name, did_you_mean(command_name, entry_points), ' '.join(arguments['<args>']))

    # If two or more commands match, print all the ambiguous commands and tell
    # the user to be more specific.
    elif len(matching_entry_points) > 1:
        message = "Command '{0}' is ambiguous. Did you mean:\n\n"
        for matching_entry_point in matching_entry_points:
            message += "    $ roseasy {0} {{1}}\n".format(matching_entry_point)
        message += '\n'
        scripting.print_error_and_die(message, command_name, ' '.join(arguments['<args>']))

    # If a unique command was given, make sure all of its dependencies are
    # installed (because the dependencies for the analysis scripts are not by
    # default). If there is a problem, suggest how to fix it. Otherwise, run
    # the command.
    else:
        entry_point = entry_points[matching_entry_points[0]]
        try:
            entry_point.require()
        except DistributionNotFound as error:
            scripting.print_error_and_die("""\
The '{0}' command requires the '{1.req}' package.
The analysis scripts have a number of dependencies that aren't installed by
default, because they would make PIP needlessly hard to install on clusters.
You can install all of these dependencies at once with the following command:
    $ pip install 'roseasy [analysis]'
""".format(command_name, error))
        # Re-point argv at the resolved command so its own docopt parsing
        # sees the full (expanded) command name, then invoke it.
        sys.argv = sys.argv[:1] + matching_entry_points + arguments['<args>']
        entry_point.load()()
| 2.265625 | 2 |
core/middleware/checks/crc.py | hirenchalodiya1/replication-of-icp-ip-model | 1 | 12760019 | <reponame>hirenchalodiya1/replication-of-icp-ip-model
import settings
from core.utils import log
from core.utils.mod2div import mod2div
# Class used to apply crc to the data
# by appending remainder of modular division
# at the end of data and returning
class CRC:
    """CRC codec: appends the remainder of modulo-2 division by
    ``settings.CRC_KEY`` to a bit-string, and strips/validates it again
    on decode."""

    def __init__(self, data):
        self.data = data
        self.key = settings.CRC_KEY
        self.remainder = ''
        self.divide()

    def divide(self):
        """Compute the CRC remainder of the zero-padded data."""
        key_len = len(self.key)
        # Append key_len - 1 zero bits before dividing.
        padded = self.data + '0' * (key_len - 1)
        self.remainder = mod2div(padded, self.key)

    def codeword(self):
        """Return data with the CRC remainder appended (no logging)."""
        return self.data + self.remainder

    def encode(self):
        """Return data with the CRC remainder appended, logging the steps."""
        log("Encoding data...", 4)
        log("Data: %s" % self.data, 4)
        log("Remainder: %s" % self.remainder, 3)
        return self.data + self.remainder

    def decode(self):
        """Return the payload if the CRC check passes, else an empty string."""
        log("Decoding data...", 4)
        log("Data: %s" % self.data, 4)
        log("Remainder: %s" % self.remainder, 3)
        check_width = len(self.key) - 1
        if self.remainder == "0" * check_width:
            return self.data[:-check_width]
        return ""
| 2.703125 | 3 |
run.py | ChristopherKlix/logic_gates | 0 | 12760020 | <reponame>ChristopherKlix/logic_gates
# dependencies for run.py
# bundled in import file for cleaner code
from imports import *
'''
If the script is run w/o additional flags (args) in
the command-line, then the main function acts as
a menu.
color_print() is simply a custom print() that allows for easy
colored print statements.
Change the color to 'default' if you prefer the default
print color of your command-line. Or simply change
it to the default print() function.
help_custom() is called that way, bc help()
is a Python reserved keyword (like print()).
'''
def main():
    """Interactive menu loop: read a program name from stdin and run it.

    Recurses back into itself after each program (and on unknown input)
    until the user chooses 'quit'.
    """
    color_print('blue', '[try] [compare] [add] [subtract] [explain] [help] [quit]')
    program = input('program ')
    # Each branch uses an anchored regex (^...$), i.e. an exact-match test.
    if re.search(r'^compare$', program):
        compare()
    elif re.search(r'^try$', program):
        try_custom()
    elif re.search(r'^add$', program):
        out = add_numbers()
        print(f'out: {out}', end='\n\n')
    elif re.search(r'^subtract$', program):
        out = subtract_numbers()
        print(f'out: {out}', end='\n\n')
    elif re.search(r'^explain$', program):
        explain()
    elif re.search(r'^help$', program):
        help_custom()
        main()
    elif re.search(r'^quit$', program):
        print('\n Thanks for using logic_gates!', end='\n\n')
        exit(0)
    else:
        print('no such program')
        main()
if __name__ == "__main__":
    # Command-line mode: exactly one extra argument selects a program
    # directly; otherwise fall through to the interactive menu.
    if len(argv) == 2:
        if re.search(r'^compare$', argv[1]):
            compare()
        # NOTE(review): the CLI keyword here is 'run' while the interactive
        # menu uses 'try' for the same program -- confirm this is intended.
        elif re.search(r'^run$', argv[1]):
            try_custom()
        elif re.search(r'^add$', argv[1]):
            out = add_numbers()
            print(f'out: {out}', end='\n\n')
        elif re.search(r'^subtract$', argv[1]):
            out = subtract_numbers()
            print(f'out: {out}', end='\n\n')
        else:
            print(f'no argument called [{argv[1]}].')
    elif len(argv) > 2:
        color_print('red', 'too many arguments parsed')
    else:
        main()
| 3.875 | 4 |
stage/configuration/test_whole_file_transformer_processor.py | Sentienz/datacollector-tests | 0 | 12760021 | import pytest
from streamsets.testframework.decorators import stub
# Auto-generated placeholder tests, one per configurable attribute of the
# Whole File Transformer processor.  Each is decorated with @stub (marks it
# as unimplemented) and parametrized with the stage attribute combinations
# the eventual test should cover; the bodies are intentionally `pass`.
@stub
def test_buffer_size_in_bytes(sdc_builder, sdc_executor):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_compression_codec(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
def test_data_time_zone(sdc_builder, sdc_executor):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_dictionary_page_size(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
def test_files_prefix(sdc_builder, sdc_executor):
    pass
@stub
def test_files_suffix(sdc_builder, sdc_executor):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_job_type(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_max_padding_size(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},
                                              {'on_record_error': 'STOP_PIPELINE'},
                                              {'on_record_error': 'TO_ERROR'}])
def test_on_record_error(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_page_size(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
def test_preconditions(sdc_builder, sdc_executor):
    pass
@stub
def test_rate_per_second(sdc_builder, sdc_executor):
    pass
@stub
def test_required_fields(sdc_builder, sdc_executor):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_row_group_size(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
def test_temporary_file_directory(sdc_builder, sdc_executor):
    pass
| 1.96875 | 2 |
app/__init__.py | fredpan/Prosopagnosia_Web_Server | 0 | 12760022 | <gh_stars>0
# Copyright 2020 EraO Prosopagnosia Helper Dev Team, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
#
# Supervised by Prof. <NAME> (http://www.eecg.toronto.edu/~mann/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import Flask, session
webapp = Flask(__name__)
# NOTE(review): this looks like a redacted placeholder, not a real key --
# load the real secret key from configuration/environment before deploying.
webapp.secret_key = '\<KEY>'
# Imported for their side effects: each module registers its routes and
# handlers on `webapp` at import time.
from app import RouteMapper
from app import FileUploader
from app import PicIdentify
from app import AccountManagment
from app.api import Apis
# set the max size of img allowed
webapp.config['MAX_CONTENT_LENGTH'] = 1024 * 1024 * 5  # 5 MiB upload limit
@webapp.before_request
def make_session_permanent():
    """Mark every session permanent so it uses the configured lifetime."""
    session.permanent = True
| 1.65625 | 2 |
scripts/dashboard_simulator.py | zabrewer/adaptive-policy-ise-sync | 6 | 12760023 | from django.views.decorators.csrf import csrf_exempt
import string
import random
import json
import os
import datetime
import time
from random import randint
from django.http import HttpResponseBadRequest
from .base_simulator import handle_request
from scripts.dblog import append_log, db_log
first_db_record = 0
def write_file(out_filename, content):
    """Write *content* to scripts/<out_filename>, replacing any existing file."""
    with open(os.path.join("scripts", out_filename), 'w') as out_file:
        out_file.write(content)


def read_file(in_filename):
    """Return the lines of scripts/<in_filename>, without trailing newlines."""
    # 'r' instead of 'r+': the file is only read, and read-only mode also
    # works when the file (or filesystem) is not writable.
    with open(os.path.join("scripts", in_filename), 'r') as in_file:
        return in_file.read().splitlines()


def read_file_all(in_filename):
    """Return the entire contents of scripts/<in_filename> as one string."""
    with open(os.path.join("scripts", in_filename), 'r') as in_file:
        return in_file.read()


def read_json_file(in_filename, log):
    """Parse scripts/<in_filename> as JSON, retrying once per second forever.

    Intended to ride out another process writing the file concurrently;
    each failed attempt is appended to *log*.
    """
    fail_count = 0
    while True:
        try:
            return json.loads(read_file_all(in_filename))
        except Exception as e:
            fail_count += 1
            time.sleep(1)
            append_log(log, "dashboard_monitor::read_json_file::", fail_count, e)
def random_words(size):
    """Return *size* words drawn (with replacement) from scripts/words.txt."""
    word_pool = read_file("words.txt")
    return [random.choice(word_pool) for _ in range(size)]
def string_num_generator(size):
chars = string.digits
return ''.join(random.choice(chars) for _ in range(size))
def string_generator(size):
chars = string.digits + string.ascii_uppercase + string.ascii_lowercase
return ''.join(random.choice(chars) for _ in range(size))
def get_rules():
    """Build a random list of 2-6 simulated firewall rules.

    Each rule has policy/protocol/srcPort/dstPort keys.  Once an
    any-protocol or icmp rule is generated, later rules are restricted to
    tcp/udp.  NOTE: the exact sequence of random.* calls determines the
    output for a given seed, so the call order must not be changed.
    """
    rules = []
    r_count = randint(2, 6)
    has_anyany = False
    for r in range(0, r_count):
        rule = {}
        # Source port spec: 0=any, 1=comma-separated list, 2=range, 3=single.
        s_choice = random.choice([0, 1, 2, 3])
        if s_choice == 1:
            srclist = []
            s_len = randint(2, 10)
            for s in range(0, s_len):
                srclist.append(str(randint(1, 65535)))
            src = ",".join(srclist)
        elif s_choice == 2:
            s_start = randint(1, 65500)
            s_end = randint(s_start, 65535)
            src = str(s_start) + "-" + str(s_end)
        elif s_choice == 3:
            s_port = randint(1, 65500)
            src = str(s_port)
        else:
            src = "any"
        # Destination port spec, same four shapes as the source side.
        d_choice = random.choice([0, 1, 2, 3])
        if d_choice == 1:
            dstlist = []
            d_len = randint(2, 10)
            for d in range(0, d_len):
                dstlist.append(str(randint(1, 65535)))
            dst = ",".join(dstlist)
        elif d_choice == 2:
            d_start = randint(1, 65500)
            d_end = randint(d_start, 65535)
            dst = str(d_start) + "-" + str(d_end)
        elif d_choice == 3:
            d_port = randint(1, 65500)
            dst = str(d_port)
        else:
            dst = "any"
        rule["policy"] = random.choice(["allow", "deny"])
        if has_anyany:
            rule["protocol"] = random.choice(["tcp", "udp"])
        else:
            rule["protocol"] = random.choice(["any", "tcp", "udp", "icmp"])
        if rule["protocol"] == "icmp" or rule["protocol"] == "any":
            # icmp/any rules carry no port specs; the generated src/dst
            # strings above are discarded for this rule.
            has_anyany = True
            rule["srcPort"] = "any"
            rule["dstPort"] = "any"
        else:
            rule["srcPort"] = src
            rule["dstPort"] = dst
        rules.append(rule)
    return rules
def run(orgs, tags, acls, policies):
    """Generate a fake Meraki-dashboard dataset.

    Creates `orgs` organizations, each with `tags` adaptive-policy groups
    (plus the two built-in groups), `acls` ACLs and `policies` bindings,
    and writes them to scripts/orgs.json, groups.json, acls.json and
    bindings.json.
    """
    # Seed the RNG from a byte-swapped millisecond timestamp.
    t = int(time.time() * 1000.0)
    random.seed(((t & 0xff000000) >> 24) +
                ((t & 0x00ff0000) >> 8) +
                ((t & 0x0000ff00) << 8) +
                ((t & 0x000000ff) << 24))
    neworgs = []
    newtags = {}
    newacls = {}
    newpolicies = {}
    isotime = datetime.datetime.now().isoformat()
    for o in range(0, int(orgs)):
        w = random_words(2)
        org_name = (w[0] + " " + w[1]).title()
        org_id = string_num_generator(18)
        org_code = string_generator(7)
        org_url = "{{url}}/o/" + org_code + "/manage/organization/overview"
        neworgs.append({"id": org_id, "name": org_name, "url": org_url})
        # Group values 0 and 2 (and their names) are reserved for the two
        # built-in groups created just below.
        used_tags = [0, 2]
        used_names = ["Unknown", "MerakiInternal"]
        newtags[org_id] = []
        t0_desc = "Unknown group applies when a policy is specified for unsuccessful group classification"
        t2_desc = "MerakiInternal group is used by Meraki devices for internal and dashboard communication"
        newtags[org_id].append({"groupId": 0 + first_db_record, "value": 0, "name": "Unknown", "description": t0_desc,
                                "versionNum": 0, "networkObjectId": None, "createdAt": isotime,
                                "updatedAt": isotime})
        newtags[org_id].append({"groupId": 1 + first_db_record, "value": 2, "name": "MerakiInternal",
                                "description": t2_desc, "versionNum": 0, "networkObjectId": None,
                                "createdAt": isotime, "updatedAt": isotime})
        for t in range(0, int(tags)):
            # Retry until we draw a group name not used in this org.
            while True:
                tw = random_words(6)
                tag_name = (tw[0] + " " + tw[1]).title()
                if tag_name not in used_names:
                    used_names.append(tag_name)
                    break
            tag_desc = (tw[2] + " " + tw[3] + " " + tw[4] + " " + tw[5]).title()
            # Retry until we draw an unused SGT value.
            while True:
                tag_num = randint(3, 65529)
                if tag_num not in used_tags:
                    break
            used_tags.append(tag_num)
            newtags[org_id].append({"groupId": t + 2, "value": tag_num, "name": tag_name, "description": tag_desc,
                                    "versionNum": 1, "networkObjectId": None, "createdAt": isotime,
                                    "updatedAt": isotime})
        newacls[org_id] = []
        for a in range(0, int(acls)):
            while True:
                tw = random_words(6)
                acl_name = (tw[0] + " " + tw[1]).title()
                if acl_name not in used_names:
                    used_names.append(acl_name)
                    break
            acl_desc = (tw[2] + " " + tw[3] + " " + tw[4] + " " + tw[5]).title()
            acl_ver = random.choice(["ipv4", "ipv6", "agnostic"])
            acl_rules = get_rules()
            newacls[org_id].append({"aclId": a + first_db_record, "name": acl_name, "description": acl_desc,
                                    "ipVersion": acl_ver, "rules": acl_rules, "versionNum": 1, "createdAt": isotime,
                                    "updatedAt": isotime})
        newpolicies[org_id] = []
        for b in range(0, int(policies)):
            while True:
                tw = random_words(6)
                pol_name = (tw[0] + " " + tw[1]).title()
                if pol_name not in used_names:
                    used_names.append(pol_name)
                    break
            pol_desc = (tw[2] + " " + tw[3] + " " + tw[4] + " " + tw[5]).title()
            pol_catch = random.choice(["global", "deny all", "allow all"])
            pol_acls = []
            # Roughly half the bindings reference 2-9 distinct random ACLs.
            apply_acl = random.choice([True, False])
            if apply_acl:
                for x in range(0, randint(2, 9)):
                    newpol = random.choice(newacls[org_id])["aclId"]
                    if newpol not in pol_acls:
                        pol_acls.append(newpol)
            pol_src = random.choice(newtags[org_id])["groupId"]
            pol_dst = random.choice(newtags[org_id])["groupId"]
            newpolicies[org_id].append({"name": pol_name, "description": pol_desc, "monitorModeEnabled": False,
                                        "versionNum": 1, "catchAllRule": pol_catch, "bindingEnabled": True,
                                        "aclIds": pol_acls, "updatedAt": isotime, "srcGroupId": pol_src,
                                        "dstGroupId": pol_dst})
    write_file("orgs.json", json.dumps(neworgs, indent=4))
    write_file("groups.json", json.dumps(newtags, indent=4))
    write_file("acls.json", json.dumps(newacls, indent=4))
    write_file("bindings.json", json.dumps(newpolicies, indent=4))
@csrf_exempt
def parse_url(request):
    """Simulated Meraki dashboard API dispatcher.

    Parses the request path below /meraki/api/v1/organizations, loads the
    matching JSON-backed "table" (orgs/groups/acls/bindings), delegates the
    actual CRUD work to handle_request(), and persists any resulting change
    back to disk.

    :param request: Django HttpRequest (CSRF exempt; body may carry JSON)
    :return: whatever handle_request produced as a response, or an
        HttpResponseBadRequest for unsupported binding methods
    """
    log = []
    # Scheme + host portion of the request URI, e.g. "https://host:port".
    baseurl = "/".join(request.build_absolute_uri().split("/")[:3])
    # Strip the API prefix; what remains is "<org_id>[/.../<endpoint>[/<id>]]".
    p = request.path.replace("/meraki/api/v1/organizations/", "").replace("/meraki/api/v1/organizations", "")
    arr = p.split("/")
    isotime = datetime.datetime.now().isoformat()
    org_id = arr[0]
    # Per-endpoint server-generated fields ("{{...}}" placeholders are
    # expanded downstream, presumably by handle_request -- verify).
    fixedvals = {"organizations": {"id": "{{id-num:18}}", "url": "{{url}}/o/{{id-mix:7}}/manage/organization/overview"},
                 "groups": {"groupId": "{{length}}", "versionNum": 1, "createdAt": isotime, "updatedAt": isotime},
                 "acls": {"aclId": "{{length}}", "versionNum": 1, "createdAt": isotime, "updatedAt": isotime},
                 "bindings": {"versionNum": 1, "updatedAt": isotime}}
    # Fields a client is allowed to supply on POST/PUT per endpoint.
    postvals = {"organizations": {"name": None},
                "groups": {"name": None, "description": None, "value": None, "networkObjectId": None},
                "acls": {"name": None, "description": None, "ipVersion": None, "rules": None},
                "bindings": {"srcGroupId": None, "dstGroupId": None, "name": None, "description": None, "aclIds": None,
                             "catchAllRule": None, "bindingEnabled": None, "monitorModeEnabled": None}}
    # Per-endpoint primary-key name plus uniqueness constraints.
    info = {"organizations": {"id": "id", "unique": [{"id": []}]},
            "groups": {"id": "groupId", "unique": [{"value": [], "groupId": []}]},
            "acls": {"id": "aclId", "unique": [{"name": [], "aclId": []}]},
            "bindings": {"none_as_delete_key": "aclIds", "put_unique": ["srcGroupId", "dstGroupId"],
                         "unique_results": []}}
    append_log(log, "dashboard_simulator::", request.path)
    ret = None
    try:
        if len(arr) == 1:
            # Bare "/organizations[/<id>]" -- the orgs file is a flat list.
            file_type = "orgs.json"
            full_dataset = []
            dataset = read_json_file(file_type, log)
            if arr[0] == "":
                elem_id = None
            else:
                elem_id = arr[0]
            endpoint = "organizations"
        else:
            # "/organizations/<org_id>/<endpoint>[/<elem_id>]" -- these files
            # are dicts keyed by org id.
            file_type = arr[2] + ".json"
            full_dataset = read_json_file(file_type, log)
            dataset = full_dataset.pop(org_id, [])
            if len(arr) == 3 or request.method == "POST":
                elem_id = None
            else:
                elem_id = arr[3]
            endpoint = arr[2]
        if endpoint == "bindings" and (request.method == "POST" or request.method == "DELETE"):
            # NOTE(review): log tag says "dashboard_monitor" while every other
            # entry here uses "dashboard_simulator" -- looks like a paste slip.
            append_log(log, "dashboard_monitor::bindings::Unsupported Method")
            db_log("dashboard_simulator", log)
            return HttpResponseBadRequest("Unsupported Method")
        if request.body:
            jd = json.loads(request.body)
        else:
            jd = None
        updated_data, ret = handle_request(request.method, jd, baseurl, endpoint, elem_id, dataset, fixedvals, postvals,
                                           info)
        if updated_data:
            if isinstance(full_dataset, list):
                # NOTE(review): for the orgs branch full_dataset is always [],
                # so this writes only the updated record -- confirm existing
                # orgs are merged upstream (handle_request) before relying on it.
                write_file(file_type, json.dumps(full_dataset + [updated_data], indent=4))
            else:
                full_dataset[org_id] = updated_data
                write_file(file_type, json.dumps(full_dataset, indent=4))
    except Exception as e:
        append_log(log, "dashboard_simulator::Exception.", e)
    db_log("dashboard_simulator", log)
    return ret
| 2.09375 | 2 |
course/admin.py | khanhduy8/relate | 284 | 12760024 | <filename>course/admin.py
__copyright__ = "Copyright (C) 2014 <NAME>"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from django.utils.translation import (
gettext_lazy as _, pgettext)
from django.contrib import admin
from course.models import (
Course, Event,
ParticipationTag,
Participation, ParticipationPermission,
ParticipationRole, ParticipationRolePermission,
ParticipationPreapproval,
AuthenticationToken,
InstantFlowRequest,
FlowSession, FlowPageData,
FlowPageVisit, FlowPageVisitGrade,
FlowRuleException,
GradingOpportunity, GradeChange, InstantMessage,
Exam, ExamTicket)
from django import forms
from relate.utils import string_concat
from course.enrollment import (approve_enrollment, deny_enrollment)
from course.constants import (
participation_permission as pperm,
exam_ticket_states
)
from typing import Any, Text, Tuple # noqa
# {{{ permission helpers
def _filter_courses_for_user(queryset, user):
if user.is_superuser:
return queryset
z = queryset.filter(
participations__user=user,
participations__roles__permissions__permission=pperm.use_admin_interface)
return z
def _filter_course_linked_obj_for_user(queryset, user):
if user.is_superuser:
return queryset
return queryset.filter(
course__participations__user=user,
course__participations__roles__permissions__permission # noqa
=pperm.use_admin_interface
)
def _filter_participation_linked_obj_for_user(queryset, user):
if user.is_superuser:
return queryset
return queryset.filter(
participation__course__participations__user=user,
participation__course__participations__roles__permissions__permission # noqa
=pperm.use_admin_interface)
# }}}
# {{{ list filter helper
def _filter_related_only(filter_arg: str) -> Tuple[str, Any]:
    """Build a ``list_filter`` entry that only offers related objects in use."""
    pair = (filter_arg, admin.RelatedOnlyFieldListFilter)
    return pair
# }}}
# {{{ course
class UnsafePasswordInput(forms.TextInput):
    # This sends passwords back to the user--not ideal, but OK for the XMPP
    # password.
    # Renders as <input type="password"> while still round-tripping the value.
    input_type = "password"
class CourseAdminForm(forms.ModelForm):
    """Course change form that masks the XMPP password field in the UI."""
    class Meta:
        model = Course
        widgets = {
            "course_xmpp_password": UnsafePasswordInput
        }
        exclude = ()
class CourseAdmin(admin.ModelAdmin):
    """Admin for Course: broad inline editing of listing/visibility flags."""
    list_display = (
            "identifier",
            "number",
            "name",
            "time_period",
            "start_date",
            "end_date",
            "hidden",
            "listed",
            "accepts_enrollment")
    # Everything except the identifier is editable straight from the list view.
    list_editable = (
            "number",
            "name",
            "time_period",
            "start_date",
            "end_date",
            "hidden",
            "listed",
            "accepts_enrollment")
    list_filter = (
            "number",
            "time_period",
            "hidden",
            "listed",
            "accepts_enrollment")
    date_hierarchy = "start_date"
    search_fields = (
            "identifier",
            "number",
            "name",
            "time_period")
    form = CourseAdminForm
    save_on_top = True

    # {{{ permissions

    def has_add_permission(self, request):
        # These are created only through the course creation form.
        return False

    def get_queryset(self, request):
        # Non-superusers only see courses they hold admin permission in.
        qs = super().get_queryset(request)
        return _filter_courses_for_user(qs, request.user)

    # }}}

admin.site.register(Course, CourseAdmin)
# }}}
# {{{ events
class EventAdmin(admin.ModelAdmin):
    """Admin for calendar events belonging to a course."""
    list_display = (
            "course",
            "kind",
            "ordinal",
            "time",
            "end_time",
            "shown_in_calendar")
    list_filter = (_filter_related_only("course"), "kind", "shown_in_calendar")

    date_hierarchy = "time"

    search_fields = (
            "course__identifier",
            "kind",
            )

    # NOTE(review): __str__ on a ModelAdmin (not the model) reads self.kind /
    # self.ordinal, which the admin class does not define -- marked "not used",
    # presumably dead code carried over from the Event model.
    def __unicode__(self):  # pragma: no cover  # not used
        return "{}{} in {}".format(
                self.kind,
                " (%s)" % str(self.ordinal) if self.ordinal is not None else "",
                self.course)

    __str__ = __unicode__

    list_editable = ("ordinal", "time", "end_time", "shown_in_calendar")

    # {{{ permissions

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        return _filter_course_linked_obj_for_user(qs, request.user)

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Only offer courses the requesting user administers in the dropdown.
        if db_field.name == "course":
            kwargs["queryset"] = _filter_courses_for_user(
                    Course.objects, request.user)
        return super().formfield_for_foreignkey(
                db_field, request, **kwargs)

    # }}}

admin.site.register(Event, EventAdmin)
# }}}
# {{{ participation tags
class ParticipationTagAdmin(admin.ModelAdmin):
    """Admin for per-course participation tags."""
    list_filter = (_filter_related_only("course"),)

    # {{{ permissions

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        return _filter_course_linked_obj_for_user(qs, request.user)

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Restrict the course dropdown to administered courses.
        if db_field.name == "course":
            kwargs["queryset"] = _filter_courses_for_user(
                    Course.objects, request.user)
        return super().formfield_for_foreignkey(
                db_field, request, **kwargs)

    # }}}

admin.site.register(ParticipationTag, ParticipationTagAdmin)
# }}}
# {{{ participations
class ParticipationRolePermissionInline(admin.TabularInline):
    """Inline editor for the permissions attached to a participation role."""
    model = ParticipationRolePermission
    extra = 3
class ParticipationRoleAdmin(admin.ModelAdmin):
    """Admin for per-course participation roles with inline permission rows."""
    inlines = (ParticipationRolePermissionInline,)

    list_filter = (_filter_related_only("course"), "identifier")

    def get_queryset(self, request):
        # _filter_course_linked_obj_for_user already short-circuits for
        # superusers, so the previous explicit is_superuser branch here was
        # redundant; dropped for consistency with ParticipationTagAdmin etc.
        qs = super().get_queryset(request)
        return _filter_course_linked_obj_for_user(qs, request.user)

admin.site.register(ParticipationRole, ParticipationRoleAdmin)
class ParticipationPermissionInline(admin.TabularInline):
    """Inline editor for extra per-participation permissions."""
    model = ParticipationPermission
    extra = 3
class ParticipationForm(forms.ModelForm):
    """Participation change form validating that tags/roles match the course.

    Raises a per-field ValidationError if any selected tag or role belongs
    to a different course than the participation itself.
    """
    class Meta:
        model = Participation
        exclude = ("role",)

    def clean(self):
        super().clean()

        # Hoisted out of the loops: previously imported separately in each
        # error branch and the course looked up per iteration.
        from django.core.exceptions import ValidationError
        course = self.cleaned_data.get("course")

        for tag in self.cleaned_data.get("tags", []):
            if tag.course != course:
                raise ValidationError(
                        {"tags": _("Tags must belong to same course as "
                            "participation.")})
        for role in self.cleaned_data.get("roles", []):
            if role.course != course:
                raise ValidationError(
                        {"roles": _("Role must belong to same course as "
                            "participation.")})
class ParticipationAdmin(admin.ModelAdmin):
    """Admin for Participation with enrollment actions and permission inlines."""
    form = ParticipationForm

    def get_roles(self, obj):
        # Comma-joined role names for the list display column.
        return ", ".join(str(role.name) for role in obj.roles.all())

    get_roles.short_description = _("Roles")  # type: ignore

    def get_tags(self, obj):
        return ", ".join(str(tag.name) for tag in obj.tags.all())

    get_tags.short_description = _("Tags")  # type: ignore

    # Fixme: This can be misleading when Non-superuser click on the
    # link of a user who also attend other courses.
    def get_user(self, obj):
        # Renders the participant's full name as a link to the user admin page.
        from django.urls import reverse
        from django.conf import settings
        from django.utils.html import mark_safe

        return mark_safe(string_concat(
                "<a href='%(link)s'>", "%(user_fullname)s",
                "</a>"
                ) % {
                    "link": reverse(
                        "admin:%s_change"
                        % settings.AUTH_USER_MODEL.replace(".", "_")
                        .lower(),
                        args=(obj.user.id,)),
                    "user_fullname": obj.user.get_full_name(
                        force_verbose_blank=True),
                    })

    get_user.short_description = pgettext("real name of a user", "Name")  # type:ignore # noqa
    get_user.admin_order_field = "user__last_name"  # type: ignore
    get_user.allow_tags = True  # type: ignore

    list_display = (
            "user",
            "get_user",
            "course",
            "get_roles",
            "status",
            "get_tags",
            )

    def get_list_filter(self, request):
        # Superusers get plain filters; everyone else gets related-only
        # variants so they only see values from their own courses.
        if request is not None and request.user.is_superuser:
            return ("course",
                    "roles__name",
                    "status",
                    "tags")
        return (_filter_related_only("course"),
                _filter_related_only("roles"),
                "status",
                _filter_related_only("tags"))

    raw_id_fields = ("user",)

    filter_horizontal = ("tags", "roles",)

    search_fields = (
            "course__identifier",
            "user__username",
            "user__first_name",
            "user__last_name",
            )

    actions = [approve_enrollment, deny_enrollment]

    inlines = (ParticipationPermissionInline,)

    save_on_top = True

    # {{{ permissions

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        return _filter_course_linked_obj_for_user(qs, request.user)

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "course":
            kwargs["queryset"] = _filter_courses_for_user(
                    Course.objects, request.user)
        # Fixme: This branch appears unreachable -- "tags" is a many-to-many
        # field, so it would go through formfield_for_manytomany instead.
        if db_field.name == "tags":
            kwargs["queryset"] = _filter_course_linked_obj_for_user(
                    ParticipationTag.objects, request.user)
        return super().formfield_for_foreignkey(
                db_field, request, **kwargs)

    # }}}

admin.site.register(Participation, ParticipationAdmin)
class ParticipationPreapprovalAdmin(admin.ModelAdmin):
    """Admin for pre-approved enrollments (by email or institutional ID)."""
    def get_roles(self, obj):
        return ", ".join(str(role.name) for role in obj.roles.all())

    get_roles.short_description = _("Roles")  # type: ignore

    list_display = ("email", "institutional_id", "course", "get_roles",
            "creation_time", "creator")
    list_filter = (_filter_related_only("course"), _filter_related_only("roles"))

    search_fields = (
            "email", "institutional_id",
            )

    # {{{ permissions

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        if request.user.is_superuser:
            return qs
        return _filter_course_linked_obj_for_user(qs, request.user)

    # creator/creation_time are set automatically in save_model below.
    exclude = ("creator", "creation_time", "role")

    def save_model(self, request, obj, form, change):
        # Record the admin user who created the preapproval.
        obj.creator = request.user
        obj.save()

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "course":
            kwargs["queryset"] = _filter_courses_for_user(
                    Course.objects, request.user)
        return super().formfield_for_foreignkey(
                db_field, request, **kwargs)

    # }}}

admin.site.register(ParticipationPreapproval, ParticipationPreapprovalAdmin)
# }}}
class AuthenticationTokenAdmin(admin.ModelAdmin):
    """Admin for API authentication tokens (no course scoping applied here)."""
    list_display = ("id", "participation", "restrict_to_participation_role",
            "description", "valid_until", "revocation_time")

    date_hierarchy = "creation_time"

    search_fields = (
            "id", "description", "participation__user__username"
            )

admin.site.register(AuthenticationToken, AuthenticationTokenAdmin)
class InstantFlowRequestAdmin(admin.ModelAdmin):
    """Admin for instant-flow requests."""
    list_display = ("course", "flow_id", "start_time", "end_time", "cancelled")
    list_filter = (_filter_related_only("course"),)

    date_hierarchy = "start_time"

    # Bug fix: previously searched "email" (copy-pasted from
    # ParticipationPreapprovalAdmin), a field InstantFlowRequest does not
    # have, which would break the admin search box. Search flow_id instead.
    search_fields = (
            "flow_id",
            )

admin.site.register(InstantFlowRequest, InstantFlowRequestAdmin)
# {{{ flow sessions
class FlowPageDataInline(admin.TabularInline):
    """Read-mostly inline showing a session's per-page data rows."""
    model = FlowPageData
    extra = 0
class FlowSessionAdmin(admin.ModelAdmin):
    """Admin for flow sessions with per-page data inlined."""
    def get_participant(self, obj):
        # Anonymous sessions have no participation.
        if obj.participation is None:
            return None

        return obj.participation.user

    get_participant.short_description = _("Participant")  # type: ignore
    get_participant.admin_order_field = "participation__user"  # type: ignore

    search_fields = (
            "=id",
            "flow_id",
            "access_rules_tag",
            "participation__user__username",
            "participation__user__first_name",
            "participation__user__last_name",
            "user__username",
            "user__first_name",
            "user__last_name",
            )

    list_display = (
            "id",
            "flow_id",
            "get_participant",
            "course",
            "start_time",
            "completion_time",
            "access_rules_tag",
            "in_progress",
            #"expiration_mode",
            )
    list_display_links = (
            "flow_id",
            "get_participant",
            )

    date_hierarchy = "start_time"

    list_filter = (
            _filter_related_only("course"),
            "flow_id",
            "in_progress",
            "access_rules_tag",
            "expiration_mode",
            )

    inlines = (FlowPageDataInline,)

    raw_id_fields = ("participation", "user")

    save_on_top = True

    # {{{ permissions

    def has_add_permission(self, request):
        # These are only created automatically.
        return False

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        return _filter_course_linked_obj_for_user(qs, request.user)

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "course":
            kwargs["queryset"] = _filter_courses_for_user(
                    Course.objects, request.user)
        return super().formfield_for_foreignkey(
                db_field, request, **kwargs)

    # }}}

admin.site.register(FlowSession, FlowSessionAdmin)
# }}}
# {{{ flow page visit
class FlowPageVisitGradeInline(admin.TabularInline):
    """Inline listing the grades recorded for a page visit."""
    model = FlowPageVisitGrade
    extra = 0
class HasAnswerListFilter(admin.SimpleListFilter):
    """Sidebar filter selecting visits with or without a recorded answer."""
    # Wrapped in _() for translation, consistent with FlowIdListFilter below.
    title = _("has answer")

    parameter_name = "has_answer"

    def lookups(self, request, model_admin):
        return (
                ("y", _("Yes")),
                ("n", _("No")),
                )

    def queryset(self, request, queryset):
        # No selection: leave the queryset untouched.
        if self.value() is None:
            return queryset

        # "y" -> answer IS NOT NULL; "n" -> answer IS NULL.
        return queryset.filter(answer__isnull=self.value() != "y")
class FlowIdListFilter(admin.SimpleListFilter):
    """
    This is only necessary when flow_id is only accessible by FlowSession, which is
    a ForeignKey in the model
    """
    title = _("Flow ID")
    parameter_name = "flow_id"

    def lookups(self, request, model_admin):
        # Offer only flow ids visible to the requesting user (admin-permission
        # scoping for non-superusers, mirroring the queryset filters above).
        qs = model_admin.get_queryset(request)

        if not request.user.is_superuser:
            qs = qs.filter(
                flow_session__course__participations__user=request.user,
                flow_session__course__participations__roles__permissions__permission  # noqa
                =pperm.use_admin_interface)

        flow_ids = qs.values_list("flow_session__flow_id", flat=True).distinct()
        return zip(flow_ids, flow_ids)

    def queryset(self, request, queryset):
        if self.value():
            return queryset.filter(flow_session__flow_id=self.value())
        else:
            return queryset
class FlowPageVisitAdmin(admin.ModelAdmin):
    """Admin for individual flow page visits, with grades inlined."""
    def get_course(self, obj):
        return obj.flow_session.course

    get_course.short_description = _("Course")  # type: ignore
    get_course.admin_order_field = "flow_session__course"  # type: ignore

    def get_flow_id(self, obj):
        return obj.flow_session.flow_id

    get_flow_id.short_description = _("Flow ID")  # type: ignore
    get_flow_id.admin_order_field = "flow_session__flow_id"  # type: ignore

    def get_page_id(self, obj):
        # Pages removed from the flow have no ordinal; mark them accordingly.
        if obj.page_data.page_ordinal is None:
            return string_concat("%s/%s (", _("not in use"), ")") % (
                    obj.page_data.group_id,
                    obj.page_data.page_id)
        else:
            return "{}/{} ({})".format(
                    obj.page_data.group_id,
                    obj.page_data.page_id,
                    obj.page_data.page_ordinal)

    get_page_id.short_description = _("Page ID")  # type: ignore
    get_page_id.admin_order_field = "page_data__page_id"  # type: ignore

    def get_participant(self, obj):
        if obj.flow_session.participation:
            return obj.flow_session.participation.user
        else:
            return string_concat("(", _("anonymous"), ")")

    get_participant.short_description = _("Owner")  # type: ignore
    get_participant.admin_order_field = "flow_session__participation"  # type: ignore

    def get_answer_is_null(self, obj):
        # Despite the name, True means the visit *has* an answer (see
        # short_description below).
        return obj.answer is not None

    get_answer_is_null.short_description = _("Has answer")  # type: ignore
    get_answer_is_null.boolean = True  # type: ignore

    def get_flow_session_id(self, obj):
        return obj.flow_session.id

    get_flow_session_id.short_description = _("Flow Session ID")  # type: ignore
    get_flow_session_id.admin_order_field = "flow_session__id"  # type: ignore

    list_filter = (
            HasAnswerListFilter,
            "is_submitted_answer",
            "is_synthetic",
            _filter_related_only("flow_session__participation__course"),
            FlowIdListFilter,
            )
    date_hierarchy = "visit_time"
    list_display = (
            "id",
            "get_course",
            "get_flow_id",
            "get_page_id",
            "get_participant",
            "get_flow_session_id",
            "visit_time",
            "get_answer_is_null",
            "is_submitted_answer",
            "is_synthetic",
            "user",
            "impersonated_by",
            )
    list_display_links = (
            "id",
            )

    search_fields = (
            "=id",
            "=flow_session__id",
            "flow_session__flow_id",
            "page_data__group_id",
            "page_data__page_id",
            "flow_session__participation__user__username",
            "flow_session__participation__user__first_name",
            "flow_session__participation__user__last_name",
            )

    raw_id_fields = ("flow_session", "page_data")

    inlines = (FlowPageVisitGradeInline,)

    save_on_top = True

    # {{{ permissions

    def has_add_permission(self, request):
        # These are created only automatically.
        return False

    def get_queryset(self, request):
        # Scope via flow_session__course, hence not using the generic helper.
        qs = super().get_queryset(request)
        if request.user.is_superuser:
            return qs
        return qs.filter(
                flow_session__course__participations__user=request.user,
                flow_session__course__participations__roles__permissions__permission  # noqa
                =pperm.use_admin_interface)

    # }}}

admin.site.register(FlowPageVisit, FlowPageVisitAdmin)
# }}}
# {{{ flow access
class FlowRuleExceptionAdmin(admin.ModelAdmin):
    """Admin for per-participant flow rule exceptions (read-only creation)."""
    def get_course(self, obj):
        return obj.participation.course

    get_course.short_description = _("Course")  # type: ignore
    get_course.admin_order_field = "participation__course"  # type: ignore

    def get_participant(self, obj):
        return obj.participation.user

    get_participant.short_description = _("Participant")  # type: ignore
    get_participant.admin_order_field = "participation__user"  # type: ignore

    ordering = ("-creation_time",)

    search_fields = (
            "flow_id",
            "participation__user__username",
            "participation__user__first_name",
            "participation__user__last_name",
            "comment",
            )

    list_display = (
            "get_participant",
            "get_course",
            "flow_id",
            "kind",
            "expiration",
            "creation_time",
            )
    list_display_links = (
            "get_participant",
            "flow_id",
            )
    list_filter = (
            _filter_related_only("participation__course"),
            "flow_id",
            "kind",
            )

    date_hierarchy = "creation_time"

    raw_id_fields = ("participation",)

    # {{{ permissions

    def has_add_permission(self, request):
        # These are only created automatically.
        return False

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        return _filter_participation_linked_obj_for_user(qs, request.user)

    exclude = ("creator", "creation_time")

    def save_model(self, request, obj, form, change):  # pragma: no cover
        # This won't work since it's not allowed to add
        obj.creator = request.user
        obj.save()

    # }}}

admin.site.register(FlowRuleException, FlowRuleExceptionAdmin)
# }}}
# {{{ grading
class GradingOpportunityAdmin(admin.ModelAdmin):
    """Admin for grading opportunities with inline visibility toggles."""
    list_display = (
            "name",
            "course",
            "identifier",
            "due_time",
            "shown_in_grade_book",
            "shown_in_participant_grade_book",
            )
    list_filter = (
            _filter_related_only("course"),
            "shown_in_grade_book",
            "shown_in_participant_grade_book",
            )
    list_editable = (
            "shown_in_grade_book",
            "shown_in_participant_grade_book",
            )

    # {{{ permissions

    exclude = ("creation_time",)

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        return _filter_course_linked_obj_for_user(qs, request.user)

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "course":
            kwargs["queryset"] = _filter_courses_for_user(
                    Course.objects, request.user)
        return super().formfield_for_foreignkey(
                db_field, request, **kwargs)

    # }}}

admin.site.register(GradingOpportunity, GradingOpportunityAdmin)
class GradeChangeAdmin(admin.ModelAdmin):
    """Admin for grade-change records, listed per opportunity/participant."""
    def get_course(self, obj):
        return obj.participation.course

    get_course.short_description = _("Course")  # type: ignore
    get_course.admin_order_field = "participation__course"  # type: ignore

    def get_opportunity(self, obj):
        return obj.opportunity.name

    get_opportunity.short_description = _("Opportunity")  # type: ignore
    get_opportunity.admin_order_field = "opportunity"  # type: ignore

    def get_participant(self, obj):
        return obj.participation.user

    get_participant.short_description = _("Participant")  # type: ignore
    get_participant.admin_order_field = "participation__user"  # type: ignore

    def get_percentage(self, obj):
        # Bug fix: also guard max_points == 0, which previously raised
        # ZeroDivisionError while rendering the change list.
        if obj.points is None or not obj.max_points:
            return None
        else:
            return round(100*obj.points/obj.max_points)

    get_percentage.short_description = "%"  # type: ignore

    list_display = (
            "get_opportunity",
            "get_participant",
            "get_course",
            "state",
            "points",
            "max_points",
            "get_percentage",
            "attempt_id",
            "grade_time",
            )
    list_display_links = (
            "get_opportunity",
            "get_participant",
            )
    date_hierarchy = "grade_time"

    search_fields = (
            "opportunity__name",
            "opportunity__flow_id",
            "opportunity__identifier",
            "participation__user__username",
            "participation__user__first_name",
            "participation__user__last_name",
            "attempt_id",
            )

    list_filter = (
            _filter_related_only("opportunity__course"),
            _filter_related_only("opportunity"),
            "state",
            )

    raw_id_fields = ("participation", "flow_session", "opportunity")

    # {{{ permission

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        return _filter_participation_linked_obj_for_user(qs, request.user)

    exclude = ("creator", "grade_time")

    def save_model(self, request, obj, form, change):
        # Record the admin user entering the grade change.
        obj.creator = request.user
        obj.save()

    # }}}

admin.site.register(GradeChange, GradeChangeAdmin)
# }}}
# {{{ instant message
class InstantMessageAdmin(admin.ModelAdmin):
    """Admin for instant messages sent within a course (read-only creation)."""
    def get_course(self, obj):
        return obj.participation.course

    get_course.short_description = _("Course")  # type: ignore
    get_course.admin_order_field = "participation__course"  # type: ignore

    def get_participant(self, obj):
        return obj.participation.user

    get_participant.short_description = _("Participant")  # type: ignore
    get_participant.admin_order_field = "participation__user"  # type: ignore

    list_filter = (_filter_related_only("participation__course"),)
    list_display = (
            "get_course",
            "get_participant",
            "time",
            "text",
            )

    date_hierarchy = "time"

    search_fields = (
            "text",
            "participation__user__username",
            "participation__user__first_name",
            "participation__user__last_name",
            )

    raw_id_fields = ("participation",)

    # {{{ permissions

    def has_add_permission(self, request):
        # These are created only automatically.
        return False

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        return _filter_participation_linked_obj_for_user(qs, request.user)

    # }}}

admin.site.register(InstantMessage, InstantMessageAdmin)
# }}}
# {{{ exam tickets
class ExamAdmin(admin.ModelAdmin):
    """Admin for exams tied to a course flow."""
    list_filter = (
            _filter_related_only("course"),
            "active",
            "listed",
            )

    list_display = (
            "course",
            "flow_id",
            "active",
            "listed",
            "no_exams_before",
            )

    search_fields = (
            "flow_id",
            )

    date_hierarchy = "no_exams_before"

    # {{{ permissions

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        return _filter_course_linked_obj_for_user(qs, request.user)

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "course":
            kwargs["queryset"] = _filter_courses_for_user(
                    Course.objects, request.user)
        return super().formfield_for_foreignkey(
                db_field, request, **kwargs)

    # }}}

admin.site.register(Exam, ExamAdmin)
class ExamTicketAdmin(admin.ModelAdmin):
    """Admin for exam tickets, with a bulk revoke action."""
    def get_course(self, obj):
        return obj.participation.course

    get_course.short_description = _("Course")  # type: ignore
    get_course.admin_order_field = "participation__course"  # type: ignore

    list_filter = (
            _filter_related_only("participation__course"),
            "state",
            )

    raw_id_fields = ("participation",)

    list_display = (
            "get_course",
            "exam",
            "participation",
            "state",
            "creation_time",
            "usage_time",
            )

    date_hierarchy = "usage_time"

    search_fields = (
            "exam__course__identifier",
            "exam__flow_id",
            "exam__description",
            "participation__user__username",
            "participation__user__first_name",
            "participation__user__last_name",
            )

    # {{{ permissions

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        return _filter_participation_linked_obj_for_user(qs, request.user)

    exclude = ("creator",)

    def save_model(self, request, obj, form, change):
        # Record the admin user who issued the ticket.
        obj.creator = request.user
        obj.save()

    # }}}

    def revoke_exam_tickets(self, request, queryset):  # noqa
        # Bulk action: only still-valid tickets are moved to revoked.
        queryset \
                .filter(state=exam_ticket_states.valid) \
                .update(state=exam_ticket_states.revoked)

    revoke_exam_tickets.short_description = _("Revoke Exam Tickets")  # type: ignore
    actions = [revoke_exam_tickets]

admin.site.register(ExamTicket, ExamTicketAdmin)
# }}}
# vim: foldmethod=marker
| 1.828125 | 2 |
sirepo/auth/__init__.py | mkeilman/sirepo | 49 | 12760025 | <gh_stars>10-100
# -*- coding: utf-8 -*-
u"""Authentication
:copyright: Copyright (c) 2018-2019 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
from pykern import pkcollections
from pykern import pkconfig
from pykern import pkinspect
from pykern.pkcollections import PKDict
from pykern.pkdebug import pkdc, pkdlog, pkdp
from sirepo import api_perm
from sirepo import auth_db
from sirepo import cookie
from sirepo import events
from sirepo import http_reply
from sirepo import http_request
from sirepo import job
from sirepo import util
import contextlib
import datetime
import importlib
import sirepo.auth_role
import sirepo.feature_config
import sirepo.template
import sirepo.uri
import werkzeug.exceptions
#: what routeName to return in the event user is logged out in require_user
LOGIN_ROUTE_NAME = 'login'

#: Guest is a special method
METHOD_GUEST = 'guest'

#: key for auth method for login state
_COOKIE_METHOD = 'sram'

#: There will always be this value in the cookie, if there is a cookie.
_COOKIE_STATE = 'sras'

#: Identifies the user in the cookie
_COOKIE_USER = 'srau'

#: display name assigned to guest accounts
_GUEST_USER_DISPLAY_NAME = 'Guest User'

#: payment plan identifiers; must match SCHEMA_COMMON (verified in init_apis)
_PAYMENT_PLAN_BASIC = 'basic'
_PAYMENT_PLAN_ENTERPRISE = sirepo.auth_role.ROLE_PAYMENT_PLAN_ENTERPRISE
_PAYMENT_PLAN_PREMIUM = sirepo.auth_role.ROLE_PAYMENT_PLAN_PREMIUM
_ALL_PAYMENT_PLANS = (_PAYMENT_PLAN_BASIC, _PAYMENT_PLAN_ENTERPRISE, _PAYMENT_PLAN_PREMIUM)

#: values stored under _COOKIE_STATE describing the login state machine
_STATE_LOGGED_IN = 'li'
_STATE_LOGGED_OUT = 'lo'
_STATE_COMPLETE_REGISTRATION = 'cr'

#: name to module object
_METHOD_MODULES = pkcollections.Dict()

#: Identifies the user in uWSGI logging (read by uwsgi.yml.jinja)
_UWSGI_LOG_KEY_USER = 'sirepo_user'

#TODO(robnagler) probably from the schema
#: For formatting the size parameter to an avatar_uri
_AVATAR_SIZE = 40

#: methods + deprecated_methods
valid_methods = None

#: Methods that the user is allowed to see
visible_methods = None

#: visible_methods excluding guest
non_guest_methods = None

#: avoid circular import issues by importing in init_apis
uri_router = None

#: module config; populated elsewhere (must be unset in the server per init_apis)
cfg = None
@api_perm.require_cookie_sentinel
def api_authCompleteRegistration():
    """Record the user's display name and finish registration.

    Requires an authenticated session; otherwise redirects to the login route.
    """
    # Needs to be explicit, because we would need a special permission
    # for just this API.
    if not _is_logged_in():
        raise util.SRException(LOGIN_ROUTE_NAME, None)
    complete_registration(
        _parse_display_name(http_request.parse_json().get('displayName')),
    )
    return http_reply.gen_json_ok()
@api_perm.allow_visitor
def api_authState():
    """Render the current auth state as a static JavaScript jinja template."""
    state = PKDict(auth_state=_auth_state())
    return http_reply.render_static_jinja('auth-state', 'js', state)
@api_perm.allow_visitor
def api_authLogout(simulation_type=None):
    """Set the current user as logged out.

    Redirects to root simulation page.
    """
    req = None
    if simulation_type:
        # Best-effort parse; an invalid type just redirects to the plain root.
        try:
            req = http_request.parse_params(type=simulation_type)
        except AssertionError:
            pass
    if _is_logged_in():
        events.emit('auth_logout', PKDict(uid=_get_user()))
        cookie.set_value(_COOKIE_STATE, _STATE_LOGGED_OUT)
        _set_log_user()
    return http_reply.gen_redirect_for_app_root(req and req.type)
def check_user_has_role(uid, role, raise_forbidden=True):
    """Return True iff *uid* holds *role*.

    When the role is absent and *raise_forbidden* is set, raise a Forbidden
    error instead of returning False.
    """
    has = bool(auth_db.UserRole.has_role(uid, role))
    if not has and raise_forbidden:
        sirepo.util.raise_forbidden('uid={} role={} not found'.format(uid, role))
    return has
def complete_registration(name=None):
    """Update the database with the user's display_name and sets state to logged-in.

    Guests will have no name.

    :param name: parsed display name; must be None for guest logins
    """
    u = _get_user()
    with util.THREAD_LOCK:
        r = user_registration(u)
        # Bug fix: compare method strings with `==`, not `is` -- identity
        # comparison of strings only works by accident of CPython interning.
        if cookie.unchecked_get_value(_COOKIE_METHOD) == METHOD_GUEST:
            assert name is None, \
                'Cookie method is {} and name is {}. Expected name to be None'.format(METHOD_GUEST, name)
        r.display_name = name
        r.save()
    cookie.set_value(_COOKIE_STATE, _STATE_LOGGED_IN)
def create_new_user(uid_generated_callback, module):
    """Create a simulation-db user, notify the caller, and seed default roles.

    :param uid_generated_callback: invoked with the new uid before roles exist
    :param module: auth method module whose AUTH_METHOD determines the roles
    :return: the newly created uid
    """
    import sirepo.simulation_db

    uid = sirepo.simulation_db.user_create()
    uid_generated_callback(uid)
    _create_roles_for_new_user(uid, module.AUTH_METHOD)
    return uid
def guest_uids():
    """All of the uids corresponding to guest users.

    Guests are identified by a null display_name in the registration table.
    """
    return auth_db.UserRegistration.search_all_for_column('uid', display_name=None)
def get_module(name):
    """Look up a registered auth method module by name (KeyError if absent)."""
    return _METHOD_MODULES[name]
def init_apis(*args, **kwargs):
    """Register the auth method modules' APIs with the URI router.

    Imported lazily to avoid circular imports; also sanity-checks that the
    schema's payment plans match this module's constants.
    """
    global uri_router
    assert not cfg.logged_in_user, \
        'Do not set $SIREPO_AUTH_LOGGED_IN_USER in server'
    uri_router = importlib.import_module('sirepo.uri_router')
    for m in _METHOD_MODULES.values():
        uri_router.register_api_module(m)
    import sirepo.simulation_db
    s = list(sirepo.simulation_db.SCHEMA_COMMON.common.constants.paymentPlans.keys())
    assert sorted(s) == sorted(_ALL_PAYMENT_PLANS), \
        f'payment plans from SCHEMA_COMMON={s} not equal to _ALL_PAYMENT_PLANS={_ALL_PAYMENT_PLANS}'
def is_premium_user():
    """True if the logged-in user holds the premium payment-plan role."""
    return check_user_has_role(
        logged_in_user(),
        sirepo.auth_role.ROLE_PAYMENT_PLAN_PREMIUM,
        raise_forbidden=False,
    )
def logged_in_user(check_path=True):
    """Get the logged in user

    Args:
        check_path (bool): call `simulation_db.user_path` [True]
    Returns:
        str: uid of authenticated user
    """
    u = _get_user()
    # Not being logged in is an SRException (redirect to login), not an error.
    if not _is_logged_in():
        raise util.SRException(
            'login',
            None,
            'user not logged in uid={}',
            u,
        )
    # A logged-in state without a uid indicates a corrupted cookie.
    assert u, \
        'no user in cookie: state={} method={}'.format(
            cookie.unchecked_get_value(_COOKIE_STATE),
            cookie.unchecked_get_value(_COOKIE_METHOD),
        )
    if check_path:
        import sirepo.simulation_db
        sirepo.simulation_db.user_path(u, check=True)
    return u
def login(module, uid=None, model=None, sim_type=None, display_name=None, is_mock=False, want_redirect=False):
    """Login the user

    Raises an exception if successful, except in the case of methods

    Args:
        module (module): method module
        uid (str): user to login
        model (auth_db.UserDbBase): user to login (overrides uid)
        sim_type (str): app to redirect to
        display_name (str): if given, registration is completed with this name
        is_mock (bool): test mode; return instead of raising a response
        want_redirect (bool): passed through to login_success_response
    """
    _validate_method(module, sim_type=sim_type)
    guest_uid = None
    if model:
        uid = model.uid
    # if previously cookied as a guest, move the non-example simulations into uid below
    m = cookie.unchecked_get_value(_COOKIE_METHOD)
    if m == METHOD_GUEST and module.AUTH_METHOD != METHOD_GUEST:
        guest_uid = _get_user() if _is_logged_in() else None
    if uid:
        _login_user(module, uid)
    if module.AUTH_METHOD in cfg.deprecated_methods:
        pkdlog('deprecated auth method={} uid={}'.format(module.AUTH_METHOD, uid))
        if not uid:
            # No user so clear cookie so this method is removed
            reset_state()
        # We are logged in with a deprecated method, and now the user
        # needs to login with an allowed method.
        login_fail_redirect(sim_type, module, 'deprecated', reload_js=not uid)
    if not uid:
        # No user in the cookie and method didn't provide one so
        # the user might be switching methods (e.g. github to email or guest to email).
        # Not allowed to go to guest from other methods, because there's
        # no authentication for guest.
        # Or, this is just a new user, and we'll create one.
        uid = _get_user() if _is_logged_in() else None
        m = cookie.unchecked_get_value(_COOKIE_METHOD)
        if uid and module.AUTH_METHOD not in (m, METHOD_GUEST):
            # switch this method to this uid (even for methods)
            # except if the same method, then assuming logging in as different user.
            # This handles the case where logging in as guest, creates a user every time
            _login_user(module, uid)
        else:
            uid = create_new_user(lambda u: _login_user(module, u), module)
    if model:
        model.uid = uid
        model.save()
    if display_name:
        complete_registration(_parse_display_name(display_name))
    if is_mock:
        return
    if sim_type:
        if guest_uid and guest_uid != uid:
            # Migrate the prior guest session's simulations to the real account.
            import sirepo.simulation_db
            sirepo.simulation_db.move_user_simulations(guest_uid, uid)
        # Raises the redirect/response; only falls through for hidden methods.
        login_success_response(sim_type, want_redirect)
    assert not module.AUTH_METHOD_VISIBLE
def login_fail_redirect(sim_type=None, module=None, reason=None, reload_js=False):
    """Raise an SRException routing the client to the loginFail page."""
    params = PKDict(
        method=module.AUTH_METHOD,
        reason=reason,
        reload_js=reload_js,
        sim_type=sim_type,
    )
    raise util.SRException(
        'loginFail',
        params,
        'login failed: reason={} method={}',
        reason,
        module.AUTH_METHOD,
    )
def login_success_response(sim_type, want_redirect=False):
    """Finish a successful login by raising a Redirect or a JSON authState reply."""
    r = None
    if (
        cookie.get_value(_COOKIE_STATE) == _STATE_COMPLETE_REGISTRATION
        and cookie.get_value(_COOKIE_METHOD) == METHOD_GUEST
    ):
        # guests have nothing to register, so auto-complete
        complete_registration()
    if want_redirect:
        r = 'completeRegistration' if (
            cookie.get_value(_COOKIE_STATE) == _STATE_COMPLETE_REGISTRATION
        ) else None
        raise sirepo.util.Redirect(sirepo.uri.local_route(sim_type, route_name=r))
    raise sirepo.util.Response(
        response=http_reply.gen_json_ok(PKDict(authState=_auth_state())),
    )
def need_complete_registration(model):
    """Does unauthenticated user need to complete registration?

    If the current method is deprecated, then we will end up asking
    the user for a name again, but that's ok.
    Does not work for guest (which don't have their own models anyway).

    Args:
        model (auth_db.UserDbBase): unauthenticated user record
    Returns:
        bool: True if user will be redirected to needCompleteRegistration
    """
    # no uid yet, or a registration row with no display_name, both mean
    # registration is still pending (short-circuits before the db lookup)
    return (
        not model.uid
        or not auth_db.UserRegistration.search_by(uid=model.uid).display_name
    )
@contextlib.contextmanager
def process_request(unit_test=None):
    """Wrap one request in a db session and a parsed auth cookie."""
    with auth_db.session(), cookie.process_header(unit_test):
        # Logging happens after the return to Flask so the log user must persist
        # beyond the life of process_request
        _set_log_user()
        yield
def require_auth_basic():
    """Authenticate via HTTP Basic; reply 401 with a challenge header on failure."""
    m = _METHOD_MODULES['basic']
    _validate_method(m)
    uid = m.require_user()
    if not uid:
        raise sirepo.util.Response(
            http_reply.gen_response(
                status=401,
                headers={'WWW-Authenticate': 'Basic realm="*"'},
            ),
        )
    cookie.set_sentinel()
    login(m, uid=uid)
def require_sim_type(sim_type):
    """Ensure the logged in user has the role for *sim_type* (when access-controlled)."""
    if sim_type not in sirepo.feature_config.auth_controlled_sim_types():
        # freely accessible application: nothing to check
        return
    if not _is_logged_in():
        # If a user is not logged in, we allow any sim_type, because
        # the GUI has to be able to get access to certain APIs before
        # logging in.
        return
    check_user_has_role(
        logged_in_user(),
        sirepo.auth_role.for_sim_type(sim_type),
    )
def require_user():
    """Assert the cookie identifies a fully logged in user.

    Returns silently when the user is logged in via a currently configured
    method; otherwise raises an SRException routing the browser to the
    appropriate login or registration page.

    Raises:
        util.SRException: when login (or more registration) is required
    """
    e = None
    m = cookie.unchecked_get_value(_COOKIE_METHOD)
    p = None
    r = 'login'
    s = cookie.unchecked_get_value(_COOKIE_STATE)
    u = _get_user()
    if s is None:
        # brand new visitor: plain 'login' redirect below
        pass
    elif s == _STATE_LOGGED_IN:
        if m in cfg.methods:
            # allow the method module to veto an otherwise valid login
            f = getattr(_METHOD_MODULES[m], 'validate_login', None)
            if f:
                pkdc('validate_login method={}', m)
                f()
            return
        if m in cfg.deprecated_methods:
            e = 'deprecated'
        else:
            e = 'invalid'
        reset_state()
        p = PKDict(reload_js=True)
        # BUG FIX: format string was missing the third '{}' so the uid
        # argument was silently dropped from the error message
        e = 'auth_method={} is {}, forcing login: uid={}'.format(m, e, u)
    elif s == _STATE_LOGGED_OUT:
        e = 'logged out uid={}'.format(u)
        if m in cfg.deprecated_methods:
            # Force login to this specific method so we can migrate to valid method
            r = 'loginWith'
            p = PKDict({':method': m})
            e = 'forced {}={} uid={}'.format(m, r, p)
    elif s == _STATE_COMPLETE_REGISTRATION:
        if m == METHOD_GUEST:
            pkdc('guest completeRegistration={}', u)
            complete_registration()
            return
        r = 'completeRegistration'
        e = 'uid={} needs to complete registration'.format(u)
    else:
        cookie.reset_state('uid={} state={} invalid, cannot continue'.format(s, u))
        p = PKDict(reload_js=True)
        e = 'invalid cookie state={} uid={}'.format(s, u)
    pkdc('SRException uid={} route={} params={} method={} error={}', u, r, p, m, e)
    raise util.SRException(r, p, *(('user not logged in: {}', e) if e else ()))
def reset_state():
    """Drop user and method from the cookie and mark the session logged out."""
    cookie.unchecked_remove(_COOKIE_USER)
    cookie.unchecked_remove(_COOKIE_METHOD)
    cookie.set_value(_COOKIE_STATE, _STATE_LOGGED_OUT)
    _set_log_user()
@contextlib.contextmanager
def set_user_outside_of_http_request(uid):
    """A user set explicitly outside of flask request cycle
    This will try to guess the auth method the user used to authenticate.
    Args:
        uid (str): registered user to impersonate for the duration of the block
    """
    def _auth_module():
        # prefer a method whose UserModel has a row for this uid
        for m in cfg.methods:
            a = _METHOD_MODULES[m]
            if _method_user_model(a, uid):
                return a
        # Only try methods without UserModel after methods with have been
        # exhausted. This ensures that if there is a method with a UserModel
        # we use it so calls like `user_name` work.
        for m in cfg.methods:
            a = _METHOD_MODULES[m]
            if not hasattr(a, 'UserModel'):
                return a
        raise AssertionError(
            f'no module found for uid={uid} in cfg.methods={cfg.methods}',
        )
    assert not util.in_flask_request(), \
        'Only call from outside a flask request context'
    assert auth_db.UserRegistration.search_by(uid=uid), \
        f'no registered user with uid={uid}'
    with cookie.set_cookie_outside_of_flask_request():
        _login_user(
            _auth_module(),
            uid,
        )
        yield
def unchecked_get_user(uid):
    """Return *uid* when it is a registered user, else None (no exception)."""
    with util.THREAD_LOCK:
        registration = auth_db.UserRegistration.search_by(uid=uid)
        return registration.uid if registration else None
def user_dir_not_found(user_dir, uid):
    """Called by simulation_db when user_dir is not found
    Deletes any user records
    Args:
        user_dir (str): directory that was not found (reported in the redirect)
        uid (str): user that does not exist
    Raises:
        util.Redirect: always; sends the browser back to the root uri
    """
    with util.THREAD_LOCK:
        # remove the method-specific user rows first, then the registration
        for m in _METHOD_MODULES.values():
            u = _method_user_model(m, uid)
            if u:
                u.delete()
        u = auth_db.UserRegistration.search_by(uid=uid)
        if u:
            u.delete()
    reset_state()
    raise util.Redirect(
        sirepo.uri.ROOT,
        'simulation_db dir={} not found, deleted uid={}',
        user_dir,
        uid,
    )
def user_if_logged_in(method):
    """Verify user is logged in and method matches

    Args:
        method (str): method must be logged in as
    Returns:
        str: uid when logged in via *method*, else None
    """
    if (
        _is_logged_in()
        and cookie.unchecked_get_value(_COOKIE_METHOD) == method
    ):
        return _get_user()
    return None
def user_name():
    """Return the user_name stored by the current method's UserModel.

    Returns:
        str: user_name of the logged in user
    Raises:
        AssertionError: when the current method has no UserModel row
    """
    m = cookie.unchecked_get_value(_COOKIE_METHOD)
    # BUG FIX: getattr needs a default of None; without it a method module
    # lacking UserModel raised AttributeError and the explicit
    # AssertionError below was unreachable
    u = getattr(
        _METHOD_MODULES[m],
        'UserModel',
        None,
    )
    if u:
        with util.THREAD_LOCK:
            return u.search_by(uid=logged_in_user()).user_name
    raise AssertionError(
        f'user_name not found for uid={logged_in_user()} with method={m}',
    )
def user_registration(uid, display_name=None):
    """Get UserRegistration record or create one
    Args:
        uid (str): registrant
        display_name (str): display_name of user
    Returns:
        auth.UserRegistration: record (potentially blank)
    """
    res = auth_db.UserRegistration.search_by(uid=uid)
    if not res:
        # NOTE(review): search-then-create is not guarded by util.THREAD_LOCK
        # here, unlike other db writers in this module -- confirm callers hold it
        res = auth_db.UserRegistration(
            created=datetime.datetime.utcnow(),
            display_name=display_name,
            uid=uid,
        )
        res.save()
    return res
def _auth_hook_from_header(values):
    """Migrate from old cookie values
    Always sets _COOKIE_STATE, which is our sentinel.
    Args:
        values (dict): just parsed values
    Returns:
        dict: unmodified or migrated values
    """
    if values.get(_COOKIE_STATE):
        # normal case: we've seen a cookie at least once
        # check for cfg.methods changes
        m = values.get(_COOKIE_METHOD)
        if m and m not in valid_methods:
            # invalid method (changed config), reset state
            pkdlog(
                'possibly misconfigured server: invalid cookie_method={}, clearing values={}',
                m,
                values,
            )
            pkcollections.unchecked_del(
                values,
                _COOKIE_METHOD,
                _COOKIE_USER,
                _COOKIE_STATE,
            )
        return values
    # 'sru'/'uid' are legacy cookie keys for the user id
    u = values.get('sru') or values.get('uid')
    if not u:
        # normal case: new visitor, and no user/state; set logged out
        # and return all values
        values[_COOKIE_STATE] = _STATE_LOGGED_OUT
        return values
    # Migrate: 'sros'/'oauth_login_state' are legacy state keys
    o = values.get('sros') or values.get('oauth_login_state')
    s = _STATE_COMPLETE_REGISTRATION
    if o is None or o in ('anonymous', 'a'):
        m = METHOD_GUEST
    elif o in ('logged_in', 'li', 'logged_out', 'lo'):
        m = 'github'
        if 'i' not in o:
            # 'logged_out'/'lo' do not contain 'i'
            s = _STATE_LOGGED_OUT
    else:
        pkdlog('unknown cookie values, clearing, not migrating: {}', values)
        return {}
    # Upgrade cookie to current structure. Set the sentinel, too.
    values = {
        _COOKIE_USER: u,
        _COOKIE_METHOD: m,
        _COOKIE_STATE: s,
    }
    cookie.set_sentinel(values)
    pkdlog('migrated cookie={}', values)
    return values
def _auth_state():
    """Assemble the authState dict sent to the GUI for the current cookie."""
    import sirepo.simulation_db
    s = cookie.unchecked_get_value(_COOKIE_STATE)
    v = pkcollections.Dict(
        avatarUrl=None,
        displayName=None,
        guestIsOnlyMethod=not non_guest_methods,
        isGuestUser=False,
        isLoggedIn=_is_logged_in(s),
        isLoginExpired=False,
        jobRunModeMap=sirepo.simulation_db.JOB_RUN_MODE_MAP,
        method=cookie.unchecked_get_value(_COOKIE_METHOD),
        needCompleteRegistration=s == _STATE_COMPLETE_REGISTRATION,
        roles=[],
        userName=None,
        visibleMethods=visible_methods,
    )
    if 'sbatch' in v.jobRunModeMap:
        v.sbatchQueueMaxes=job.NERSC_QUEUE_MAX
    u = cookie.unchecked_get_value(_COOKIE_USER)
    if v.isLoggedIn:
        if v.method == METHOD_GUEST:
            # currently only method to expire login
            v.displayName = _GUEST_USER_DISPLAY_NAME
            v.isGuestUser = True
            v.isLoginExpired = _METHOD_MODULES[METHOD_GUEST].is_login_expired()
            v.needCompleteRegistration = False
            v.visibleMethods = non_guest_methods
        else:
            r = auth_db.UserRegistration.search_by(uid=u)
            if r:
                v.displayName = r.display_name
        v.roles = auth_db.UserRole.get_roles(u)
        # fill paymentPlan/upgradeToPlan from the roles
        _plan(v)
        _method_auth_state(v, u)
    if pkconfig.channel_in_internal_test():
        # useful for testing/debugging
        v.uid = u
    pkdc('state={}', v)
    return v
def _create_roles_for_new_user(uid, method):
    """Grant the default roles for a freshly created user."""
    roles = sirepo.auth_role.for_new_user(method == METHOD_GUEST)
    if roles:
        auth_db.UserRole.add_roles(uid, roles)
def _get_user():
    """Uid currently stored in the cookie (may be None)."""
    return cookie.unchecked_get_value(_COOKIE_USER)
def _init():
    """Parse config once; pick job-supervisor stub or full server initialization."""
    global cfg
    if cfg:
        # already initialized
        return
    cfg = pkconfig.init(
        methods=((METHOD_GUEST,), set, 'for logging in'),
        deprecated_methods=(set(), set, 'for migrating to methods'),
        logged_in_user=(None, str, 'Only for sirepo.job_supervisor'),
    )
    if cfg.logged_in_user:
        _init_logged_in_user()
    else:
        _init_full()
def _init_full():
    """Import every configured/deprecated method module and index the visible ones."""
    global visible_methods, valid_methods, non_guest_methods
    auth_db.init()
    p = pkinspect.this_module().__name__
    visible_methods = []
    valid_methods = cfg.methods.union(cfg.deprecated_methods)
    for n in valid_methods:
        # method modules live as submodules of this package, named after the method
        m = importlib.import_module(pkinspect.module_name_join((p, n)))
        _METHOD_MODULES[n] = m
        if m.AUTH_METHOD_VISIBLE and n in cfg.methods:
            visible_methods.append(n)
    visible_methods = tuple(sorted(visible_methods))
    non_guest_methods = tuple(m for m in visible_methods if m != METHOD_GUEST)
    cookie.auth_hook_from_header = _auth_hook_from_header
def _init_logged_in_user():
    """Job-supervisor mode: pin the user and stub out module-level functions."""
    global logged_in_user, user_dir_not_found
    # replaces the module-level functions via `global` for this process
    def logged_in_user(*args, **kwargs):
        return cfg.logged_in_user
    def user_dir_not_found(user_dir, *args, **kwargs):
        # can't raise in a lambda so do something like this
        raise AssertionError('user_dir={} not found'.format(user_dir))
    cfg.deprecated_methods = set()
    cfg.methods = set((METHOD_GUEST,))
def _is_logged_in(state=None):
    """Logged in is either needing to complete registration or done

    Args:
        state (str): logged in state [None: from cookie]
    Returns:
        bool: is in one of the logged in states
    """
    current = state or cookie.unchecked_get_value(_COOKIE_STATE)
    return current in (_STATE_COMPLETE_REGISTRATION, _STATE_LOGGED_IN)
def _login_user(module, uid):
    """Set up the cookie for logged in state
    If a deprecated or non-visible method, just login. Otherwise, check the db
    for registration.
    Args:
        module (module): what auth method
        uid (str): which uid
    """
    cookie.set_value(_COOKIE_USER, uid)
    cookie.set_value(_COOKIE_METHOD, module.AUTH_METHOD)
    s = _STATE_LOGGED_IN
    if module.AUTH_METHOD_VISIBLE and module.AUTH_METHOD in cfg.methods:
        # visible, configured methods require a display_name before being fully in
        u = user_registration(uid)
        if not u.display_name:
            s = _STATE_COMPLETE_REGISTRATION
    cookie.set_value(_COOKIE_STATE, s)
    _set_log_user()
def _method_auth_state(values, uid):
    """Fill userName/avatarUrl on *values* from the current method's UserModel."""
    if values.method not in _METHOD_MODULES:
        pkdlog('auth state method: "{}" not present in supported methods: {}', values.method, _METHOD_MODULES.keys())
        return
    m = _METHOD_MODULES[values.method]
    u = _method_user_model(m, uid)
    if not u:
        # method has no UserModel, or no row for this uid
        return
    values.userName = u.user_name
    if hasattr(m, 'avatar_uri'):
        values.avatarUrl = m.avatar_uri(u, _AVATAR_SIZE)
def _method_user_model(module, uid):
if not hasattr(module, 'UserModel'):
return None
return module.UserModel.search_by(uid=uid)
def _parse_display_name(value):
res = value.strip()
assert res, \
'invalid post data: displayName={}'.format(value)
return res
def _plan(data):
    """Set data.paymentPlan and data.upgradeToPlan from data.roles."""
    roles = data.roles
    if sirepo.auth_role.ROLE_PAYMENT_PLAN_ENTERPRISE in roles:
        plan, upgrade = _PAYMENT_PLAN_ENTERPRISE, None
    elif sirepo.auth_role.ROLE_PAYMENT_PLAN_PREMIUM in roles:
        plan, upgrade = _PAYMENT_PLAN_PREMIUM, _PAYMENT_PLAN_ENTERPRISE
    else:
        plan, upgrade = _PAYMENT_PLAN_BASIC, _PAYMENT_PLAN_PREMIUM
    data.paymentPlan = plan
    data.upgradeToPlan = upgrade
def _set_log_user():
    """Expose '<state>-<uid>' (or '-') as the uWSGI log variable for this request."""
    a = sirepo.util.flask_app()
    if not a or not a.sirepo_uwsgi:
        # Only works for uWSGI (service.uwsgi). sirepo.service.http uses
        # the limited http server for development only. This uses
        # werkzeug.serving.WSGIRequestHandler.log which hardwires the
        # common log format to: '%s - - [%s] %s\n'. Could monkeypatch
        # but we only use the limited http server for development.
        return
    u = _get_user()
    if u:
        u = cookie.unchecked_get_value(_COOKIE_STATE) + '-' + u
    else:
        u = '-'
    a.sirepo_uwsgi.set_logvar(_UWSGI_LOG_KEY_USER, u)
def _validate_method(module, sim_type=None):
    """Redirect to loginFail unless *module* is a configured auth method."""
    if module.AUTH_METHOD in valid_methods:
        return None
    pkdlog('invalid auth method={}'.format(module.AUTH_METHOD))
    # raises, so this function never returns on the invalid path
    login_fail_redirect(sim_type, module, 'invalid-method', reload_js=True)
# module initialization: parse config and build the method-module registry at import time
_init()
| 1.820313 | 2 |
basic.py | Cheaterman/basic | 4 | 12760026 | <filename>basic.py<gh_stars>1-10
import collections
import math
from sly.lex import Lexer, Token
from sly.yacc import Parser
# AST node types produced by BasicParser:
# Variable: a named BASIC variable reference
Variable = collections.namedtuple('Variable', ['name'])
# Expression: evaluated via BasicInterpreter.evaluate; 'operation' names an interpreter method
Expression = collections.namedtuple('Expression', ['operation', 'arguments'])
# Statement: executed via BasicInterpreter.execute; 'operation' names an interpreter method
Statement = collections.namedtuple('Statement', ['operation', 'arguments'])
class BasicLexer(Lexer):
    """Tokenizer for BASIC source lines.

    Built on sly.Lexer: the bare names in `tokens` and the `@_` decorators
    are resolved by sly's metaclass magic, so they are intentionally
    undefined at normal Python scope.
    """
    tokens = {
        ID,
        REM,
        PRINT,
        IF,
        THEN,
        ELSE,
        LIST,
        RUN,
        GOTO,
        STRING,
        LINENO,
        NUMBER,
        PLUS,
        MINUS,
        MULTIPLY,
        DIVIDE,
        EQUALS,
        COLON,
    }
    ignore = ' '
    PLUS = r'\+'
    MINUS = r'-'
    MULTIPLY = r'\*'
    DIVIDE = r'/'
    EQUALS = r'='
    COLON = r':'
    # REM or ' starts a comment that swallows the rest of the line
    REM = r"(?:REM|').*"
    PRINT = r'PRINT'
    IF = r'IF'
    THEN = r'THEN'
    ELSE = r'ELSE'
    LIST = r'LIST'
    RUN = r'RUN'
    GOTO = r'GOTO'
    ID = r'[A-Za-z_][A-Za-z0-9_]*'
    @_(r'(?:[0-9]+(?:\.[0-9]*)?|\.[0-9]+)')
    def NUMBER(self, token):
        # A number preceded by non-space text is a numeric literal; a number
        # at the start of the line is a BASIC line number (LINENO).
        if(
            self.index
            and self.text[:token.index] != token.index * ' '
        ):
            # literal: collapse float to int when exactly integral
            float_value = float(token.value)
            int_value = int(float_value)
            token.value = (
                int_value
                if math.isclose(int_value, float_value)
                else float_value
            )
        else:
            if '.' not in token.value:
                token.value = int(token.value)
            else:
                # line numbers are integers: keep the integer part and rewind
                # the lexer so the '.' and what follows are re-scanned
                dot_index = token.value.index('.')
                self.index -= len(token.value) - dot_index
                token.value = int(token.value[:dot_index])
            token.type = 'LINENO'
            if self.text[self.index:].strip(' '):
                # remaining text is captured raw by LineLexer (program entry)
                self.begin(LineLexer)
        return token
    @_(r'"[^"]*"?')
    def STRING(self, token):
        # strip the quotes; tolerates an unterminated string at end of line
        token.value = token.value[1:]
        if token.value.endswith('"'):
            token.value = token.value[:-1]
        return token
class LineLexer(Lexer):
    """Secondary lexer that captures the raw remainder of a numbered program line."""
    tokens = {LINE}
    ignore = ' '
    @_(r'.+')
    def LINE(self, token):
        # hand control back to the main lexer after swallowing the line
        self.begin(BasicLexer)
        return token
class BasicParser(Parser):
    """Grammar for BASIC statements and expressions (sly.Parser).

    Productions return Statement/Expression/Variable AST nodes; the `@_`
    decorator and the bare token names in `precedence` are resolved by
    sly's metaclass magic.
    """
    tokens = BasicLexer.tokens.union(LineLexer.tokens)
    precedence = (
        ('nonassoc', IF, THEN),
        ('left', COLON),
        ('nonassoc', ELSE),
        ('left', EQUALS),
        ('left', CREATE_EXPRS, APPEND_EXPRS),
        ('left', PLUS, MINUS),
        ('left', MULTIPLY, DIVIDE),
        ('nonassoc', UNARY_MINUS),
    )
    def __init__(self, interpreter):
        # kept for callers; productions below do not use it directly
        self.interpreter = interpreter
    @_('statement')
    def statements(self, parsed):
        if parsed.statement:
            return [parsed.statement]
    @_('statements COLON statement')
    def statements(self, parsed):
        # colon-separated statements accumulate into one list
        parsed.statements.append(parsed.statement)
        return parsed.statements
    @_(
        'statements COLON empty',
        'empty COLON statements',
    )
    def statements(self, parsed):
        return parsed.statements
    @_('')
    def empty(self, parsed):
        pass
    @_('LINENO LINE')
    def statement(self, parsed):
        # numbered line with content: store it in the program
        return Statement('add_program_line', (parsed.LINENO, parsed.LINE))
    @_('LINENO')
    def statement(self, parsed):
        # bare line number deletes that program line
        return Statement('remove_program_line', [parsed.LINENO])
    @_('IF expr THEN statements')
    def statement(self, parsed):
        return Statement('conditional', (parsed.expr, parsed.statements))
    @_('IF expr THEN statements ELSE statement')
    def statement(self, parsed):
        return Statement(
            'conditional',
            (parsed.expr, parsed.statements, parsed.statement),
        )
    @_('variable EQUALS expr')
    def statement(self, parsed):
        return Statement('set_variable', (parsed.variable.name, parsed.expr))
    @_('REM')
    def statement(self, parsed):
        return Statement('noop', [])
    @_('PRINT exprs')
    def statement(self, parsed):
        return Statement('print', parsed.exprs)
    @_('LIST')
    def statement(self, parsed):
        return Statement('list', [])
    @_('RUN')
    def statement(self, parsed):
        return Statement('run_program', [])
    @_('GOTO expr')
    def statement(self, parsed):
        return Statement('goto', [parsed.expr])
    @_('expr %prec CREATE_EXPRS')
    def exprs(self, parsed):
        return [parsed.expr]
    @_('exprs expr %prec APPEND_EXPRS')
    def exprs(self, parsed):
        parsed.exprs.append(parsed.expr)
        return parsed.exprs
    @_('variable EQUALS expr')
    def expr(self, parsed):
        # '=' inside an expression is comparison, not assignment
        return Expression(
            'compare_variable',
            [parsed.variable.name, parsed.expr],
        )
    @_('MINUS expr %prec UNARY_MINUS')
    def expr(self, parsed):
        return Expression('negative', [parsed.expr])
    @_('expr PLUS expr')
    def expr(self, parsed):
        return Expression('add', [parsed.expr0, parsed.expr1])
    @_('expr MINUS expr')
    def expr(self, parsed):
        return Expression('subtract', [parsed.expr0, parsed.expr1])
    @_('expr MULTIPLY expr')
    def expr(self, parsed):
        return Expression('multiply', [parsed.expr0, parsed.expr1])
    @_('expr DIVIDE expr')
    def expr(self, parsed):
        return Expression('divide', [parsed.expr0, parsed.expr1])
    @_(
        'NUMBER',
        'STRING',
    )
    def expr(self, parsed):
        # literals are passed through as plain Python values
        return parsed[0]
    @_('variable')
    def expr(self, parsed):
        return Expression('get_variable', [parsed.variable.name])
    @_('ID')
    def variable(self, parsed):
        return Variable(parsed.ID)
    def error(self, token):
        if not token:
            raise EOFError('Parse error in input, unexpected EOF')
        raise SyntaxError(
            f'Syntax error at line {token.lineno}, token={token.type}'
        )
class BasicInterpreter:
    """Executes BASIC AST nodes; Statement/Expression operations dispatch to
    same-named methods on this class via getattr."""
    def __init__(self):
        self.lexer = BasicLexer()
        self.parser = BasicParser(self)
        # undefined variables read as 0 (defaultdict(int))
        self.variables = collections.defaultdict(int)
        # stored program: {lineno: raw source line}
        self.program = {}
        self.running_program = False
    def interpret(self, line):
        """Parse and execute one source line (immediate mode or program line)."""
        try:
            statements = self.parser.parse(self.lexer.tokenize(line))
        except EOFError:
            raise SyntaxError('Unexpected EOF')
        for statement in statements:
            self.execute(*statement)
            # these operations transfer control; drop the rest of the line
            if statement.operation in ('list', 'run_program', 'goto'):
                break
    def execute(self, instruction, arguments):
        # dispatch: the operation string names a method on this class
        return getattr(self, instruction)(*arguments)
    def evaluate(self, expression):
        """Iteratively evaluate an Expression tree (post-order, no recursion).

        Child results are written back in place into the parent's argument
        list as subtrees collapse.
        """
        evaluation_stack = collections.deque()
        argument_index_stack = collections.deque()
        node = expression
        last_visited_node = None
        while evaluation_stack or node is not None:
            if node is not None:
                # descend into the leftmost unvisited child
                evaluation_stack.append(node)
                if isinstance(node, Expression):
                    argument_index_stack.append(0)
                    node = node.arguments[0]
                else:
                    node = None
            else:
                next_node = evaluation_stack[-1]
                if(
                    isinstance(next_node, Expression)
                    and len(next_node.arguments) > 1
                    and last_visited_node != next_node.arguments[1]
                ):
                    # right child not yet evaluated: descend into it
                    argument_index_stack.append(1)
                    node = next_node.arguments[1]
                elif argument_index_stack:
                    # collapse the finished child into its parent's arguments
                    evaluation_stack[-1].arguments[
                        argument_index_stack.pop()
                    ] = last_visited_node = self.visit(evaluation_stack.pop())
                else:
                    return self.visit(next_node)
    def visit(self, node):
        """Evaluate one node; collapse floats that are exactly integral to int."""
        return_value = node
        if isinstance(node, Expression):
            return_value = self.execute(*node)
        if isinstance(return_value, float):
            int_return_value = int(return_value)
            return_value = (
                int_return_value
                if math.isclose(int_return_value, return_value)
                else return_value
            )
        return return_value
    def negative(self, a):
        return -a
    def add(self, a, b):
        return a + b
    def subtract(self, a, b):
        return a - b
    def multiply(self, a, b):
        return a * b
    def divide(self, a, b):
        return a / b
    def get_variable(self, name):
        return self.variables.get(name, 0)
    def set_variable(self, name, value):
        self.variables[name] = self.evaluate(value)
    def compare_variable(self, name, value):
        # -1 is BASIC's "true", 0 is "false"
        return -1 if self.variables[name] == value else 0
    def add_program_line(self, lineno, line):
        self.program[lineno] = line
    def remove_program_line(self, lineno):
        self.program.pop(lineno, None)
    def run_program(self, lineno=None):
        """Run the stored program, optionally starting at *lineno*."""
        if not self.program:
            return
        self.running_program = True
        linenos = sorted(self.program)
        current_line_index = 0
        self.current_program_lineno = linenos[0]
        if lineno is not None:
            current_line_index = linenos.index(lineno)
            self.current_program_lineno = lineno
        while True:
            if self.current_program_lineno is not None:
                # a GOTO set an explicit next line
                current_line_index = linenos.index(self.current_program_lineno)
            else:
                try:
                    current_line_index += 1
                    self.current_program_lineno = linenos[current_line_index]
                except IndexError:
                    # ran past the last line: program ends
                    break
            current_program_line = self.program[self.current_program_lineno]
            self.last_program_lineno = self.current_program_lineno
            # cleared so the loop advances sequentially unless GOTO resets it
            self.current_program_lineno = None
            self.interpret(current_program_line)
        self.running_program = False
    def goto(self, expr):
        try:
            int(expr)
        except ValueError:
            raise SyntaxError('Type mismatch error')
        if not self.running_program:
            # immediate-mode GOTO starts the program at that line
            self.run_program(lineno=int(expr))
        else:
            self.current_program_lineno = int(expr)
    def conditional(self, expr, then_statements, else_statement=None):
        if self.evaluate(expr):
            for statement in then_statements:
                self.execute(*statement)
        elif else_statement:
            self.execute(*else_statement)
    def noop(self):
        pass
    def print(self, *args):
        print(*(self.evaluate(arg) for arg in args))
    def list(self):
        for lineno, line in sorted(self.program.items()):
            print(f'{lineno} {line}')
| 2.890625 | 3 |
parse_papers.py | WideOpen/datawatch | 9 | 12760027 | <reponame>WideOpen/datawatch<gh_stars>1-10
# coding: utf-8
import tarfile
import xml.etree.ElementTree as ET
import datetime
import tqdm
import re
import sqlite3
import unicodedata
from contextlib import closing
import os
import logging
# log parse warnings to a file so long batch runs can be audited afterwards
logging.basicConfig(filename='parse_papers.log', level=logging.DEBUG)
def innertext(el):
    """Concatenate all text within *el*'s subtree (tags stripped)."""
    fragments = el.itertext()
    return "".join(fragments)
def get_meta_tag(root, tagtype):
    """Return the text of the article-id element whose pub-id-type is *tagtype*.

    Prefers ids nested under article-meta, falling back to a document-wide
    search.  Returns None when absent or ambiguous (multiple matches are
    logged and treated as missing).
    """
    matches = root.findall(
        ".//article-meta//article-id[@pub-id-type='%s']" % tagtype)
    if not matches:
        matches = root.findall(".//article-id[@pub-id-type='%s']" % tagtype)
    if len(matches) > 1:
        logging.warning("Warning, more than one %s detected" %
                        tagtype, extra={"n": len(matches)})
        return None
    if matches:
        return matches[0].text
    return None
def get_pmid(root):
    """PubMed id of the article, or None."""
    return get_meta_tag(root, "pmid")
def get_pmc(root):
    """PubMed Central id of the article, or None."""
    return get_meta_tag(root, "pmc")
def get_doi(root):
    """DOI of the article, or None."""
    return get_meta_tag(root, "doi")
def get_title(root):
    """Article title text, or None when absent/ambiguous (ambiguity is logged)."""
    els = root.findall(".//article-meta//title-group/article-title")
    if len(els) == 0:
        # fall back to a document-wide search
        els = root.findall(".//title-group/article-title")
    if len(els) > 1:
        logging.warning("Warning, more than one title detected",
                        extra={"n": len(els)})
        return None
    elif len(els) == 1:
        return innertext(els[0])
    else:
        return None
def get_journal_nlm(root):
    """NLM title abbreviation of the journal, or None when absent/ambiguous."""
    els = root.findall(".//journal-id[@journal-id-type='nlm-ta']")
    if len(els) > 1:
        logging.warning("Warning, more than one NLM detected",
                        extra={"n": len(els)})
        return None
    elif len(els) == 1:
        return innertext(els[0])
    else:
        return None
def nametotext(el):
    """Join the text of each child of a <name> element with ', '."""
    return ", ".join(map(innertext, el))
def get_authors(root):
    """Author names of the article as formatted strings.

    NOTE(review): Python 2 code -- map returns a list here; under Python 3
    this would be a lazy iterator, which matters to json/db consumers.
    """
    authors = root.findall(
        ".//contrib-group/contrib[@contrib-type='author']/name")
    return map(nametotext, authors)
def parse_date(el, allow_month_only=False):
    """Parse a JATS date element (<year>/<month>/<day> children) into a date.

    Args:
        el: date element, or None
        allow_month_only (bool): substitute day=1 when the <day> child is missing
    Returns:
        datetime.date, or None when the element is missing or unparseable
        (parse failures are logged)
    """
    if el is None:
        return None
    year = el.find("year")
    month = el.find("month")
    day = el.find("day")
    if allow_month_only and day is None:
        day = 1
    if year is None or month is None or day is None:
        return None
    try:
        year = int(innertext(year))
        month = int(innertext(month))
        # day is either the <day> element or the int 1 substituted above;
        # only an element needs its text extracted
        if day != 1:
            day = int(innertext(day))
        return datetime.date(year, month, day)
    except Exception as e:
        logging.warning("parse_date exception", exc_info=e,
                        extra={"inner": innertext(el)})
        return None
    # BUG FIX: removed unreachable `return ""` -- both the try and except
    # paths above return, so it was dead code (and the wrong type anyway)
def get_date(root):
    """Best-effort publication date: epub, then pmc-release, then pub, then ppub."""
    result = parse_date(root.find(".//pub-date[@pub-type='epub']"))
    if result is not None:
        return result
    result = parse_date(root.find(".//pub-date[@pub-type='pmc-release']"))
    if result is not None:
        return result
    result = parse_date(root.find(".//pub-date[@date-type='pub']"))
    if result is not None:
        return result
    # print-publication dates often lack a day; substitute day=1
    result = parse_date(
        root.find(".//pub-date[@pub-type='ppub']"), allow_month_only=True)
    if result is not None:
        return result
    tmp = root.findall(".//pub-date")
    logging.warning("Failed to parse date", extra={
                    "data": map(lambda x: x.attrib, tmp)})
    return None
def parse_file(data):
    """Parse one JATS XML document into a flat metadata dict."""
    root = ET.fromstring(data)
    result = {"date": get_date(root),
              "title": get_title(root),
              "authors": get_authors(root),
              "pmc": get_pmc(root),
              "pmid": get_pmid(root),
              "doi": get_doi(root),
              "journal": get_journal_nlm(root)}
    return result
# accession-number patterns: GEO series (GSE...) and SRA experiments (SRX...)
gse_expr = re.compile("GSE[0-9]+")
srx_expr = re.compile("SRX[0-9]+")
all_accessions_regexps = [gse_expr, srx_expr]
def find_accs(text):
    """Every accession string in *text*, in regex order (duplicates kept)."""
    return [
        acc
        for pattern in all_accessions_regexps
        for acc in pattern.findall(text)
    ]
def process_tar_file(fn):
    """Scan one PMC tar archive; parse metadata for members mentioning accessions.

    NOTE(review): Python 2 code -- extractfile(...).read() yields a str here;
    under Python 3 it would be bytes and the str regexes would not match.
    """
    tf = tarfile.open(fn, "r")
    all_results = []
    for m in tqdm.tqdm(tf):
        if m.isfile():
            data = tf.extractfile(m).read()
            all_accs = find_accs(data)
            if len(all_accs) == 0:
                # skip papers that mention no accession at all
                continue
            result = parse_file(data)
            result["gses"] = list(set(all_accs))
            all_results.append(result)
    tf.close()
    return all_results
def normalize(s):
    """Flatten a name/title to a single-line byte string.

    NOTE(review): Python 2 code -- `unicode` is the py2 builtin; unicode
    input is NFKD-decomposed and reduced to ASCII before whitespace cleanup.
    """
    if isinstance(s, unicode):
        s = unicodedata.normalize('NFKD', s).encode('ASCII', 'ignore')
    s = s.replace("\n", " ").replace("  ", " ")
    return s
def setup_db(dbcon):
    """Create the schema (papers/authors/datasets plus link tables) and indexes."""
    # synchronous=OFF trades durability for bulk-load speed
    dbcon.executescript("""
    -- core tables
    create table authors(authorid integer primary key, name text);
    create table papers(paperid integer primary key, title text, doi text UNIQUE, pmid integer UNIQUE, pmc integer UNIQUE, published_on date, journal_nlm text);
    create table datasets(acc text primary key, title text, first_public_on date, first_submitted_on date, pmid_ref integer);
    create table authorof(authorid int not null, paperid int not null);
    create table mentions(paperid int not null, acc text not null);
    create index authors_name_idx on authors(name);
    create index mentions_paperid_idx on mentions(paperid);
    create index mentions_acc_idx on mentions(acc);
    create index authorof_authorid_idx on authorof(authorid);
    create index authorof_paperid_idx on authorof(paperid);
    PRAGMA synchronous=OFF;
    """)
def get_author_id(dbcon, author):
    """Return the authorid for *author*, inserting a new row when unseen.

    The commit is left to the caller (batched per archive).
    """
    author = normalize(author)
    with closing(dbcon.cursor()) as cur:
        cur.execute("SELECT authorid from authors where name=?", (author,))
        data = cur.fetchall()
        if len(data) == 0:
            cur.execute("INSERT INTO authors(name) VALUES (?)", (author,))
            # lastrowid of the INSERT is the new authorid
            data = cur.lastrowid
            # dbcon.commit()
            return data
        else:
            return data[0][0]
def try_find_paper_by_column(dbcon, paper, column):
    """Look up a paperid by one identifier column (pmc/pmid/doi).

    Returns None when the paper dict has no value for *column* or no row
    matches.
    """
    value = paper[column]
    if value is None:
        return None
    with closing(dbcon.cursor()) as cur:
        # column comes from a fixed internal list, not user input
        cur.execute("select paperid from papers where " + column + "=?",
                    (value,))
        rows = cur.fetchall()
    return rows[0][0] if rows else None
def try_find_paper(dbcon, paper):
    """Find an existing paperid by pmc, then pmid, then doi; None when absent."""
    for column in ("pmc", "pmid", "doi"):
        paperid = try_find_paper_by_column(dbcon, paper, column)
        if paperid is not None:
            return paperid
    return None
def try_insert_paper(dbcon, paper):
    """Insert *paper* (with its authors and accession mentions) unless present.

    Returns:
        int: paperid of the existing or newly inserted row
    """
    paperid = try_find_paper(dbcon, paper)
    if paperid is not None:
        return paperid
    # need to insert
    authorids = map(lambda x: get_author_id(dbcon, x), paper["authors"])
    with closing(dbcon.cursor()) as cur:
        cur.execute("insert into papers(title, doi, pmid, pmc, published_on, journal_nlm) values (?, ?, ?, ?, ?, ?)",
                    (paper["title"], paper["doi"], paper["pmid"], paper["pmc"], paper["date"], paper["journal"]))
        paperid = cur.lastrowid
        for aid in authorids:
            cur.execute(
                "insert into authorof(authorid, paperid) values (?, ?)", (aid, paperid))
        for gse in paper["gses"]:
            cur.execute(
                "insert into mentions(paperid, acc) values (?, ?)", (paperid, gse))
    # dbcon.commit()
    return paperid
def process_tar_to_db(dbcon, fn):
    """Parse one archive and commit all its papers in a single transaction.

    NOTE(review): Python 2 print statements.
    """
    print "Processing", fn
    all_results = process_tar_file(fn)
    print "Trying to add", len(all_results), "papers to the database"
    for res in tqdm.tqdm(all_results):
        try_insert_paper(dbcon, res)
    dbcon.commit()
def main():
    """Create the sqlite db and load every .tar.gz archive under rawdata/."""
    dbcon = sqlite3.connect("data/odw.sqlite")
    setup_db(dbcon)
    basedir = "rawdata/"
    files = os.listdir(basedir)
    for fn in files:
        if fn.endswith("tar.gz"):
            process_tar_to_db(dbcon, basedir + fn)
    dbcon.close()
    print "Done"
# script entry point
if __name__ == "__main__":
    main()
| 2.046875 | 2 |
tests/vault/testDetokenize.py | skyflowapi/skyflow-python | 2 | 12760028 | <filename>tests/vault/testDetokenize.py<gh_stars>1-10
import unittest
import os
from skyflow.vault._detokenize import getDetokenizeRequestBody, createDetokenizeResponseBody
from skyflow.errors._skyflowerrors import SkyflowError, SkyflowErrorCodes, SkyflowErrorMessages
from skyflow.vault._client import Client, Configuration
from skyflow.service_account import generate_bearer_token
from dotenv import dotenv_values
import warnings
import json
import asyncio
class TestDetokenize(unittest.TestCase):
    def setUp(self) -> None:
        """Load .env configuration, build a Skyflow client, and prepare the
        mock-future/event-loop harness used by the response-body tests."""
        self.envValues = dotenv_values(".env")
        self.dataPath = os.path.join(os.getcwd(), 'tests/vault/data/')
        self.testToken = self.envValues["DETOKENIZE_TEST_TOKEN"]
        self.tokenField = {
            "token": self.envValues["DETOKENIZE_TEST_TOKEN"]
        }
        self.data = {"records": [self.tokenField]}
        self.mocked_futures = []
        self.event_loop = asyncio.new_event_loop()
        def tokenProvider():
            # bearer token generated from the service-account credentials file
            token, _ = generate_bearer_token(
                self.envValues["CREDENTIALS_FILE_PATH"])
            return token
        config = Configuration(
            self.envValues["VAULT_ID"], self.envValues["VAULT_URL"], tokenProvider)
        self.client = Client(config)
        # silence 'unclosed' ResourceWarnings from the event loop/sessions
        warnings.filterwarnings(
            action="ignore", message="unclosed", category=ResourceWarning)
        return super().setUp()
    def add_mock_response(self, response, statusCode, encode=True):
        """Queue an already-completed Future holding an (HTTP body, status) pair."""
        future = asyncio.Future(loop=self.event_loop)
        if encode:
            # simulate a JSON response body as bytes
            future.set_result((json.dumps(response).encode(), statusCode))
        else:
            future.set_result((response, statusCode))
        # future.done() returns a bool that is discarded; the result above
        # already marks the future complete
        future.done()
        self.mocked_futures.append(future)
    def getDataPath(self, file):
        """Path of a JSON fixture under self.dataPath (parameter shadows builtin `file`)."""
        return self.dataPath + file + '.json'
    def testGetDetokenizeRequestBodyWithValidBody(self):
        """A valid token field maps to the expected detokenizationParameters body."""
        body = getDetokenizeRequestBody(self.tokenField)
        expectedOutput = {
            "detokenizationParameters": [{
                "token": self.testToken
            }]
        }
        self.assertEqual(body, expectedOutput)
    def testDetokenizeNoRecords(self):
        """Missing 'records' key raises RECORDS_KEY_ERROR."""
        invalidData = {"invalidKey": self.tokenField}
        try:
            self.client.detokenize(invalidData)
            self.fail('Should have thrown an error')
        except SkyflowError as e:
            self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value)
            self.assertEqual(
                e.message, SkyflowErrorMessages.RECORDS_KEY_ERROR.value)
    def testDetokenizeRecordsInvalidType(self):
        """'records' that is not a list raises INVALID_RECORDS_TYPE."""
        invalidData = {"records": "invalid"}
        try:
            self.client.detokenize(invalidData)
            self.fail('Should have thrown an error')
        except SkyflowError as e:
            self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value)
            self.assertEqual(
                e.message, SkyflowErrorMessages.INVALID_RECORDS_TYPE.value % (str))
def testDetokenizeNoToken(self):
invalidData = {"records": [{"invalid": "invalid"}]}
try:
self.client.detokenize(invalidData)
self.fail('Should have thrown an error')
except SkyflowError as e:
self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value)
self.assertEqual(
e.message, SkyflowErrorMessages.TOKEN_KEY_ERROR.value)
def testDetokenizeTokenInvalidType(self):
invalidData = {"records": [{"token": ["invalid"]}]}
try:
self.client.detokenize(invalidData)
self.fail('Should have thrown an error')
except SkyflowError as e:
self.assertEqual(e.code, SkyflowErrorCodes.INVALID_INPUT.value)
self.assertEqual(
e.message, SkyflowErrorMessages.INVALID_TOKEN_TYPE.value % (list))
def testResponseBodySuccess(self):
response = {"records": [{"token": "abc", "value": "secret"}]}
self.add_mock_response(response, 200)
res, partial = createDetokenizeResponseBody(self.mocked_futures)
self.assertEqual(partial, False)
self.assertEqual(res, {"records": response["records"], "errors": []})
def testResponseBodyPartialSuccess(self):
success_response = {"records": [{"token": "abc", "value": "secret"}]}
error_response = {"error": {"http_code": 404, "message": "not found"}}
self.add_mock_response(success_response, 200)
self.add_mock_response(error_response, 404)
res, partial = createDetokenizeResponseBody(self.mocked_futures)
self.assertTrue(partial)
self.assertEqual(res["records"], success_response["records"])
errors = res["errors"]
self.assertIsNotNone(errors)
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0]["error"]["code"],
error_response["error"]["http_code"])
self.assertEqual(
errors[0]["error"]["description"], error_response["error"]["message"])
def testResponseNotJson(self):
response = "not a valid json".encode()
self.add_mock_response(response, 200, encode=False)
try:
createDetokenizeResponseBody(self.mocked_futures)
except SkyflowError as error:
expectedError = SkyflowErrorMessages.RESPONSE_NOT_JSON
self.assertEqual(error.code, 200)
self.assertEqual(error.message, expectedError.value %
response.decode('utf-8'))
| 2.140625 | 2 |
fragscapy/modgenerator.py | Fratso/Fragscapy | 56 | 12760029 | """Generator for modifications and modifications lists.
The objects in this module are generator used to generate the `Mod`
and the `ModList` based on precise parametrization.
The `ModOption`-derived classes are used to generates 1 option (e.g. an
integer, a string, ...) based on a type of option (e.g. a sequence, a range,
...).
The `ModGenerator` contains multiple `ModOption` and generates 1 `Mod`
object by enumerating all the different combination of options.
The `ModListGenerator` contains multiple `ModGenerator` and generates 1
`ModList` object by enumerating all the different combination of mods.
"""
import abc
import importlib
import os
import inflection
from fragscapy.modlist import ModList
from fragscapy.modifications.mod import Mod
# Package where the modifications are stored (and loaded from)
MOD_PACKAGE = 'fragscapy.modifications'
# Directory where the modifications are stored
MOD_DIR = 'modifications'
class ModGeneratorError(ValueError):
    """Raised when a mod/modlist generator is given invalid parameters
    or asked for an out-of-bounds configuration index."""
class ModOption(abc.ABC):
    """Base class for generators of a single modification option.

    A concrete subclass enumerates every possible value of one
    constructor argument of a `Mod` (an integer range, a sequence of
    strings, a constant, ...). Subclasses must implement
    `get_option(i)` — the i-th value, deterministic in `i` — and
    `nb_options()` — how many values exist. Instances then behave like
    read-only sequences: they can be indexed, measured with `len()` and
    iterated.

    Args:
        mod_name: Name of the owning modification (error messages only).
        opt_name: Name of this option (error messages only).

    Attributes:
        mod_name: Name of the owning modification.
        opt_name: Name of this option.

    Examples:
        >>> for opt in mod_option:  # usable in for-loops
        ...     print(opt)
        >>> n = len(mod_option)     # number of distinct values
        >>> opt = mod_option[n-1]   # fetch the last value
    """

    def __init__(self, mod_name, opt_name):
        self.mod_name = mod_name
        self.opt_name = opt_name

    @abc.abstractmethod
    def get_option(self, i):
        """Return the i-th value of this option.

        The result is deterministic: the same `i` always yields the
        same value.

        Args:
            i: the number of the configuration.

        Raises:
            ModGeneratorError: `i` is out of bounds (i<0 or i>=len).

        Returns:
            The i-th option value.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def nb_options(self):
        """Return how many distinct values this option can take."""
        raise NotImplementedError

    def inbound_or_raise(self, i):
        """Raise a `ModGeneratorError` if `i` is not a valid index."""
        if not isinstance(i, int):
            self._raise_error("Index is not an integer, got '{}'".format(i))
        upper = self.nb_options()
        if not 0 <= i < upper:
            self._raise_error(
                "Index should be between 0 and {}, got '{}'".format(
                    upper - 1, i
                )
            )

    def _raise_error(self, msg):
        """Raise a `ModGeneratorError` prefixed with option/mod context."""
        raise ModGeneratorError("Error with option '{}' of mod '{}': {}".format(
            self.opt_name, self.mod_name, msg))

    def __len__(self):
        return self.nb_options()

    def __getitem__(self, i):
        return self.get_option(i)

    def __iter__(self):
        for i in range(self.nb_options()):
            yield self.get_option(i)

    def __str__(self):
        return "{}".format(self.opt_name)

    def __repr__(self):
        return "{}".format(self.__class__.__name__)
class ModOptionRange(ModOption):
    """Modification option generator for an inclusive range of integers.

    Its behavior mirrors the built-in `range` except that the upper bound
    is *included*. The argument is a list of 1, 2 or 3 integers
    (positives or negatives are supported). With 1 integer the range goes
    from 0 to arg[0] with a step of 1; with 2 integers from arg[0] to
    arg[1] with a step of 1; with 3 integers from arg[0] to arg[1] with a
    step of arg[2].

    Args:
        mod_name: The name of the mod (used only for error messages).
        args: A list of 1, 2 or 3 integers.

    Attributes:
        mod_name: The name of the modification.
        opt_name: The name of this option ('range').
        start: The start of the range (inclusive).
        stop: The stop of the range (inclusive).
        step: The step of the range.

    Raises:
        ModGeneratorError: See the message for details.

    Examples:
        >>> list(ModOptionRange("foo", [1]))
        [0, 1]
        >>> list(ModOptionRange("foo", [5,8]))
        [5, 6, 7, 8]
        >>> list(ModOptionRange("foo", [-10,-1]))
        [-10, -9, -8, -7, -6, -5, -4, -3, -2, -1]
        >>> list(ModOptionRange("foo", [-10,-1, 3]))
        [-10, -7, -4, -1]
    """

    def __init__(self, mod_name, args):
        super(ModOptionRange, self).__init__(mod_name, "range")
        # Parsing of options
        self.start = 0
        self.stop = None
        self.step = 1
        if not args:
            self._raise_error("Too few arguments, got none")
        elif len(args) == 1:
            self.stop = self._int(args, 0)
        elif len(args) == 2:
            self.start = self._int(args, 0)
            self.stop = self._int(args, 1)
        elif len(args) == 3:
            self.start = self._int(args, 0)
            self.stop = self._int(args, 1)
            self.step = self._int(args, 2)
        else:
            self._raise_error("Too much arguments, got '{}'".format(args))
        # Checking validity of options
        if self.step == 0:
            self._raise_error("'step' can't be 0")
        if self.step > 0 and self.start > self.stop:
            self._raise_error(
                "'start' ('{}') can't be bigger than 'stop' ('{}')".format(
                    self.start, self.stop
                )
            )
        if self.step < 0 and self.start < self.stop:
            self._raise_error(
                "'start' ('{}') can't be smaller than 'stop' ('{}')".format(
                    self.start, self.stop
                )
            )

    def _int(self, l, i):
        """Cast the i-th value of `l` to an integer or raise a
        ModGeneratorError if not possible."""
        try:
            return int(l[i])
        # TypeError too: int(None) or int([]) raises TypeError, not ValueError.
        except (TypeError, ValueError):
            self._raise_error(
                # Bug fix: report the offending value l[i], not always l[0].
                "Can't cast argument n°{} to int, got '{}'".format(i, l[i])
            )

    def get_option(self, i):
        """See `ModOption.get_option`."""
        self.inbound_or_raise(i)
        return self.start + self.step * i

    def nb_options(self):
        """See `ModOption.nb_options`. Both bounds are inclusive, hence
        the '+ 1'."""
        return (self.stop - self.start)//self.step + 1

    def __str__(self):
        return "range {} {} {}".format(self.start, self.stop, self.step)

    def __repr__(self):
        return "ModOptionRange({}, [{}, {}, {}])".format(
            self.mod_name, self.start, self.stop, self.step
        )
class ModOptionSequenceStr(ModOption):
    """Option generator over a fixed sequence of strings.

    The values produced are exactly the strings given in `args`, in the
    same order.

    Args:
        mod_name: The name of the mod (used only for error messages).
        args: Non-empty list of strings.

    Attributes:
        mod_name: The name of the modification.
        opt_name: The name of this option ('seq_str').
        seq: The sequence of strings.

    Raises:
        ModGeneratorError: The sequence is empty.

    Examples:
        >>> list(ModOptionSequenceStr("foo", ["a", "b", "c", "d"]))
        ['a', 'b', 'c', 'd']
    """

    def __init__(self, mod_name, args):
        super(ModOptionSequenceStr, self).__init__(mod_name, "seq_str")
        if not args:  # an empty sequence would generate nothing
            self._raise_error("No string in sequence")
        self.seq = args

    def get_option(self, i):
        """Return the i-th string of the sequence."""
        self.inbound_or_raise(i)
        return self.seq[i]

    def nb_options(self):
        """Return the number of strings in the sequence."""
        return len(self.seq)

    def __str__(self):
        return "seq_str " + " ".join(self.seq)

    def __repr__(self):
        return "ModOptionSequenceStr({}, {})".format(self.mod_name, self.seq)
class ModOptionSequenceInt(ModOption):
    """Option generator over a fixed sequence of integers.

    Each element of `args` is cast to `int`; the values are produced in
    the same order.

    Args:
        mod_name: The name of the mod (used only for error messages).
        args: Non-empty list of values castable to int.

    Attributes:
        mod_name: The name of the modification.
        opt_name: The name of this option ('seq_int').
        seq: The sequence of integers.

    Raises:
        ModGeneratorError: The sequence is empty or contains a value
            that cannot be cast to int.

    Examples:
        >>> list(ModOptionSequenceInt("foo", [1, 10, 2, 20, 3, 30]))
        [1, 10, 2, 20, 3, 30]
    """

    def __init__(self, mod_name, args):
        super(ModOptionSequenceInt, self).__init__(mod_name, "seq_int")
        if not args:  # an empty sequence would generate nothing
            self._raise_error("No number in sequence")
        self.seq = []
        for raw in args:
            try:
                self.seq.append(int(raw))
            except ValueError:
                self._raise_error("Non-int argument, got '{}'".format(raw))

    def get_option(self, i):
        """Return the i-th integer of the sequence."""
        self.inbound_or_raise(i)
        return self.seq[i]

    def nb_options(self):
        """Return the number of integers in the sequence."""
        return len(self.seq)

    def __str__(self):
        return "seq_int " + " ".join(map(str, self.seq))

    def __repr__(self):
        return "ModOptionSequenceInt({}, {})".format(self.mod_name, self.seq)
class ModOptionSequenceFloat(ModOption):
    """Option generator over a fixed sequence of floats.

    Each element of `args` is cast to `float`; the values are produced
    in the same order.

    Args:
        mod_name: The name of the mod (used only for error messages).
        args: Non-empty list of values castable to float.

    Attributes:
        mod_name: The name of the modification.
        opt_name: The name of this option ('seq_float').
        seq: The sequence of floats.

    Raises:
        ModGeneratorError: The sequence is empty or contains a value
            that cannot be cast to float.

    Examples:
        >>> list(ModOptionSequenceFloat("foo", [1, 10.5, 2.4, 20, 3, 30.48]))
        [1.0, 10.5, 2.4, 20, 3, 30.48]
    """

    def __init__(self, mod_name, args):
        super(ModOptionSequenceFloat, self).__init__(mod_name, "seq_float")
        if not args:  # an empty sequence would generate nothing
            self._raise_error("No number in sequence")
        self.seq = []
        for raw in args:
            try:
                self.seq.append(float(raw))
            except ValueError:
                self._raise_error("Non-float argument, got '{}'".format(raw))

    def get_option(self, i):
        """Return the i-th float of the sequence."""
        self.inbound_or_raise(i)
        return self.seq[i]

    def nb_options(self):
        """Return the number of floats in the sequence."""
        return len(self.seq)

    def __str__(self):
        return "seq_float " + " ".join(map(str, self.seq))

    def __repr__(self):
        return "ModOptionSequenceFloat({}, {})".format(self.mod_name, self.seq)
class ModOptionStr(ModOption):
    """Option generator with a single possible value: a constant string.

    `args` is a list (for consistency with the other option generators)
    holding exactly one element: the string.

    Args:
        mod_name: The name of the mod (used only for error messages).
        args: A list with 1 string.

    Attributes:
        mod_name: The name of the modification.
        opt_name: The name of this option ('str').
        s: The string.

    Raises:
        ModGeneratorError: `args` does not hold exactly one element.

    Examples:
        >>> list(ModOptionStr("foo", ["bar"]))
        ["bar"]
    """

    def __init__(self, mod_name, args):
        super(ModOptionStr, self).__init__(mod_name, "str")
        if len(args) == 1:
            self.s = args[0]
        else:
            self._raise_error(
                "There should be only 1 element, got '{}'".format(args)
            )

    def get_option(self, i):
        """Return the constant string (after validating `i`)."""
        self.inbound_or_raise(i)
        return self.s

    def nb_options(self):
        """Always 1: a constant has a single possible value."""
        return 1

    def __str__(self):
        return "str {}".format(self.s)

    def __repr__(self):
        return "ModOptionStr({}, [{}])".format(self.mod_name, self.s)
class ModOptionInt(ModOption):
    """Option generator with a single possible value: a constant int.

    `args` is a list (for consistency with the other option generators)
    holding exactly one element, which is cast to `int`.

    Args:
        mod_name: The name of the mod (used only for error messages).
        args: A list with 1 value castable to int.

    Attributes:
        mod_name: The name of the modification.
        opt_name: The name of this option ('int').
        n: The integer.

    Raises:
        ModGeneratorError: `args` does not hold exactly one element or
            the element cannot be cast to int.

    Examples:
        >>> list(ModOptionInt("foo", [18]))
        [18]
    """

    def __init__(self, mod_name, args):
        super(ModOptionInt, self).__init__(mod_name, "int")
        if len(args) != 1:
            self._raise_error(
                "There should be only 1 element, got '{}'".format(args)
            )
        try:
            self.n = int(args[0])
        except ValueError:
            self._raise_error("Can't cast '{}' to an integer".format(args[0]))

    def get_option(self, i):
        """Return the constant integer (after validating `i`)."""
        self.inbound_or_raise(i)
        return self.n

    def nb_options(self):
        """Always 1: a constant has a single possible value."""
        return 1

    def __str__(self):
        return "int {}".format(self.n)

    def __repr__(self):
        return "ModOptionInt({}, [{}])".format(self.mod_name, self.n)
class ModOptionFloat(ModOption):
    """Option generator with a single possible value: a constant float.

    `args` is a list (for consistency with the other option generators)
    holding exactly one element, which is cast to `float`.

    Args:
        mod_name: The name of the mod (used only for error messages).
        args: A list with 1 value castable to float.

    Attributes:
        mod_name: The name of the modification.
        opt_name: The name of this option ('float').
        n: The float.

    Raises:
        ModGeneratorError: `args` does not hold exactly one element or
            the element cannot be cast to float.

    Examples:
        >>> list(ModOptionFloat("foo", [18]))
        [18.0]
        >>> list(ModOptionFloat("foo", [42.58]))
        [42.58]
    """

    def __init__(self, mod_name, args):
        super(ModOptionFloat, self).__init__(mod_name, "float")
        if len(args) != 1:
            self._raise_error(
                "There should be only 1 element, got '{}'".format(args)
            )
        try:
            self.n = float(args[0])
        except ValueError:
            self._raise_error("Can't cast '{}' to a float".format(args[0]))

    def get_option(self, i):
        """Return the constant float (after validating `i`)."""
        self.inbound_or_raise(i)
        return self.n

    def nb_options(self):
        """Always 1: a constant has a single possible value."""
        return 1

    def __str__(self):
        return "float {}".format(self.n)

    def __repr__(self):
        return "ModOptionFloat({}, [{}])".format(self.mod_name, self.n)
class ModOptionNone(ModOption):
    """Option generator with a single possible value: None.

    `args` is a list (for consistency with the other option generators)
    that must be empty.

    Args:
        mod_name: The name of the mod (used only for error messages).
        args: An empty list.

    Attributes:
        mod_name: The name of the modification.
        opt_name: The name of this option ('none').

    Raises:
        ModGeneratorError: `args` is not empty.

    Examples:
        >>> list(ModOptionNone("foo", []))
        [None]
    """

    def __init__(self, mod_name, args):
        super(ModOptionNone, self).__init__(mod_name, "none")
        if args:  # 'none' takes no parameters at all
            self._raise_error(
                "There should be no element, got '{}'".format(args)
            )

    def get_option(self, i):
        """Return None, the only possible value (after validating `i`)."""
        self.inbound_or_raise(i)
        return None

    def nb_options(self):
        """Always 1: the only possible value is None."""
        return 1

    def __str__(self):
        return "none"

    def __repr__(self):
        return "ModOptionNone({}, [])".format(self.mod_name)
class ModGenerator(object):
    """Generator for a modification.

    It can also generate 'None' instead of a modification. This means the
    modification should not be used (e.g. not included in a modlist). This
    "possibility" makes the length of the generator 1 bigger than what
    could be expected.

    For dynamic and evolution purposes, the `Mod` object is imported based
    on the `mod_name` given. It can then be used to generate all the
    possible mods with all the possible combinations for the options, as
    described in `mod_opts`.

    Args:
        mod_name: The name of the modification (for importing the correct
            mod and improve error messages).
        mod_opts: A list with the options to use to build `ModOption`
            objects.
        optional: True if the mod is optional (the modlist can be
            generated without this mod). Default is 'False'.

    Attributes:
        mod_name: The name of the modification (for importing the correct
            mod and improve error messages).
        optional: Is the mod optional.

    Examples:
        It can be used as a generator or as list-like object.

        >>> for mod in ModGenerator("echo", ["seq_str foo bar"]):
        ...     print(repr(mod))
        None
        Echo<string: foo>
        Echo<string: bar>
        >>> print(ModGenerator("ipv6_frag", ["range 1280 6000 50"])[50])
        Ipv6Frag 3730
        >>> len(ModGenerator("select", [0, 2, "seq_int 3 4 5", "range 7 20"]))
        43
    """

    #: Maps the leading keyword of a string option-spec to the
    #: `ModOption` subclass that parses the rest of the spec.
    _OPT_TYPES = {
        "range": ModOptionRange,
        "seq_str": ModOptionSequenceStr,
        "seq_int": ModOptionSequenceInt,
        "seq_float": ModOptionSequenceFloat,
        "str": ModOptionStr,
        "int": ModOptionInt,
        "float": ModOptionFloat,
        "none": ModOptionNone,
    }

    def __init__(self, mod_name, mod_opts, optional=False):
        self.mod_name = mod_name
        self._mod = get_mod(mod_name)
        self.optional = optional
        self._mod_opts = [self._make_option(opt) for opt in mod_opts]

    def _make_option(self, opt):
        """Build the `ModOption` described by one option specification.

        String specs are dispatched on their first word through
        `_OPT_TYPES`; an unknown keyword means the whole spec is a
        constant string. Non-string specs are constant integers.
        """
        if not isinstance(opt, str):
            # By default a non-string spec is a single constant integer
            return ModOptionInt(self.mod_name, [opt])
        opt_args = opt.split()
        opt_cls = self._OPT_TYPES.get(opt_args[0])
        if opt_cls is None:
            # By default an unknown keyword means a constant string
            # (note: the *whole* spec, not just the trailing words)
            return ModOptionStr(self.mod_name, [opt])
        return opt_cls(self.mod_name, opt_args[1:])

    def get_mod(self, i):
        """Returns the i-th instance of the mod.

        The result must be deterministic, constant for a given `i`. E.g.
        asking for `.get_mod(10)` must always output the same result.

        Args:
            i: the number of the configuration.

        Raises:
            ModGeneratorError: `i` is out of bounds (i<0 or i>=len).

        Returns:
            The i-th `Mod` instance or 'None' if the instance of the mod
            is the one with no mod at all. By implementation 'None' is
            always returned by the 0-th instance.
        """
        # Check the correctness of i
        if not isinstance(i, int):
            raise ModGeneratorError(
                "Index is not an integer, got '{}'".format(i)
            )
        if i < 0 or i >= self.nb_mods():
            raise ModGeneratorError(
                "Error with mod '{}': 'i' should be between 0 and {}, got '{}'"
                .format(self.mod_name, self.nb_mods()-1, i)
            )
        # Handle the 'no-mod' possibility
        if self.optional:
            if i == 0:
                return None
            i -= 1
        # Mixed-radix decomposition of `i`: each option contributes one
        # "digit" whose base is the number of values of that option.
        opts = list()
        for opt in self._mod_opts:
            opts.append(opt[i % len(opt)])
            i //= len(opt)  # floor division already discards i % len(opt)
        return self._mod(*opts)

    def nb_mods(self):
        """Returns the number of different mods possible.

        It is basically the multiplication of the length of the different
        `ModOption` it is composed of. And 1 more possibility for the
        'no-mod' possibility.
        """
        ret = 1
        for opt in self._mod_opts:
            ret *= len(opt)
        if self.optional:
            ret += 1  # The 'no-mod' possibility
        return ret

    def __getitem__(self, i):
        return self.get_mod(i)

    def __len__(self):
        return self.nb_mods()

    def __iter__(self):
        return (self.get_mod(i) for i in range(self.nb_mods()))

    def __str__(self):
        return (
            "{{ \n"
            " \"mod_name\": \"{}\",\n"
            " \"mod_opts\": [{}],\n"
            " \"optional\": \"{}\"\n"
            "}}"
        ).format(
            self.mod_name,
            ", ".join("\""+str(opt)+"\"" for opt in self._mod_opts),
            str(self.optional)
        )

    def __repr__(self):
        return "ModGenerator({}, opts=[{}]{})".format(
            self.mod_name,
            ", ".join(opt.opt_name for opt in self._mod_opts),
            ", optional=True" if self.optional else ""
        )
class ModListGenerator(object):
    """Generator for a modification list.

    The `ModList` object is created based on the specifications for each
    of its mods as it comes from the `Config` object. It simply creates a
    `ModGenerator` for each of the defined mods, stores them and uses
    them to generate 1 modlist instance (i.e. a ModList with 1 mod
    instance from the ModGenerator of each mod).

    Args:
        mods: A list of mods where each element is a dictionary containing
            the key 'mod_name' with the name of the mod, the key
            'mod_opts' with the list of options to use to build the
            ModGenerator and, optionally, the key 'optional' (defaults
            to False).

    Examples:
        >>> modlist_gen = ModListGenerator([
        ...     {"mod_name": "ipv6_frag", "mod_opts": ["seq_str 1280 1500"]},
        ...     {"mod_name": "echo", "mod_opts": ["seq_str foo bar fuz ball"]},
        ...     {"mod_name": "select", "mod_opts": [1, 2, 3, 4, 5]}
        ... ])
        >>> print(repr(modlist_gen))
        ModListGenerator(mods=[ipv6_frag, echo, select])
        >>> len(modlist_gen)
        30
        >>> modlist_gen[5]
        ModList [
         - Ipv6Frag<fragsize: 1500>
         - Echo<string: fuz>
        ]
        >>> modlist_gen[25]
        ModList [
         - Ipv6Frag<fragsize=1280>
         - Echo<string=fuz>
         - Select<sequence=[1, 2, 3, 4, 5]>
        ]
    """

    def __init__(self, mods):
        self._mod_generators = [
            # Bug fix: 'optional' is an optional key (see the class
            # docstring examples); don't KeyError when it is absent.
            ModGenerator(mod['mod_name'], mod['mod_opts'],
                         mod.get('optional', False))
            for mod in mods
        ]

    def get_modlist(self, i):
        """Returns the i-th instance of the modlist.

        The result must be deterministic, constant for a given `i`. E.g.
        asking for `.get_modlist(10)` must always output the same result.

        Args:
            i: the number of the configuration.

        Raises:
            ModGeneratorError: `i` is out of bounds (i<0 or i>=len).

        Returns:
            The i-th `ModList` instance.
        """
        if not isinstance(i, int):
            raise ModGeneratorError(
                "Index is not an integer, got '{}'".format(i)
            )
        if i < 0 or i >= self.nb_modlists():
            raise ModGeneratorError(
                "Index should be between 0 and {}, got '{}'"
                .format(self.nb_modlists()-1, i)
            )
        modlist = ModList()
        # Mixed-radix decomposition of `i`: each generator contributes
        # one "digit" whose base is that generator's number of mods.
        for mod_generator in self._mod_generators:
            mod = mod_generator[i % len(mod_generator)]
            if mod is not None:  # None means "this optional mod is absent"
                modlist.append(mod)
            i //= len(mod_generator)
        return modlist

    def nb_modlists(self):
        """Returns the number of different modlists possible.

        It is basically the multiplication of the length of the different
        `ModGenerator` it is composed of.
        """
        ret = 1
        for mod_generator in self._mod_generators:
            ret *= len(mod_generator)
        return ret

    def __getitem__(self, i):
        return self.get_modlist(i)

    def __len__(self):
        return self.nb_modlists()

    def __iter__(self):
        return (self.get_modlist(i) for i in range(self.nb_modlists()))

    def __str__(self):
        return "[\n {}\n]".format(
            ",\n ".join(str(mod_gen).replace('\n', '\n ')
                        for mod_gen in self._mod_generators)
        )

    def __repr__(self):
        return "ModListGenerator(mods=[{}])".format(
            ", ".join(mod_gen.mod_name for mod_gen in self._mod_generators)
        )
def get_all_mods():
    """Discover every available mod by scanning the modifications package.

    Each ``.py`` file under `MOD_DIR` (except ``__init__.py`` and
    ``mod.py``) is loaded through `get_mod`; files that fail to import or
    do not define a `Mod` subclass are silently skipped.

    Returns:
        A list of python classes, all subclasses of `Mod`, that can be
        used as modifications.
    """
    mods_path = os.path.join(os.path.dirname(__file__), MOD_DIR)
    all_mods = []
    for filename in os.listdir(mods_path):
        if not filename.endswith('.py'):
            continue
        if filename in ('__init__.py', 'mod.py'):
            continue
        try:
            all_mods.append(get_mod(filename[:-3]))
        except ImportError:
            # The mod could not be loaded or was not a subclass of Mod
            continue
    return all_mods
def get_mod(mod_name):
    """Imports a mod from its name using `importlib`.

    Args:
        mod_name: The name of the mod (snake_case or CamelCase are
            accepted).

    Returns:
        The python class which corresponds to the modification.

    Raises:
        ImportError: The class was not found or it is not a subclass of
            `Mod`.

    Examples:
        >>> get_mod("DropOne")
        <class 'fragscapy.modifications.drop_one.DropOne'>
        >>> get_mod("drop_one")
        <class 'fragscapy.modifications.drop_one.DropOne'>
    """
    # Module name is snake_case, class name is CamelCase.
    pkg_name = "{}.{}".format(MOD_PACKAGE, inflection.underscore(mod_name))
    mod_name = inflection.camelize(mod_name)
    pkg = importlib.import_module(pkg_name)
    try:
        mod = getattr(pkg, mod_name)
    except AttributeError as err:  # There is no class named correctly
        # Chain explicitly so the original lookup failure stays visible.
        raise ImportError(
            "No class named {} in module {}"
            .format(mod_name, pkg_name)
        ) from err
    if not issubclass(mod, Mod):
        raise ImportError(
            "{}.{} is not a subclass of `fragscapy.modifications.mod.Mod`"
            .format(pkg_name, mod_name)
        )
    return mod
| 3.5 | 4 |
src/dataset.py | fkong7/DeformNet | 21 | 12760030 | #Copyright (C) 2021 <NAME>, <NAME>, University of California, Berkeley
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import tensorflow as tf
import functools
from augmentation import _augment, _augment_deformnet
def _parse_function(example_proto):
features = {"X": tf.VarLenFeature(tf.float32),
"S": tf.VarLenFeature(tf.int64),
"shape0": tf.FixedLenFeature((), tf.int64),
"shape1": tf.FixedLenFeature((), tf.int64),
"shape2": tf.FixedLenFeature((), tf.int64)}
parsed_features = tf.parse_single_example(example_proto, features)
img = tf.sparse_tensor_to_dense(parsed_features["X"])
depth = tf.cast(parsed_features["shape0"], tf.int32)
height = tf.cast(parsed_features["shape1"], tf.int32)
width = tf.cast(parsed_features["shape2"], tf.int32)
label = tf.sparse_tensor_to_dense(parsed_features["S"])
img = tf.reshape(img, tf.stack([depth,height, width, 1]))
label = tf.reshape(label, tf.stack([depth, height, width, 1]))
label = tf.cast(label, tf.int32)
return img, label
def _parse_function_all(mode):
def __parse(example_proto):
if mode=='img':
features = {"X": tf.VarLenFeature(tf.float32),
"shape0": tf.FixedLenFeature((), tf.int64),
"shape1": tf.FixedLenFeature((), tf.int64),
"shape2": tf.FixedLenFeature((), tf.int64),
}
parsed_features = tf.parse_single_example(example_proto, features)
img = tf.sparse_tensor_to_dense(parsed_features["X"])
depth = tf.cast(parsed_features["shape0"], tf.int32)
height = tf.cast(parsed_features["shape1"], tf.int32)
width = tf.cast(parsed_features["shape2"], tf.int32)
img = tf.reshape(img, tf.stack([depth,height, width, 1]))
return img
elif mode=='seg':
features = {"S": tf.VarLenFeature(tf.int64),
"shape0": tf.FixedLenFeature((), tf.int64),
"shape1": tf.FixedLenFeature((), tf.int64),
"shape2": tf.FixedLenFeature((), tf.int64),
}
parsed_features = tf.parse_single_example(example_proto, features)
seg = tf.sparse_tensor_to_dense(parsed_features["S"])
depth = tf.cast(parsed_features["shape0"], tf.int32)
height = tf.cast(parsed_features["shape1"], tf.int32)
width = tf.cast(parsed_features["shape2"], tf.int32)
seg = tf.reshape(seg, tf.stack([depth,height, width, 1]))
return seg
elif 'mesh' in mode:
mesh_id = mode.split('_')[-1]
features = {"Y_"+mesh_id: tf.VarLenFeature(tf.float32)
}
parsed_features = tf.parse_single_example(example_proto, features)
mesh = tf.sparse_tensor_to_dense(parsed_features["Y_"+mesh_id])
node_num = tf.cast(tf.shape(mesh)[0]/6, tf.int32)
mesh = tf.reshape(mesh, tf.stack([node_num, 6 ]))
return mesh
elif mode=='transform':
features = {"Transform": tf.VarLenFeature(tf.float32)}
parsed_features = tf.parse_single_example(example_proto, features)
transform = tf.sparse_tensor_to_dense(parsed_features["Transform"])
transform = tf.reshape(transform, [4, 4])
return transform
elif mode=='spacing':
features = {"Spacing": tf.VarLenFeature(tf.float32)}
parsed_features = tf.parse_single_example(example_proto, features)
spacing = tf.sparse_tensor_to_dense(parsed_features["Spacing"])
spacing = tf.reshape(spacing, [3])
return spacing
else:
raise ValueError('invalid name')
return __parse
def get_baseline_dataset(filenames, preproc_fn=functools.partial(_augment),
threads=5,
batch_size=1,
shuffle=True):
num_x = len(filenames)
# Create a dataset from the filenames and labels
files = tf.data.Dataset.from_tensor_slices(filenames)
dataset = files.apply(tf.contrib.data.parallel_interleave(
tf.data.TFRecordDataset, cycle_length=threads))
# Map our preprocessing function to every element in our dataset, taking
# advantage of multithreading
dataset = dataset.map(_parse_function, num_parallel_calls=threads)
# dataset = dataset.map(_process_pathnames, num_parallel_calls=threads)
dataset = dataset.map(preproc_fn, num_parallel_calls=threads)
if shuffle:
dataset = dataset.shuffle(384)
# It's necessary to repeat our data for all epochs
dataset = dataset.repeat().batch(batch_size)
dataset = dataset.prefetch(buffer_size=batch_size)
return dataset
def get_baseline_dataset_deformnet(filenames, preproc_fn=functools.partial(_augment_deformnet),
threads=1,
batch_size=0,
mesh_ids = [2], # default is LV blood pool 2
shuffle=True,
if_seg=True,
shuffle_buffer=10000,
num_gcn_blocks=3):
num_x = len(filenames)
# Create a dataset from the filenames and labels
files = tf.data.Dataset.from_tensor_slices(filenames)
if shuffle:
files = files.shuffle(shuffle_buffer)
dataset = files.apply(tf.contrib.data.parallel_interleave(tf.data.TFRecordDataset, cycle_length=threads))
# Map our preprocessing function to every element in our dataset, taking
# advantage of multithreading
dataset_input = dataset.map(_parse_function_all('img'))
mesh_list = []
for i in mesh_ids:
dataset_mesh = dataset.map(_parse_function_all('mesh_'+str(i)))
mesh_list.append(dataset_mesh)
out_list = []
for i in range(num_gcn_blocks):
out_list += mesh_list
if if_seg:
dataset_seg = dataset.map(_parse_function_all('seg'))
out_list = [dataset_seg]+out_list
dataset_output = tf.data.Dataset.zip(tuple(out_list))
#dataset_output = tf.data.Dataset.zip((dataset_seg, tuple(mesh_list), tuple(mesh_list), tuple(mesh_list)))
dataset = tf.data.Dataset.zip((dataset_input, dataset_output))
dataset = dataset.map(preproc_fn)
dataset = dataset.repeat()
if batch_size >0:
dataset = dataset.batch(batch_size, drop_remainder=True)
return dataset
| 2.4375 | 2 |
dev/stream.py | walchko/the-collector | 0 | 12760031 | <reponame>walchko/the-collector
#!/usr/bin/env python3
from collections import namedtuple
import msgpack
import time
from array import array
Test = namedtuple("Test", "x")
def serialize(x):
    """msgpack ``default`` hook: pack known message types as ExtType(1).

    The payload is the class name followed by the object's fields, packed
    recursively so nested message types are handled too. Anything else is
    returned unchanged for msgpack to serialize natively.
    """
    print('serialize', x)
    cls_name = x.__class__.__name__
    if cls_name in ('Quaternion', 'Vector', 'Pose', 'Image', 'Lidar', 'IMU'):
        payload = msgpack.packb(
            [cls_name] + list(x[:]), default=serialize, strict_types=True)
        return msgpack.ExtType(1, payload)
    return x
# def ext_unpack(code, data):
def deserialize(code, data):
    """msgpack `ext_hook`: rebuild an object packed by serialize().

    ExtType(1) payloads hold [class_name, *fields]; the constructor is looked
    up by name in this module's globals. Other ext codes pass through.
    """
    print('deserialize', code, data)
    if code != 1:
        return msgpack.ExtType(code, data)
    # Recursive unpack so nested ExtType payloads are also decoded.
    name, *args = msgpack.unpackb(data, ext_hook=deserialize, raw=False)
    # Find the constructor in this module's namespace and call it.
    return globals()[name](*args)
# --- write phase: pack 40 records into test.bag, flushing in batches of 10 ---
save = []
packer = msgpack.Packer(default=serialize, use_bin_type=True)
fd = open('test.bag', 'wb')
tmp = []
for i in range(20):
    d = tuple(range(5))  # NOTE(review): tuples round-trip as lists unless the Unpacker is created with use_list=False — verify the equality check below is meaningful
    d += (time.time(),)
    tmp.append(d)
    save.append(d)
    # tmp.append(array('d', [1.2, 3.4]))
    d = Test(i)
    tmp.append(d)
    save.append(d)
    if len(tmp) >= 10:
        print('>> flushing buffer')
        for t in tmp:
            fd.write(packer.pack(t))
        tmp = []
fd.close()
# --- read phase: stream the records back and compare against what was saved ---
fd = open('test.bag', 'rb')
unpacker = msgpack.Unpacker(fd, raw=False)
load = []
for o in unpacker:
    # print(o)
    load.append(o)
print("Is it the same?", load == save)
print('-'*40)
print(save)
print('-'*40)
print(load)
| 2.40625 | 2 |
Deprecated Source Code/RollingBeta.py | TFSM00/Markowitz-Efficient-Frontier | 0 | 12760032 | <reponame>TFSM00/Markowitz-Efficient-Frontier<filename>Deprecated Source Code/RollingBeta.py
from stockStatistics import stockReturnsforSingle
from marketData import marketReturnswithDates
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
tickers = ["AAPL","GOOG","AMZN","MSFT","INTC","IBM","ORCL","CSCO","NVDA"]
def RollingBetaGraphs(tickerList):
    """
    Opens a pdf and displays the rolling beta for every ticker in the list
    """
    with PdfPages("RollingBeta.pdf") as pdf:
        for tick in tickerList:
            stockRet = stockReturnsforSingle(tick)
            mktRet = marketReturnswithDates()
            stockRet["S&P500"] = mktRet["S&P500"]
            # Pad the first 36 observations with None so the rolling columns
            # align with the returns index (36-period lookback window).
            betas = [None]*36
            stddevs = [None]*36
            UpperBound95 = [None]*36
            LowerBound95 = [None]*36
            # presumably the two-sided 95% t critical value for ~34 d.o.f. — TODO confirm
            const = 2.03224450931772
            for i in range(0, len(stockRet["S&P500"])-36):
                tickRet = stockRet[tick][i:i+36]
                marketRet = mktRet["S&P500"][i:i+36]
                # NOTE(review): np.polyfit(..., deg=1) returns (slope, intercept);
                # the second value bound to `stddev` is the regression INTERCEPT,
                # not a standard deviation, so the +/- const bounds below are
                # statistically suspect — confirm intended computation.
                beta, stddev = np.polyfit(marketRet,tickRet,1)
                betas.append(beta)
                stddevs.append(stddev)
                UpperBound95.append(float(beta + (const*stddev)))
                LowerBound95.append(float(beta - (const*stddev)))
            stockRet[f"{tick} Beta"]=betas
            stockRet["Standard Deviation"]=stddevs
            stockRet[r"95% Lower Bound"]=LowerBound95
            stockRet[r"95% Upper Bound"]=UpperBound95
            # Drop the raw return columns so only the beta/bounds get plotted.
            # NOTE(review): `newTable = stockRet` aliases (does not copy) the frame;
            # the del statements below also mutate stockRet — harmless here since
            # stockRet is rebuilt each iteration, but worth knowing.
            newTable = stockRet
            del newTable[tick]
            del newTable["S&P500"]
            del newTable["Standard Deviation"]
            newTable = newTable[36:]
            plt.figure()
            plt.plot(newTable)
            plt.title(f"Rolling Beta for {tick}")
            plt.xlabel("Date")
            plt.ylabel("Rolling Beta")
            plt.grid()
            plt.legend([f"{tick} Beta",r"95% Lower Bound", r"95% Upper Bound"])
            # Adds the current figure to the pdf. NOTE(review): figures are never
            # closed (plt.close), so memory grows with the ticker list — confirm.
            pdf.savefig()
if __name__ == "__main__":
    # Generate RollingBeta.pdf for the default ticker list when run as a script.
    RollingBetaGraphs(tickers)
| 2.625 | 3 |
listcom.py | Can-Guo/IMU_Python_Reading | 0 | 12760033 | '''
Date: 2021-08-04 22:49:53
LastEditors: <NAME>,<EMAIL>
LastEditTime: 2021-09-18 12:46:46
FilePath: \Python\listcom.py
'''
import serial #导入模块
import serial.tools.list_ports
# Enumerate available serial ports and print each one.
port_list = list(serial.tools.list_ports.comports())
print(port_list)
if not port_list:  # idiomatic emptiness test instead of len(...) == 0
    print('无可用串口')
else:
    # Iterate the ports directly instead of indexing with range(len(...)).
    for port in port_list:
        print(port)
modulo 1/aulas/1.0 - Print, input.py | GabrielBrotas/Python | 0 | 12760034 | nome = input('what is your name? ')
idade = input('how old are you ' + nome + '?')
peso = input('what is your weight {}?'.format(nome) + '?')
print('dados: {}'.format(nome) + ' Weight:' + peso + 'Idade:' + idade)
| 3.84375 | 4 |
swissdta/records/record836.py | BitySA/swissdta | 0 | 12760035 | """Implementation of TA 836 Record"""
from datetime import datetime, timedelta
from itertools import combinations
from typing import Tuple
from schwifty import BIC, IBAN
from swissdta.constants import ChargesRule, IdentificationBankAddress, IdentificationPurpose, FillSide, PaymentType
from swissdta.fields import AlphaNumeric, Amount, Currency, Date, Iban, Numeric
from swissdta.records.record import DTARecord
from swissdta.util import remove_whitespace, is_swiss_iban
class DTARecord836(DTARecord):  # pylint: disable=too-many-instance-attributes
    """TA 836 Record implementation.
    Payments with an IBAN in Switzerland and abroad, in all currencies.
    This type of transaction can only be used if
    the beneficiary's account number corresponds
    to the IBAN standard for the country concerned.
    The constructor of this class should not accept record
    values. All fields should be set after initialization and all
    field attributes must use a subclass of `dta.fields.Field`.
    Attributes:
        reference: 11 characters transaction no. defined by the
            ordering party; must be unique within a data file. The
            first r characters sender id are added automatically.
        client_account: Account to be debited (Only IBAN
            is accepted, despite the fact that the
            standard accepts both with or without IBAN)
        value_date: The date at which the payment should be processed
        currency: The currency for the amount of the payment
        amount: The actual amount of the payment
        conversion_rate: Only indicated if previously agreed
            on the basis of the bank's foreign exchange rate.
            A maximum of 6 decimal places is permitted.
        client_address1: Ordering party's address (first 35 characters)
        client_address2: Ordering party's address (middle 35 characters)
        client_address3: Ordering party's address (last 35 characters)
        bank_address_type: Identification bank address,
            use ``IdentificationBankAddress`` for the values.
        bank_address1: Beneficiary's institution
            When option ``IdentificationBankAddress.BIC_ADDRESS`` or
            ``IdentificationBankAddress.SWIFTH_ADDRESS`` (``'A'``):
            8- or 11-digit BIC address (=SWIFT address)
            When option
            ``IdentificationBankAddress.BENEFICIARY_ADDRESS``:
            Name and address of the beneficiary's institution
            If Field 58 contains a CH or LI IBAN, no details on the
            financial institution are required. In this case, option
            ``IdentificationBankAddress.BENEFICIARY_ADDRESS`` (``'D'``)
            must be chosen in disc format and the address field
            completed with blanks.
        bank_address2: Beneficiary's institution
            When option ``IdentificationBankAddress.BIC_ADDRESS`` or
            ``IdentificationBankAddress.SWIFTH_ADDRESS`` (``'A'``):
            Must be blank and bank_address1 must be a 8- or 11-digit
            BIC address (=SWIFT address). When option
            ``IdentificationBankAddress.BENEFICIARY_ADDRESS``:
            Name and address of the beneficiary's institution
            If Field 58 contains a CH or LI IBAN, no details on the
            financial institution are required. In this case, option
            ``IdentificationBankAddress.BENEFICIARY_ADDRESS`` (``'D'``)
            must be chosen in disc format and the address field
            completed with blanks.
        recipient_iban: The beneficiary's IBAN
        recipient_name: Name of the beneficiary
        recipient_address1: Address of the beneficiary (first 35 characters)
        recipient_address2: Address of the beneficiary (last 35 characters)
        identification_purpose: Identification of purpose,
            use ``IdentificationPurpose`` for the values.
        purpose1: Purpose of the payment
            Structured reference number:
            1 line of 20 positions fixed (without blanks),
            commencing with 2-digit check-digit (PP), rest blank
            Unstructured, free text: first of
            up to 3 lines of 35 characters
        purpose2: Purpose of the payment
            Structured reference number: Must be blank
            Unstructured, free text: second of
            up to 3 lines of 35 characters
        purpose3: Purpose of the payment
            Structured reference number: Must be blank
            Unstructured, free text: third of
            up to 3 lines of 35 characters
        charges_rules: Rules for charges, use ``ChargesRule`` for the values
    """
    # Field descriptors: each one encodes the fixed-width formatting and
    # per-field validation rules for its slot in the DTA record.
    reference = AlphaNumeric(length=11, fillchar='0', fillside=FillSide.LEFT)
    client_account = Iban(length=24)
    value_date = Date()
    currency = Currency()
    amount = Amount(length=15)
    conversion_rate = Amount(length=12)
    client_address1 = AlphaNumeric(length=35, truncate=True)
    client_address2 = AlphaNumeric(length=35, truncate=True)
    client_address3 = AlphaNumeric(length=35, truncate=True)
    bank_address_type = AlphaNumeric(length=1, allowed_values=IdentificationBankAddress)
    bank_address1 = AlphaNumeric(length=35)
    bank_address2 = AlphaNumeric(length=35)
    recipient_iban = Iban(length=34)
    recipient_name = AlphaNumeric(length=35, truncate=True)
    recipient_address1 = AlphaNumeric(length=35, truncate=True)
    recipient_address2 = AlphaNumeric(length=35, truncate=True)
    identification_purpose = AlphaNumeric(length=1, allowed_values=IdentificationPurpose)
    purpose1 = AlphaNumeric(length=35)
    purpose2 = AlphaNumeric(length=35)
    purpose3 = AlphaNumeric(length=35)
    charges_rules = Numeric(length=1, allowed_values=ChargesRule)
    # Fixed-width layout of the 5 physical lines of a TA 836 record
    # (CRLF line endings are part of the format).
    _template = (
        '01{header}{reference}{client_account}{value_date}{currency}{amount}{padding:<11}\r\n'
        '02{conversion_rate}{client_address1}{client_address2}{client_address3}{padding:<9}\r\n'
        '03{bank_address_type}{bank_address1}{bank_address2}{recipient_iban}{padding:<21}\r\n'
        '04{recipient_name}{recipient_address1}{recipient_address2}{padding:<21}\r\n'
        '05{identification_purpose}{purpose1}{purpose2}{purpose3}{charges_rules}{padding:<19}'
    )
    def __init__(self):
        super().__init__()
        # Transaction type 836 is fixed for this record class.
        self.header.transaction_type = 836
    @property
    def client_address(self) -> Tuple[str, str, str]:
        """The 3 lines of the client address as a tuple of 3 strings."""
        return self.client_address1, self.client_address2, self.client_address3
    @client_address.setter
    def client_address(self, client_address: Tuple[str, str, str]) -> None:
        self.client_address1, self.client_address2, self.client_address3 = client_address
    @property
    def bank_address(self) -> Tuple[str, str]:
        """The 2 lines of the bank address as a tuple of 2 strings."""
        return self.bank_address1, self.bank_address2
    @bank_address.setter
    def bank_address(self, bank_address: Tuple[str, str]) -> None:
        self.bank_address1, self.bank_address2 = bank_address
    @property
    def recipient_address(self) -> Tuple[str, str]:
        """The 2 lines of the recipient address as a tuple of 2 strings."""
        return self.recipient_address1, self.recipient_address2
    @recipient_address.setter
    def recipient_address(self, recipient_address: Tuple[str, str]) -> None:
        self.recipient_address1, self.recipient_address2 = recipient_address
    @property
    def purpose(self) -> Tuple[str, str, str]:
        """The 3 lines of the purpose as a tuple of 3 strings."""
        return self.purpose1, self.purpose2, self.purpose3
    @purpose.setter
    def purpose(self, purpose: Tuple[str, str, str]) -> None:
        self.purpose1, self.purpose2, self.purpose3 = purpose
    def generate(self) -> str:
        """Generate a TA 836 record as a string.
        The returned value is a simple string. Make sure
        to encode it to the ISO Latincode 8859-1 format
        in accordance with the DTA Standard and Formats.
        Returns: A TA 836 record as a string.
        """
        return self._template.format(
            header=self.header.generate(),
            # First 5 positions must contain a valid DTA identification (sender id).
            # Remaining 11 positions must contain a transaction reference number.
            # The generation of the full (16x) reference from the valid DTA identification is done automatically here
            reference=f'{self.header.sender_id}{self.reference}',
            client_account=self.client_account,
            value_date=self.value_date,
            currency=self.currency,
            amount=self.amount,
            conversion_rate=self.conversion_rate,
            client_address1=self.client_address1,
            client_address2=self.client_address2,
            client_address3=self.client_address3,
            bank_address_type=self.bank_address_type,
            bank_address1=self.bank_address1,
            bank_address2=self.bank_address2,
            recipient_iban=self.recipient_iban,
            recipient_name=self.recipient_name,
            recipient_address1=self.recipient_address1,
            recipient_address2=self.recipient_address2,
            identification_purpose=self.identification_purpose,
            purpose1=self.purpose1,
            purpose2=self.purpose2,
            purpose3=self.purpose3,
            charges_rules=self.charges_rules,
            padding=''
        )
    def validate(self) -> None:  # pylint: disable=too-complex, too-many-branches
        """Validate the field's value of the record."""
        super().validate()
        # --- header-level checks ---
        if self.header.processing_date != '000000':
            self.header.add_error('processing_date', "NOT PERMITTED: header processing date must be '000000'.")
        if self.header.recipient_clearing.strip():
            self.header.add_error('recipient_clearing',
                                  "NOT ALLOWED: beneficiary's bank clearing number must be blank.")
        if self.header.transaction_type != '836':
            self.header.add_error('transaction_type', "INVALID: Transaction type must be TA 836.")
        if self.header.payment_type not in {str(payment_type.value) for payment_type in PaymentType}:
            self.header.add_error('payment_type', "INVALID: Payment type must be 0 or 1 TA 836.")
        if not remove_whitespace(self.reference):
            self.add_error('reference', "MISSING TRANSACTION NUMBER: Reference may not be blank.")
        # --- ordering party's account: must be a valid Swiss/Liechtenstein IBAN
        #     whose IID (pos. 5-9) matches the header's BC number ---
        try:
            client_iban = IBAN(self.client_account, allow_invalid=False)
        except ValueError:  # Will throw ValueError if it is not a valid IBAN
            self.add_error(
                'client_account',
                "IBAN INVALID: Client account must be a valid with a 21 digit Swiss IBAN (CH resp. LI) ."
            )
        else:
            if not is_swiss_iban(client_iban):
                self.add_error(
                    'client_account',
                    "IBAN INVALID: Client account must be a valid with a 21 digit Swiss IBAN (CH resp. LI) ."
                )
            # Bank clearing is at pos 5-9 in IBAN
            if self.client_account[4:9].lstrip('0') != self.header.client_clearing.strip():
                self.add_error('client_account',
                               "IID IN IBAN NOT IDENTICAL WITH BC-NO: IID in IBAN (pos. 5 to 9) must concur with the "
                               "ordering party's BC no.")
        # --- value date window: [today - 10 days, today + 60 days] ---
        now = datetime.now()
        ten_days_ago = now - timedelta(days=10)
        sixty_days_ahead = now + timedelta(days=60)
        try:
            value_date = datetime.strptime(self.value_date, Date.DATE_FORMAT)
        except ValueError:
            self.add_error('value_date', "INVALID: Must contain a valid date.")
        else:
            if value_date < ten_days_ago:
                self.add_error('value_date', "EXPIRED: value date may not be elapsed more than 10 calendar days.")
            elif value_date > sixty_days_ahead:
                self.add_error('value_date', "TOO FAR AHEAD: value date may not exceed the reading in date + 60 days.")
        # --- amount precision depends on currency ---
        # NOTE(review): this assumes the Amount field always renders with a ','
        # decimal separator; if not, the [1] index raises IndexError — confirm.
        decimal_places = len(self.amount.strip().split(',', maxsplit=1)[1])
        if self.currency == 'CHF' and decimal_places > 2:
            self.add_error('currency',
                           "MORE THAN 2 DECIMAL PLACES: Amount may not contain more than 2 decimal places.")
        elif self.currency != 'CHF' and decimal_places > 3:
            self.add_error(
                'currency',
                " MORE THAN 3 DECIMAL PLACES: Amount may not contain more than 3 decimal places (foreign currencies)."
            )
        # --- address and bank identification checks ---
        if not any(self.client_address):
            self.add_error('client_address', "INCOMPLETE: Ordering party address, at least one line must exist.")
        if self.bank_address_type == IdentificationBankAddress.SWIFT_ADDRESS:
            try:
                BIC(self.bank_address1).validate()
            except ValueError:
                self.add_error(
                    'bank_address_type',
                    f"INCORRECT FIELD IDENTIFICATION: bank address type {IdentificationBankAddress.SWIFT_ADDRESS} "
                    f"may only be used if an 8 or 11 character BIC address (SWIFT) exists."
                )
        # No specification on how to validate a bank's address if the `bank_address_type` is not SWIFT.
        # Requires at least two non-blank client address lines (every pairing
        # having a blank member means fewer than two lines are filled).
        if all(not line1.strip() or not line2.strip() for line1, line2 in combinations(self.client_address, 2)):
            self.add_error('client_address', "INCOMPLETE: At least two address lines must exist.")
        if any('/C/' in address for address in self.client_address):
            self.add_error('client_address', "INVALID: /C/ may not be present for TA 836.")
        # XXX Missing validation of IPI reference if identification purpose is structured (I)
| 2.375 | 2 |
api/api/resources/post.py | medieteknik-kth/medieteknik.com | 7 | 12760036 | <reponame>medieteknik-kth/medieteknik.com<gh_stars>1-10
from flask import jsonify, session, request, make_response
from flask_restful import Resource
from sqlalchemy import or_, and_, cast
from datetime import datetime
import json
from api.db import db
from sqlalchemy import and_, exc
from api.models.post import Post
from api.models.post_tag import PostTag
from api.models.user import User
from api.models.committee import Committee
from api.resources.authentication import requires_auth
from api.utility.storage import upload_b64_image
import os
from werkzeug.utils import secure_filename
from werkzeug.datastructures import ImmutableMultiDict
import uuid
from api.resources.common import parseBoolean
# Local storage directory and public URL prefix for uploaded post images.
SAVE_FOLDER = os.path.join(os.getcwd(), "api", "static", "posts")
IMAGE_PATH = "static/posts/"
# Form/JSON field name carrying the header image.
IMAGE_COL = "header_image"
# ISO 8601 timestamp format (milliseconds + 'Z' suffix) used by the frontend.
ISO_DATE_DEF = "%Y-%m-%dT%H:%M:%S.%fZ"
class PostResource(Resource):
    """REST resource for a single post: fetch, edit and delete by id."""

    def get(self, id):
        """
        Returns a post by id.
        ---
        tags:
            - Posts
        parameters:
        - name: id
          in: query
          schema:
            type: integer
        responses:
            200:
                description: OK
        """
        post = Post.query.get_or_404(id)
        return jsonify(post.to_dict())

    @requires_auth
    def put(self, id, user):
        """
        Edits a post by id.
        ---
        tags:
            - Posts
        security:
            - authenticated: []
        parameters:
        - name: id
          in: query
          schema:
            type: integer
        - name: post
          in: body
          schema:
            type: object
            properties:
                committee_id:
                    type: number
                header_image:
                    type: string
                    format: binary
                title:
                    type: string
                title_en:
                    type: string
                body:
                    type: string
                body_en:
                    type: string
                scheduled_date:
                    type: string
                    format: date-time
                draft:
                    type: boolean
                tags:
                    type: array
                    items:
                        type: integer
        responses:
            200:
                description: OK
            400:
                description: Missing authentication token
            401:
                description: Not authenticated
            404:
                description: Did not find post with id
        """
        post = Post.query.get_or_404(id)
        # Edit rights: the author, an admin, or a member currently holding a
        # post in the same committee.
        post_user = (user.id == post.user_id)
        committee_member = False
        if not post_user:
            for post_term in user.post_terms:
                if ((post_term.post.committee_id == post.committee_id)
                        and (post_term.start_date < datetime.today() < post_term.end_date)):
                    committee_member = True
        data = request.json
        if user.is_admin or post_user or committee_member:
            if data.get("title"):
                title = data.get("title")
                if title.get('se'):
                    post.title = title.get('se')
                if title.get('en'):
                    post.title_en = title.get('en')
            if data.get("body"):
                body = data.get("body")
                if body.get('se'):
                    post.body = body.get('se')
                # Fixed: previously this checked title.get('en'), which skipped
                # the English body and raised NameError when no title was sent.
                if body.get('en'):
                    post.body_en = body.get('en')
            if data.get('date'):
                post.date = data.get('date')
            if data.get('scheduled_date'):
                post.scheduled_date = data.get('scheduled_date')
            if data.get('draft'):
                post.draft = data.get('draft')
            if data.get('header_image'):
                post.header_image = upload_b64_image(data.get('header_image'))
            if data.get('committee_id'):
                post.committee_id = data.get('committee_id')
            if data.get('tags'):
                post.tags = data.get('tags')
            db.session.commit()
            return make_response(jsonify(success=True))
        else:
            # Fixed: previously referenced an undefined name `error` (NameError).
            return make_response(jsonify(success=False, error="Not authorized to edit this post"), 401)

    @requires_auth
    def delete(self, id, user):
        """Delete a post by id.

        NOTE(review): unlike put(), no ownership/committee/admin check is
        performed here — any authenticated user can delete any post. Confirm
        whether this is intended.
        """
        post = Post.query.get_or_404(id)
        db.session.delete(post)
        db.session.commit()
        return jsonify({"message": "ok"})
class PostListResource(Resource):
    """REST resource for the post collection: paginated listing and creation."""

    def get(self):
        """
        Returns a list of all posts.
        ---
        tags:
            - Posts
        responses:
            200:
                description: OK
        """
        # NOTE(review): type=bool converts any non-empty string (including
        # "false") to True; only an absent parameter yields False — confirm
        # the frontend only sends the flag when it wants unpublished posts.
        show_unpublished = request.args.get('showUnpublished', False, type=bool)
        page = request.args.get('page', 1, type=int)
        per_page = request.args.get('perPage', 20, type=int)
        data = []
        total_count = 0
        ## TODO: Only show unpublished if logged in
        if show_unpublished:
            posts = Post.query.order_by(Post.date.desc()).paginate(page=page, per_page=per_page)
            data = [post.to_dict() for post in posts.items]
            total_count = posts.total
        else:
            # Published = not a draft AND (scheduled time reached OR never scheduled).
            scheduled_condition = [Post.scheduled_date <= datetime.now(), Post.scheduled_date == None]
            posts = Post.query.filter(
                and_(
                    Post.draft == False,
                    or_(*scheduled_condition)
                )).order_by(
                Post.scheduled_date.desc(),
                Post.date.desc()
            ).paginate(page=page, per_page=per_page)
            data = [post.to_dict() for post in posts.items]
            total_count = posts.total
        return jsonify({"data": data, "totalCount": total_count})

    @requires_auth
    def post(self, user):
        """
        Adds a new post.
        ---
        tags:
            - Posts
        security:
            - authenticated: []
        parameters:
        - name: id
          in: query
          schema:
            type: integer
        - name: post
          in: body
          schema:
            type: object
            properties:
                committee_id:
                    type: number
                header_image:
                    type: string
                    format: binary
                title:
                    type: string
                title_en:
                    type: string
                body:
                    type: string
                body_en:
                    type: string
                scheduled_date:
                    type: string
                    format: date-time
                draft:
                    type: boolean
                tags:
                    type: array
                    items:
                        type: integer
        responses:
            200:
                description: OK
            400:
                description: Missing authentication token
            402:
                description: Not authenticated
        """
        data = request.json
        if user.id:
            post = Post()
            post.user_id = user.id
            if data.get("title"):
                title = data.get("title")
                if title.get('se'):
                    post.title = title.get('se')
                if title.get('en'):
                    post.title_en = title.get('en')
            if data.get("body"):
                body = data.get("body")
                if body.get('se'):
                    post.body = body.get('se')
                # Fixed: previously this checked title.get('en'), which skipped
                # the English body and raised NameError when no title was sent.
                if body.get('en'):
                    post.body_en = body.get('en')
            if data.get('date'):
                post.date = data.get('date')
            if data.get('scheduled_date'):
                post.scheduled_date = data.get('scheduled_date')
            if data.get('draft'):
                post.draft = data.get('draft')
            if data.get('header_image'):
                post.header_image = upload_b64_image(data.get('header_image'))
            if data.get('committee_id'):
                post.committee_id = data.get('committee_id')
            if data.get('tags'):
                # NOTE(review): assigns raw tag ids to the relationship —
                # verify the ORM accepts ids here (add_cols resolves PostTag rows).
                post.tags = data.get('tags')
            db.session.add(post)
            db.session.commit()
            return jsonify(post.to_dict())
        else:
            # Fixed: previously referenced an undefined name `error` (NameError).
            return make_response(jsonify(success=False, error="Not authenticated"), 403)
def parseBoolean(string):
    """Map the strings 'true'/'false' to booleans; return anything else unchanged.

    NOTE: this module-level definition shadows the parseBoolean imported from
    api.resources.common at the top of the file.
    """
    mapping = {'true': True, 'false': False}
    return mapping.get(string, string)
def add_cols(data, post, request):
    """Copy recognised form fields from `data`/`request` onto a Post object.

    String fields are set verbatim, date fields are parsed with ISO_DATE_DEF,
    boolean fields go through parseBoolean, an uploaded header image is stored,
    and tag ids are resolved to PostTag rows.
    """
    dynamic_cols = ["committee_id", "title", "body", "title_en", "body_en"]
    date_cols = ["scheduled_date"]
    boolean_cols = ["draft"]
    for col in dynamic_cols:
        if data.get(col):
            setattr(post, col, data.get(col))
    for col in date_cols:
        if data.get(col):
            setattr(post, col, datetime.strptime(data.get(col), ISO_DATE_DEF))
    for col in boolean_cols:
        if data.get(col):
            setattr(post, col, parseBoolean(data.get(col)))
    if IMAGE_COL in request.files:
        image = request.files[IMAGE_COL]
        # NOTE(review): `upload_image` is not defined anywhere in this module
        # (only upload_b64_image is imported and save_image is defined below),
        # so this branch raises NameError — confirm which helper was intended.
        post.header_image = upload_image(image)
    if data.get("tags"):
        tags = json.loads(data["tags"])
        for tag_id in tags:
            post.tags.append(PostTag.query.get(tag_id))
def save_image(image, path):
    """Store an uploaded image under SAVE_FOLDER with a random UUID filename.

    Args:
        image: werkzeug FileStorage-like object (must expose .filename and .save()).
        path: URL/relative prefix the stored filename is joined onto.

    Returns:
        The joined relative path for the stored file.

    Raises:
        ValueError: if the file extension is not an allowed image type.
    """
    ALLOWED_EXTENTIONS = [".png", ".jpg", ".jpeg"]
    original_filename, extension = os.path.splitext(secure_filename(image.filename))
    filename = str(uuid.uuid4()) + extension
    if extension not in ALLOWED_EXTENTIONS:
        # Fixed: the original raised a bare string, which is a TypeError in
        # Python 3 — raise a real exception with the same message instead.
        raise ValueError("you can only upload .png or .jpg-files.")
    path = os.path.join(path, filename)
    local_path = os.path.join(SAVE_FOLDER, filename)
    image.save(local_path)
    return path
# Draw a strip plot of iris sepal lengths per species and display it.
import matplotlib.pyplot as plt
import seaborn as sea

sea.set(style='whitegrid')
iris = sea.load_dataset('iris')
ax = sea.stripplot(x='species', y='sepal_length', data=iris)
plt.title('Graph')
plt.show()
User/urls.py | zhoubin1022/StarFLowBackend | 2 | 12760038 | from django.urls import path
from User import admin, views
# URL routes for the User app: login endpoints, repository search/request
# workflow, and a test endpoint.
urlpatterns = [
    path('wxlogin', views.wxLogin),          # WeChat login
    path('githublogin', views.githubLogin),  # GitHub OAuth login
    path('repo_search', views.repo_search),
    path('repo_request', views.repo_request),
    path('reply_request', views.reply_request),
    path('request_info', views.request_info),
    path('test', views.test),
]
| 1.664063 | 2 |
test/integration/smoke/test_usage_events.py | serbaut/cloudstack | 14 | 12760039 | <filename>test/integration/smoke/test_usage_events.py<gh_stars>10-100
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Test cases for checking usage events
"""
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.lib.utils import (cleanup_resources, validateList)
from marvin.lib.base import (Account,
ServiceOffering,
VirtualMachine)
from marvin.lib.common import (get_domain,
get_zone,
get_template)
from marvin.codes import (PASS)
class TestUsageEvents(cloudstackTestCase):
    """Verify usage_events rows are written even when VM deployment fails."""

    @classmethod
    def setUpClass(cls):
        testClient = super(TestUsageEvents, cls).getClsTestClient()
        cls.apiclient = testClient.getApiClient()
        cls.testdata = testClient.getParsedTestDataConfig()
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.apiclient)
        cls.zone = get_zone(cls.apiclient, testClient.getZoneForTests())
        cls.template = get_template(
            cls.apiclient,
            cls.zone.id,
            cls.testdata["ostype"])
        cls._cleanup = []
        try:
            # Create large service offering so that VM creation fails
            # (8 cores x 8 GHz x 8 GB is assumed to exceed any test host).
            cls.testdata["service_offering"]["cpunumber"] = "8"
            cls.testdata["service_offering"]["cpuspeed"] = "8096"
            cls.testdata["service_offering"]["memory"] = "8096"
            cls.service_offering = ServiceOffering.create(
                cls.apiclient,
                cls.testdata["service_offering"]
            )
            cls._cleanup.append(cls.service_offering)
        except Exception as e:
            # Ensure partially created resources are removed before failing.
            cls.tearDownClass()
            raise e
        return

    @classmethod
    def tearDownClass(cls):
        try:
            cleanup_resources(cls.apiclient, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        # Create an account
        self.account = Account.create(
            self.apiclient,
            self.testdata["account"],
            domainid=self.domain.id
        )
        self.cleanup.append(self.account)

    def tearDown(self):
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced, basic"], required_hardware="true")
    def test_01_positive_tests_usage(self):
        """ Check events in usage_events table when VM creation fails
        Steps:
        1. Create service offering with large resource numbers
        2. Try to deploy a VM
        3. VM creation should fail and VM should be in error state
        4. Destroy the VM with expunge parameter True
        5. Check the events for the account in usage_events table
        6. There should be VM.CREATE, VM.DESTROY, VOLUME.CREATE and
           VOLUME.DELETE events present in the table
        """
        # Create VM in account — expected to fail because the offering is
        # oversized for any host in the test zone.
        with self.assertRaises(Exception):
            VirtualMachine.create(
                self.apiclient,
                self.testdata["small"],
                templateid=self.template.id,
                accountid=self.account.name,
                domainid=self.account.domainid,
                serviceofferingid=self.service_offering.id,
                zoneid=self.zone.id
            )
        vms = VirtualMachine.list(self.apiclient,
                                  account=self.account.name,
                                  domaind=self.account.domainid)
        self.assertEqual(validateList(vms)[0], PASS,
                         "Vm list validation failed")
        self.assertEqual(vms[0].state.lower(), "error",
                         "VM should be in error state")
        # Resolve the account's internal DB id from its UUID so we can
        # query the usage_event table directly.
        qresultset = self.dbclient.execute(
            "select id from account where uuid = '%s';"
            % self.account.id
        )
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        account_id = qresult[0]
        self.debug("select type from usage_event where account_id = '%s';"
                   % account_id)
        qresultset = self.dbclient.execute(
            "select type from usage_event where account_id = '%s';"
            % account_id
        )
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        # Substring-count the stringified result set for each expected event.
        qresult = str(qresultset)
        self.debug("Query result: %s" % qresult)
        # Check if VM.CREATE, VM.DESTROY events present in usage_event table
        self.assertEqual(
            qresult.count('VM.CREATE'),
            1,
            "Check VM.CREATE event in events table"
        )
        self.assertEqual(
            qresult.count('VM.DESTROY'),
            1,
            "Check VM.DESTROY in list events"
        )
        # Check if VOLUME.CREATE, VOLUME.DELETE events present in usage_event
        # table
        self.assertEqual(
            qresult.count('VOLUME.CREATE'),
            1,
            "Check VOLUME.CREATE in events table"
        )
        self.assertEqual(
            qresult.count('VOLUME.DELETE'),
            1,
            "Check VM.DELETE in events table"
        )
        return
| 1.71875 | 2 |
fenetpp/ltr/models/kys/conv_gru.py | Jee-King/FENet- | 12 | 12760040 | import torch
import torch.nn as nn
from ltr.models.layers.blocks import conv_block
class ConvGRUCell(nn.Module):
    """Convolutional GRU cell.

    Referenced from https://github.com/happyjin/ConvGRU-pytorch. Gates are
    computed by 2-D convolutions over the channel-concatenated input and
    hidden state.
    """

    def __init__(self, input_dim, hidden_dim, kernel_size, padding_mode='zeros'):
        super(ConvGRUCell, self).__init__()
        self.hidden_dim = hidden_dim
        in_channels = input_dim + hidden_dim
        if padding_mode == 'zeros':
            if not isinstance(kernel_size, (list, tuple)):
                kernel_size = (kernel_size, kernel_size)
            pad = (kernel_size[0] // 2, kernel_size[1] // 2)
            # Same-size convolutions for the reset gate, update gate and
            # candidate state (created in this order for reproducible init).
            self.conv_reset = nn.Conv2d(in_channels, hidden_dim, kernel_size, padding=pad)
            self.conv_update = nn.Conv2d(in_channels, hidden_dim, kernel_size, padding=pad)
            self.conv_state_new = nn.Conv2d(in_channels, hidden_dim, kernel_size, padding=pad)
        else:
            def make_conv():
                return conv_block(in_channels, hidden_dim, kernel_size=kernel_size, stride=1,
                                  padding=int(kernel_size // 2), batch_norm=False, relu=False,
                                  padding_mode=padding_mode)
            self.conv_reset = make_conv()
            self.conv_update = make_conv()
            self.conv_state_new = make_conv()

    def forward(self, input, state_cur):
        """Advance the recurrent state by one step and return the new state."""
        stacked = torch.cat([input, state_cur], dim=1)
        reset_gate = torch.sigmoid(self.conv_reset(stacked))
        update_gate = torch.sigmoid(self.conv_update(stacked))
        candidate = torch.tanh(
            self.conv_state_new(torch.cat([input, reset_gate * state_cur], dim=1)))
        # Convex combination of the previous state and the candidate state.
        return (1.0 - update_gate) * state_cur + update_gate * candidate
| 2.703125 | 3 |
pylas/errors.py | weyerhaeuser/pylas | 0 | 12760041 | """ All the custom exceptions types
"""
class PylasError(Exception):
    """Base class for all pylas-specific exceptions."""
    pass
class UnknownExtraType(PylasError):
    """Raised when an unknown extra type is encountered."""
    pass
class PointFormatNotSupported(PylasError):
    """Raised when a point format is not supported."""
    pass
class FileVersionNotSupported(PylasError):
    """Raised when the file's version is not supported."""
    pass
class LazPerfNotFound(PylasError):
    """Raised when the lazperf dependency cannot be found."""
    pass
class IncompatibleDataFormat(PylasError):
    """Raised when data is incompatible with the expected format."""
    pass
| 2.4375 | 2 |
cad/z_axis.py | drummonds/od-robot | 0 | 12760042 | <gh_stars>0
#!/usr/bin/env python
# Building parts for an optical disc robot
# Starting to prototype general environment variables
import cadquery
import cqparts
from cqparts.params import PositiveFloat
from cqparts.display import render_props, display
from cqparts.constraint import Mate, Fixed, Coincident
from cqparts.utils.geometry import CoordSystem
# -------------------- New Start ----------------------
from cqparts_fasteners.male import MaleFastenerPart
# from cqparts_fastners.bolts import Bolt
from cqparts.display import display
from rods_04 import ThreadedRod
from stepper import Stepper_28BYJ_48, StepperGear
from z_riser import ZRiser, ZRiserHolder
from nuts_bolts import Nut, Bolt
from microswitches import M2RollerSwitchAssembly
class ZMMotorHolder(cqparts.Part):
    """This is the holder for the motor, the nut trap, wiring loom etc. The back left corner will be 0,0
    so it is being built on -y and X Z. """
    _render = render_props(template="red", alpha=0.2)
    # Dimensions (presumably millimetres, cadquery's usual unit — TODO confirm).
    height = 22 # Height of main support block
    wall = 4
    nut_width = 6.5
    riser_width = 6.8
    width = wall + nut_width + wall # main support block
    def motor_support(self):
        """Motor support bracket"""
        h = self.wall + self.nut_width + 4
        w = self.width-2
        x_start = 15  # x offset of the bracket from the back-left origin
        offset = 0  # unused; kept as-is
        r = (
            cadquery.Workplane("XY")
            .transformed(
                offset=(x_start, -w, 0)
            )
            .rect(h, w, centered=False)
            # Bracket extends 24 above the main block to reach the motor.
            .extrude(self.height+24)
        )
        return r
    def microswitch_support(self):
        """Motor support brackets"""
        h = 8
        w = 25
        offset = 0  # unused; kept as-is
        r = (
            cadquery.Workplane("XY")
            .transformed(
                offset=(0, -w, 0)
            )
            .rect(h, w, centered=False)
            .extrude(self.height)
        )
        return r
    def make(self):
        # Main support block, built into -y as described in the class docstring,
        # unioned with the motor and microswitch brackets.
        r = (
            cadquery.Workplane("XY")
            .transformed(
                offset=(0, -self.width, 0)
            )
            .rect(50, self.width, centered=False)
            .extrude(self.height)
        )
        r = r.union(self.motor_support())
        r = r.union(self.microswitch_support())
        return r
    @property
    def mate_bolt(self):
        """This should mate to the end of the M8 support rod, from the internal origin at the back base"""
        return Mate(
            self,
            CoordSystem(
                origin=(15, -10.5, self.height / 2), xDir=(0, 1, 0), normal=(0, 0, -1)
            ).rotated((90, -90, 0)),
        )
    @property
    def mate_motor(self):
        """This should mount the motor"""
        # NOTE(review): offsets/rotation were presumably tuned by eye against
        # the 28BYJ-48 geometry — confirm before changing.
        return Mate(
            self,
            CoordSystem((33.7, 8.5, 25), (0, 0, 1), (-1, 0, 0)).rotated((-120, 0, 90)),
        )
    @property
    def mate_away(self):
        """This should mount the object away from where its meant to be so that you can examine details of the exploded design"""
        return Mate(
            self,
            CoordSystem((-250, 0, 0)),
        )
class ZAxis(cqparts.Assembly):
    """Z Axis stepper motor assembly"""

    # default appearance
    _render = render_props(template="green", alpha=0.2)

    def make_components(self):
        # Build every part of the assembly, keyed by the names used in
        # make_constraints() and make_alterations().
        gear = StepperGear(
            tooth_count=20, effective_radius=8.6, width=3.5, tooth_height=1.6
        )
        gear.gear_od = 8
        # The same riser instance is shared with its holder so the cutouts line up.
        riser = ZRiser()
        return {
            "bolt": Bolt(size="M8", length=40, show_cutout=True),
            "holder": ZMMotorHolder(),
            "motor": Stepper_28BYJ_48(),
            "riser": riser,
            "riser_holder": ZRiserHolder(riser),
            "gear": gear,
            "switch": M2RollerSwitchAssembly(),
        }

    def make_constraints(self):
        # Placement rules evaluated by cqparts to position each component.
        return [
            # Anchor the holder at the assembly origin.
            Fixed(
                self.components["holder"].mate_origin,
                CoordSystem((0, 0, 0), (1, 0, 0), (0, 0, 1)),
            ),
            Coincident(
                self.components["bolt"].mate_along(
                    0),
                # self.components["holder"].mate_origin, # Put nut at end
                self.components["holder"].mate_bolt,
            ),
            # Fixed( # Test bolt
            #     self.components["bolt"].mate_origin,
            #     CoordSystem((20, 30, 0), (0, 0, 1), (1, 0, 0)).rotated((0, 0, 0)),
            # ),
            Coincident( # Move motor
                self.components["motor"].mate_origin,
                self.components["holder"].mate_motor,
            ),
            # Seat the gear on the motor shaft.
            Coincident(
                self.components["gear"].mate_shaft, self.components["motor"].mate_gear
            ),
            Fixed( # Move riser
                self.components["riser_holder"].mate_origin,
                CoordSystem((7.5+14.7/2.0, -18.4, 0), (-1, 0, 0), (0, 0, 1)).rotated((0, 0, 180)),
            ),
            Coincident( # mount
                self.components["riser"].mate_holder_demo,
                self.components["riser_holder"].mate_origin,
            ),
            # Position the limit switch against the holder.
            Fixed(
                self.components["switch"].mate_origin,
                CoordSystem((0, -7, 3), xDir=(0, 0, 1), normal=(-1, 0, 0)).rotated((90, 0, 0)), )
        ]

    def make_alterations(self):
        """Apply all the cutouts"""
        # Cutout the motor
        holder = self.components["holder"]
        self.components["motor"].apply_cutout(holder)
        self.components["motor"].apply_cutout(self.components["riser_holder"])
        self.components["bolt"].apply_cutout(holder)
        # cutout the riser
        riser = self.components["riser"]
        riser.apply_cutout(self.components["riser_holder"])
        riser.apply_cutout(holder)
        # Add mounting holes for microswitch
        self.components['switch'].apply_cutout(holder)
# ------------------- Display Result -------------------
# Renders the assembly when this file is loaded as a FreeCAD cadquery module.
# Could also export to another format.
if __name__ == "__cq_freecad_module__":
    # Swap in one of these to inspect a single part instead of the assembly:
    # r = Nut()
    # r = ThreadedRod()
    # r = ZMMotorHolder()
    r = ZAxis()
    display(r)
| 2.5625 | 3 |
LorisBallsBasedModel/Models/Models.py | LorisPilotto/LorisBallsBasedModel | 0 | 12760043 | <filename>LorisBallsBasedModel/Models/Models.py
import tensorflow as tf
from LorisBallsBasedModel.Layers.Step import Step, FirstStep
from LorisBallsBasedModel.Layers.Processing import InputsProcessing
from LorisBallsBasedModel.Layers.LSTMCell import LSTMCell
class SingleLayerPerceptron(tf.keras.Model):
    """A minimal model: an optional preprocessing layer feeding one output layer."""

    def __init__(self, output_layer, processing_layer=None, **kwargs):
        """
        :param output_layer: layer producing the model's output.
        :param processing_layer: optional layer applied to the raw inputs first.
        """
        super().__init__(**kwargs)
        self.output_layer = output_layer
        self.processing_layer = processing_layer

    def call(self, inputs):
        """Apply the optional preprocessing, then the output layer."""
        features = (
            inputs
            if self.processing_layer is None
            else self.processing_layer(inputs)
        )
        return self.output_layer(features)
class MultiLayerPerceptron(tf.keras.Model):
    """A sequential stack of layers with an optional preprocessing layer."""

    def __init__(self, layers_list, processing_layer=None, **kwargs):
        """
        :param layers_list: layers applied in order to produce the output.
        :param processing_layer: optional layer applied to the raw inputs first.
        """
        super().__init__(**kwargs)
        self.layers_list = layers_list
        self.processing_layer = processing_layer

    def call(self, inputs):
        """Feed the inputs through the optional preprocessing, then each layer in order."""
        outputs = (
            inputs
            if self.processing_layer is None
            else self.processing_layer(inputs)
        )
        for layer in self.layers_list:
            outputs = layer(outputs)
        return outputs
class LorisBallsBasedModel(tf.keras.Model):
    """Multi-step attentive model: a first step followed by nbr_steps-1
    further steps whose embeddings are summed into the final embedding."""

    def __init__(self,
                 output_layer,
                 nbr_steps,
                 first_step_args,
                 first_step_layer=FirstStep,
                 step_args=None,
                 step_layer=Step,
                 input_processing_layer=None,
                 **kwargs):
        """
        :param output_layer: final layer applied to the summed embedding
            (None to return the raw embedding, e.g. when used stacked).
        :param nbr_steps: total number of steps; must be >= 1.
        :param first_step_args: kwargs dict used to build first_step_layer.
        :param first_step_layer: class used to build the first step.
        :param step_args: kwargs dict (shared) or list of dicts (one per step)
            for steps 2..nbr_steps; required when nbr_steps > 1.
        :param step_layer: class used to build steps 2..nbr_steps.
        :param input_processing_layer: optional layer applied to raw inputs.
        """
        if nbr_steps < 1:
            raise ValueError("Give a 'nbr_steps' strictly higher than 0.")
        if nbr_steps > 1 and step_args is None:
            raise ValueError("Give a 'step_args' (list or dict) for steps 2 and higher.")
        super().__init__(**kwargs)
        self.input_processing_layer = input_processing_layer
        self.nbr_steps = nbr_steps
        self.first_step_args = first_step_args
        # One LSTM memory cell shared by every attentive transformer that does
        # not bring its own. NOTE(review): this mutates the caller's args dicts.
        memory_cell = LSTMCell()
        if 'memory_cell' not in self.first_step_args['attentive_transformer_params_dict'].keys():
            self.first_step_args['attentive_transformer_params_dict']['memory_cell'] = memory_cell
        self.first_step_layer = first_step_layer(**self.first_step_args)
        if self.nbr_steps > 1:
            self.step_args = step_args
            if isinstance(self.step_args, list):
                # One args dict per additional step.
                if len(self.step_args) != self.nbr_steps-1:
                    raise ValueError(f"'step_args' should be of size {self.nbr_steps-1} (i.e. nbr_steps-1).")
                self.steps_list = []
                for args in self.step_args:
                    if 'memory_cell' not in args['attentive_transformer_params_dict'].keys():
                        args['attentive_transformer_params_dict']['memory_cell'] = memory_cell
                    self.steps_list.append(step_layer(**args))
            else:
                # A single args dict shared by all additional steps.
                if 'memory_cell' not in self.step_args['attentive_transformer_params_dict'].keys():
                    self.step_args['attentive_transformer_params_dict']['memory_cell'] = memory_cell
                self.steps_list = [step_layer(**self.step_args) for s in range(self.nbr_steps-1)]
        self.output_layer = output_layer

    def forward(self, inputs):
        """Run all steps; return (output, list of per-step masks, final states)."""
        # Align the input dtype with the model's compute dtype.
        if inputs.dtype.base_dtype != self._compute_dtype_object.base_dtype:
            inputs = tf.cast(inputs, dtype=self._compute_dtype_object)
        if self.input_processing_layer is not None:
            inputs = self.input_processing_layer(inputs)
        [embedding, embedding_pass_next_step, first_mask], states = self.first_step_layer(inputs)
        prior_embeddings_list = [embedding_pass_next_step]
        prior_masks_list = [first_mask]
        if self.nbr_steps > 1:
            # Each step sees the raw inputs plus everything produced by the
            # previous steps, threading the memory states through.
            for step in self.steps_list:
                [tmp_embedding, tmp_embedding_pass_next_step, tmp_mask], states = step([inputs,
                                                                                        prior_embeddings_list,
                                                                                        prior_masks_list],
                                                                                       states)
                embedding += tmp_embedding
                prior_embeddings_list.append(tmp_embedding_pass_next_step)
                prior_masks_list.append(tmp_mask)
        if self.output_layer is None:  # For stacked model
            output = embedding
        else:
            output = self.output_layer(embedding)
        return output, prior_masks_list, states

    def call(self, inputs):
        """Standard Keras entry point: returns only the model output."""
        return self.forward(inputs)[0]

    def masks_explain(self, inputs):
        """Return the per-step masks (feature-importance signal)."""
        return self.forward(inputs)[1]

    def final_states(self, inputs):
        """Return the memory-cell states after the last step."""
        return self.forward(inputs)[2]
class LorisBallsBasedModelTransferLearning(tf.keras.Model):
    """Variant of LorisBallsBasedModel built from pre-existing (e.g. pre-trained)
    step layers instead of constructing them from argument dicts."""

    def __init__(self,
                 step_layers_list,
                 output_layer,
                 input_processing_layer=None,
                 **kwargs):
        """
        :param step_layers_list: step layers to reuse; the first element is the
            first step, the rest are subsequent steps.
        :param output_layer: final layer (None to return the raw embedding).
        :param input_processing_layer: optional layer applied to raw inputs.
        """
        super().__init__(**kwargs)
        self.step_layers_list = step_layers_list
        self.output_layer = output_layer
        self.input_processing_layer = input_processing_layer

    def forward(self, inputs):
        """Run all steps; return (output, list of per-step masks, final states)."""
        # Align the input dtype with the model's compute dtype.
        if inputs.dtype.base_dtype != self._compute_dtype_object.base_dtype:
            inputs = tf.cast(inputs, dtype=self._compute_dtype_object)
        if self.input_processing_layer is not None:
            inputs = self.input_processing_layer(inputs)
        [embedding, embedding_pass_next_step, first_mask], states = self.step_layers_list[0](inputs)
        prior_embeddings_list = [embedding_pass_next_step]
        prior_masks_list = [first_mask]
        # Later steps receive the raw inputs plus all prior outputs and thread
        # the memory states through.
        for step in self.step_layers_list[1:]:
            [tmp_embedding, tmp_embedding_pass_next_step, tmp_mask], states = step([inputs,
                                                                                    prior_embeddings_list,
                                                                                    prior_masks_list],
                                                                                   states)
            embedding += tmp_embedding
            prior_embeddings_list.append(tmp_embedding_pass_next_step)
            prior_masks_list.append(tmp_mask)
        if self.output_layer is None:  # For stacked model
            output = embedding
        else:
            output = self.output_layer(embedding)
        return output, prior_masks_list, states

    def call(self, inputs):
        """Standard Keras entry point: returns only the model output."""
        return self.forward(inputs)[0]

    def masks_explain(self, inputs):
        """Return the per-step masks (feature-importance signal)."""
        return self.forward(inputs)[1]

    def final_states(self, inputs):
        """Return the memory-cell states after the last step."""
        return self.forward(inputs)[2]
class StackedLorisBallsBasedModels(tf.keras.Model):
    """An ensemble of LorisBallsBasedModel instances whose embeddings are
    concatenated and fed to a final output layer."""

    def __init__(self,
                 output_layer,
                 nbr_models,
                 models_args,
                 models=LorisBallsBasedModel,
                 input_processing_layer=None,
                 **kwargs):
        """
        :param output_layer: layer applied to the concatenated embeddings.
        :param nbr_models: number of sub-models in the stack.
        :param models_args: kwargs dict (shared) or list of dicts (one per model).
        :param models: model class (shared) or list of classes (one per model;
            a list is only allowed when models_args is also a list).
        :param input_processing_layer: optional layer applied to raw inputs.
        """
        super().__init__(**kwargs)
        self.input_processing_layer = input_processing_layer
        self.nbr_models = nbr_models
        self.models_args = models_args
        if isinstance(self.models_args, list):
            if len(self.models_args) != self.nbr_models:
                raise ValueError(f"'models_args' should be of size {self.nbr_models} (i.e. nbr_models).")
            if isinstance(models, list):
                if len(models) != len(self.models_args):
                    # Bug fix: report the expected size, not the full args list.
                    raise ValueError(f"'models' should be of size {len(self.models_args)} (i.e. len(models_args)).")
                self.models_list = [models[args_id](**args) for args_id, args in enumerate(self.models_args)]
            else:
                self.models_list = [models(**args) for args in self.models_args]
        else:
            # One shared args dict: build nbr_models identical-config models.
            self.models_list = [models(**self.models_args) for s in range(self.nbr_models)]
        self.output_layer = output_layer

    def forward(self, inputs):
        """Run every sub-model; return (output, per-model masks, per-model states)."""
        # Align the input dtype with the model's compute dtype.
        if inputs.dtype.base_dtype != self._compute_dtype_object.base_dtype:
            inputs = tf.cast(inputs, dtype=self._compute_dtype_object)
        if self.input_processing_layer is not None:
            inputs = self.input_processing_layer(inputs)
        first_embedding, first_prior_masks_list, first_states = self.models_list[0].forward(inputs)
        embeddings_list = [first_embedding]
        prior_masks_list = [first_prior_masks_list]
        states_list = [first_states]
        for model in self.models_list[1:]:
            tmp_embedding, tmp_prior_masks_list, tmp_states = model.forward(inputs)
            embeddings_list.append(tmp_embedding)
            prior_masks_list.append(tmp_prior_masks_list)
            states_list.append(tmp_states)
        # Concatenate the sub-model embeddings before the head.
        output = self.output_layer(tf.keras.layers.Concatenate()(embeddings_list))
        return output, prior_masks_list, states_list

    def call(self, inputs):
        """Standard Keras entry point: returns only the model output."""
        return self.forward(inputs)[0]

    def masks_explain(self, inputs):
        """Return each sub-model's list of per-step masks."""
        return self.forward(inputs)[1]

    def final_states(self, inputs):
        """Return each sub-model's final memory states."""
        return self.forward(inputs)[2]
fsw/forms/models.py | bhushanmohanraj/fsw | 0 | 12760044 | <reponame>bhushanmohanraj/fsw
"""
Convert WTForms form classes to SQLAlchemy model classes.
"""
from sqlalchemy import inspect, types
from wtforms import fields, validators
from wtforms.fields import html5
# The output formats for the HTML date and time fields.
# WTForms provides inconsistent default values,
# so these values are passed when creating the fields.
TIME_FORMAT = "%H:%M"
DATE_FORMAT = "%Y-%m-%d"
DATETIME_LOCAL_FORMAT = "%Y-%m-%dT%H:%M"
# The WTForms field types corresponding to the SQLAlchemy column types.
# Keyed by the exact column type class (looked up via `type(column.type)`,
# so subclasses of these types do not match).
_column_field_types = {
    types.String: fields.StringField,
    types.Integer: fields.IntegerField,
    types.DateTime: html5.DateTimeLocalField,
    types.Date: html5.DateField,
    types.Time: html5.TimeField,
    types.Boolean: fields.BooleanField,
    types.Enum: fields.SelectField,
}
def _column_field_type(column) -> type:
    """
    The field type for constructing a WTForms field from an SQLAlchemy column.

    Raises RuntimeError for column types with no known field mapping.
    """
    # Look up by the exact type class; None is never a mapped value.
    field_type = _column_field_types.get(type(column.type))
    if field_type is None:
        raise RuntimeError(
            f"{type(column.type)} columns cannot be converted to form fields."
        )
    return field_type
def _column_field_kwargs(column) -> dict:
    """
    Keyword arguments for constructing a WTForms field from an SQLAlchemy column.
    """
    # Base label/description/validators derived from the column itself.
    field_kwargs = {
        "label": column.name.replace("_", " ").title(),
        "description": column.doc if column.doc else "",
        "validators": [],
    }
    # Required-ness follows the column's nullability.
    if column.nullable:
        field_kwargs["validators"].append(validators.Optional())
    else:
        field_kwargs["validators"].append(validators.InputRequired())
    # Extra keyword arguments for specific column types (exact class match).
    column_type = type(column.type)
    if column_type is types.String:
        field_kwargs["validators"].append(validators.Length(max=column.type.length))
    elif column_type is types.DateTime:
        field_kwargs["format"] = DATETIME_LOCAL_FORMAT
    elif column_type is types.Date:
        field_kwargs["format"] = DATE_FORMAT
    elif column_type is types.Time:
        field_kwargs["format"] = TIME_FORMAT
    elif column_type is types.Enum:
        field_kwargs["choices"] = [
            (choice, choice.title()) for choice in column.type.enums
        ]
    return field_kwargs
class ModelFormMixin:
    """
    Add a class method to create WTForms form classes from SQLAlchemy model classes.
    """

    @classmethod
    def model_form(cls, model, fields: list[str]):
        """
        Create a WTForms form class from an SQLAlchemy model class.

        The `fields` parameter should specify the form fields to be created from the model.
        """

        class ModelForm(cls):
            """
            The form class created from the model.
            """

        columns = inspect(model).c
        for column in columns:
            name = column.name
            # Only the explicitly requested columns become form fields.
            if name not in fields:
                continue
            field_type = _column_field_type(column)
            field_kwargs = _column_field_kwargs(column)
            # NOTE(review): fields are attached after class creation; this
            # relies on WTForms' metaclass picking up attributes set via
            # setattr — confirm against the WTForms version in use.
            setattr(ModelForm, name, field_type(**field_kwargs))
        return ModelForm
| 2.890625 | 3 |
app/routes/home/__init__.py | ygorazambuja/covid-scrapper | 7 | 12760045 | from flask import Blueprint
home_blueprint = Blueprint('home_blueprint', __name__)
from . import views
| 1.179688 | 1 |
perftest1/migrations/0003_auto_20200428_1915.py | mbranko/perftest-1 | 0 | 12760046 | <filename>perftest1/migrations/0003_auto_20200428_1915.py
# Generated by Django 3.0.5 on 2020-04-28 19:15
from django.db import migrations
from django.contrib.auth.models import User
def add_user(apps, schema_editor):
    """Data migration: create a default student account.

    NOTE(review): this imports the *current* User model directly instead of
    ``apps.get_model('auth', 'User')``; the historical model would lack the
    ``create_user`` manager method, but tying the migration to the present
    schema is fragile — confirm this is intentional.
    """
    # create_user(username, email, password) — the '<EMAIL>' placeholders are
    # artifacts of repository scrubbing.
    user = User.objects.create_user('<EMAIL>', '<EMAIL>', '12345')
    user.first_name = 'Učenik'
    user.last_name = 'Učeniković'
    user.save()
class Migration(migrations.Migration):
    """Runs the add_user data migration after 0002."""

    dependencies = [
        ('perftest1', '0002_auto_20200428_1438'),
    ]

    operations = [
        # Forward-only: no reverse function is supplied, so this migration
        # cannot be unapplied cleanly.
        migrations.RunPython(add_user),
    ]
| 1.515625 | 2 |
myapp.py | mobyw/FingerprintRecognition | 0 | 12760047 | #
# 为 GUI 封装的函数 不可直接运行
# Author: Xiaohei
# Updatetime: 2021-12-01
#
import cv2
import os
import numpy
import pickle
from enhance import image_enhance
def get_descriptors(img):
    """Compute ORB descriptors at Harris-corner keypoints of a fingerprint image.

    Returns (keypoints, descriptors) for the enhanced, binarised image.
    """
    # Contrast-limited adaptive histogram equalisation, then ridge enhancement.
    clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
    img = clahe.apply(img)
    img = image_enhance.image_enhance(img)
    img = numpy.array(img, dtype=numpy.uint8)
    # Binarise with Otsu (inverted), then map foreground 255 -> 1.
    ret, img = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY_INV | cv2.THRESH_OTSU)
    img[img == 255] = 1
    # Harris corner response, normalised.
    # NOTE(review): the second positional argument of cv2.normalize is `dst`,
    # not alpha — `0, 255` here may not do what was intended; confirm.
    harris_corners = cv2.cornerHarris(img, 3, 3, 0.04)
    harris_normalized = cv2.normalize(harris_corners, 0, 255, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_32FC1)
    threshold_harris = 125
    # Keep one keypoint per pixel whose corner response exceeds the threshold
    # (same row-major scan order as the original nested loops).
    keypoints = [
        cv2.KeyPoint(y, x, 1)
        for x in range(harris_normalized.shape[0])
        for y in range(harris_normalized.shape[1])
        if harris_normalized[x][y] > threshold_harris
    ]
    # ORB descriptors computed at the Harris keypoints.
    orb = cv2.ORB_create()
    _, des = orb.compute(img, keypoints)
    return keypoints, des
def match(des1, path, name_lst):
    """Average ORB match distance between des1 and each stored descriptor set.

    :param des1: descriptors of the probe fingerprint.
    :param path: directory holding the pickled descriptor files.
    :param name_lst: file names (within ``path``) to compare against.
    :return: list of average distances aligned with ``name_lst``
        (lower = more similar), or None when ``name_lst`` is empty.
    """
    if not name_lst:
        return None
    # One matcher reused for every stored template (it keeps no state).
    bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
    avg_lst = []
    for name in name_lst:
        # SECURITY: pickle.load on files under `path` — only safe if the
        # directory is fully trusted.
        # "rb+" kept from the original; plain "rb" would suffice for reading.
        with open("{}/{}".format(path, name), "rb+") as f:
            des2 = pickle.load(f)
        matches = bf.match(des1, des2)
        # The mean distance is order-independent, so the original sort was
        # unnecessary work and has been removed.
        avg_lst.append(sum(m.distance for m in matches) / len(matches))
    return avg_lst
def run_app(image_path, data_path):
    """Match a fingerprint image against the stored database and enrol it.

    :param image_path: path to the probe fingerprint image.
    :param data_path: directory of pickled descriptor files.
    :return: (flag, name) — flag is True when the best match's average
        distance is below the score threshold; name is the best-matching
        file name, or "None" when the database is empty.
    :raises Exception: if the image cannot be read.
    """
    img1 = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
    if img1 is None:
        raise Exception("Invalid image path!")
    img1 = cv2.resize(img1, dsize=(256, 364))
    kp1, des1 = get_descriptors(img1)

    # All regular files in the database directory are candidate templates.
    name_lst = [name for name in os.listdir(data_path)
                if os.path.isfile(os.path.join(data_path, name))]
    avgs = match(des1, data_path, name_lst)

    # Lower average distance = better match; accept strictly below this.
    score_threshold = 40
    if avgs is not None:
        best = min(avgs)
        flag = best < score_threshold
        name = name_lst[avgs.index(best)]
    else:
        flag = False
        name = "None"

    # Enrol the probe under its file base name (this save was previously
    # duplicated in every branch).
    name1 = image_path.replace("\\", "/").split("/")[-1].split(".")[0]
    if name1:
        with open("{}/{}".format(data_path, name1), "wb+") as f:
            pickle.dump(des1, f)
    return flag, name
| 2.765625 | 3 |
piultrasonic/ultrasonic.py | untrobotics/IEEE-2019-R5 | 0 | 12760048 | <filename>piultrasonic/ultrasonic.py
#!/usr/bin/python
import RPi.GPIO as GPIO
import Adafruit_GPIO.SPI as SPI
import Adafruit_MCP3008
import time
# Pin numbers for the software-SPI connection to the MCP3008 ADC
# (presumably BCM numbering — confirm against the wiring).
CLK = 18
MISO = 23
MOSI = 24
CS = 25
# ADC instance; channel 1 is read below as the ultrasonic echo line.
mcp = Adafruit_MCP3008.MCP3008(clk=CLK, cs=CS, miso=MISO, mosi=MOSI)
try:
    PIN_TRIGGER = 26

    # Bug fix: RPi.GPIO requires a pin-numbering mode before setup();
    # previously this relied on the ADC library having set it as a side
    # effect. BCM matches the pin numbers used above.
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(PIN_TRIGGER, GPIO.OUT)
    GPIO.output(PIN_TRIGGER, GPIO.LOW)

    print("Waiting for sensor to settle")
    time.sleep(2)

    print("Calculating distance")
    # 10 µs trigger pulse starts the ultrasonic measurement.
    GPIO.output(PIN_TRIGGER, GPIO.HIGH)
    time.sleep(0.00001)
    GPIO.output(PIN_TRIGGER, GPIO.LOW)

    # The echo line is read through ADC channel 1; > 1000 counts is HIGH.
    # Initialise first so a missed rising edge cannot raise NameError below.
    pulse_start_time = time.time()
    while mcp.read_adc(1) < 1000:
        pulse_start_time = time.time()
    pulse_started = 0
    # pulse_started forces at least one read of the end time.
    while mcp.read_adc(1) > 1000 or pulse_started == 0:
        pulse_started = 1
        pulse_end_time = time.time()
    pulse_duration = pulse_end_time - pulse_start_time
    # Half the round trip at the speed of sound: ~17150 cm/s.
    distance = round(pulse_duration * 17150, 2)
    print("Distance:", distance, "cm")
finally:
    GPIO.cleanup()
| 2.8125 | 3 |
main/migrations/0008_auto_20211005_1758.py | spralja/mycalendar | 0 | 12760049 | <filename>main/migrations/0008_auto_20211005_1758.py
# Generated by Django 3.2.8 on 2021-10-05 15:58
from django.db import migrations
class Migration(migrations.Migration):
    """Removes the Day and Week models."""

    dependencies = [
        ('main', '0007_alter_day_week'),
    ]

    operations = [
        # Day is deleted before Week — presumably because Day referenced
        # Week (migration 0007 altered day.week); confirm against 0007.
        migrations.DeleteModel(
            name='Day',
        ),
        migrations.DeleteModel(
            name='Week',
        ),
    ]
| 1.390625 | 1 |
fortumo/models.py | v0y/django-fortumo | 0 | 12760050 | from django.db import models
from json_field import JSONField
class Service(models.Model):
    """A Fortumo mobile-payment service configuration."""

    name = models.CharField(max_length=64)
    # Shared secret used to check the `sig` field on incoming requests.
    secret = models.CharField(max_length=128)
    service_id = models.CharField(max_length=128)
    # Allowed source IPs stored as a JSON list.
    # Bug fix: use the `list` callable instead of a shared mutable `[]`
    # default, which every instance would otherwise alias.
    ips = JSONField(default=list)
    validate_ip = models.BooleanField(default=True)

    def __str__(self):
        return self.name
class Message(models.Model):
    """An incoming premium-SMS message as reported to us."""

    message = models.CharField(max_length=64)
    # Sender identifier (indexed for lookups by sender).
    sender = models.CharField(max_length=64, db_index=True)
    # Two-character country code (presumably ISO 3166-1 alpha-2 — confirm).
    country = models.CharField(max_length=2)
    price = models.FloatField()
    # Price excluding VAT.
    price_wo_vat = models.FloatField()
    # Three-character currency code (presumably ISO 4217 — confirm).
    currency = models.CharField(max_length=3)
    service_id = models.CharField(max_length=128)
    message_id = models.CharField(max_length=128)
    keyword = models.CharField(max_length=64)
    # Short number the message was sent to.
    shortcode = models.CharField(max_length=64)
    operator = models.CharField(max_length=128)
    billing_type = models.CharField(max_length=2)
    status = models.CharField(max_length=64)
    test = models.CharField(max_length=16)
    # Request signature supplied by the payment provider.
    sig = models.CharField(max_length=128)

    def __str__(self):
        return '{} - {} from {} on {}'.format(
            self.keyword,
            self.message,
            self.sender,
            self.shortcode,
        )
class Payment(models.Model):
    """A PIN issued against a paid Message, redeemable once."""

    # on_delete made explicit: CASCADE was the implicit default before
    # Django 2.0 (where the argument became mandatory), so behaviour is
    # unchanged while staying forward-compatible.
    service = models.ForeignKey(Service, on_delete=models.CASCADE, related_name='payments')
    message = models.OneToOneField(Message, on_delete=models.CASCADE)
    pin = models.CharField(max_length=16, unique=True)
    # Set once the PIN has been redeemed.
    used = models.BooleanField(default=False)

    def __str__(self):
        return '{} - {}'.format(self.id, self.pin)
| 2.046875 | 2 |